// mymuseum-visitapp/lib/Screens/Article/audio_player_floating.dart

import 'dart:io';
import 'dart:typed_data';

import 'package:flutter/material.dart';
import 'package:mymuseum_visitapp/constants.dart';
import 'package:just_audio/just_audio.dart';
import 'package:just_audio_cache/just_audio_cache.dart';

/// Floating play/pause button that plays an article's audio track.
///
/// The audio can come from an already-downloaded [file], from in-memory
/// [audioBytes], or be streamed (and cached) from [resourceURl]. When
/// [isAuto] is true, playback starts as soon as the source is loaded.
class AudioPlayerFloatingContainer extends StatefulWidget {
  const AudioPlayerFloatingContainer({
    Key? key,
    required this.file,
    required this.audioBytes,
    required this.resourceURl,
    required this.isAuto,
  }) : super(key: key);

  final File? file;
  final Uint8List? audioBytes;
  final String resourceURl;
  final bool isAuto;

  @override
  State<AudioPlayerFloatingContainer> createState() =>
      _AudioPlayerFloatingContainerState();
}

class _AudioPlayerFloatingContainerState
    extends State<AudioPlayerFloatingContainer> {
  final AudioPlayer player = AudioPlayer();
  Uint8List? audiobytes;
  bool isplaying = false;   // audio is currently playing
  bool audioplayed = false; // playback has been started at least once
  int currentpos = 0;       // current position, in milliseconds
  int maxduration = 100;    // total duration, in seconds
  Duration? durationAudio;
  String currentpostlabel = "00:00"; // remaining time, mm:ss

  @override
  void initState() {
    super.initState();
    Future.delayed(Duration.zero, () async {
      if (widget.audioBytes != null) {
        audiobytes = widget.audioBytes!;
      }
      if (widget.file != null) {
        audiobytes = await fileToUint8List(widget.file!);
      }

      // Total duration of the loaded audio.
      player.durationStream.listen((Duration? d) {
        if (d != null) {
          maxduration = d.inSeconds;
          durationAudio = d;
        }
      });

      // Current playback position; used to build the remaining-time label.
      player.positionStream.listen((event) {
        if (durationAudio == null || !mounted) return;
        currentpos = event.inMilliseconds;
        var remaining =
            Duration(milliseconds: durationAudio!.inMilliseconds - currentpos);
        if (remaining.isNegative) remaining = Duration.zero;
        final rminutes = remaining.inMinutes.remainder(60);
        final rseconds = remaining.inSeconds.remainder(60);
        currentpostlabel =
            '${rminutes.toString().padLeft(2, '0')}:${rseconds.toString().padLeft(2, '0')}';
        setState(() {
          // Playback reached the end: reset the player and the UI.
          if (currentpos > durationAudio!.inMilliseconds) {
            player.stop();
            player.seek(Duration.zero);
            isplaying = false;
            audioplayed = false;
            currentpostlabel = "00:00";
          }
        });
      });

      if (audiobytes != null) {
        // Audio is available locally (bytes or file): feed it directly.
        await player.setAudioSource(LoadedSource(audiobytes!));
      } else {
        // No local copy: stream from the URL, caching via just_audio_cache.
        await player.dynamicSet(url: widget.resourceURl);
      }

      if (widget.isAuto && mounted) {
        player.play();
        setState(() {
          isplaying = true;
          audioplayed = true;
        });
      }
    });
  }

  @override
  void dispose() {
    player.stop();
    player.dispose();
    super.dispose();
  }

  Future<Uint8List> fileToUint8List(File file) async {
    // File.readAsBytes already returns a Uint8List.
    return file.readAsBytes();
  }

  @override
  Widget build(BuildContext context) {
    return FloatingActionButton(
      backgroundColor: kMainColor1,
      onPressed: () {
        if (!isplaying) {
          // First start or resume after a pause (the source was set in
          // initState, so play() covers both cases).
          player.play();
          setState(() {
            isplaying = true;
            audioplayed = true;
          });
        } else {
          player.pause();
          setState(() {
            isplaying = false;
          });
        }
      },
      child: isplaying
          ? Column(
              mainAxisAlignment: MainAxisAlignment.center,
              children: [
                const Icon(Icons.pause),
                Text(currentpostlabel),
              ],
            )
          : audioplayed
              ? Column(
                  mainAxisAlignment: MainAxisAlignment.center,
                  children: [
                    const Icon(Icons.play_arrow),
                    Text(currentpostlabel),
                  ],
                )
              : const Icon(Icons.play_arrow),
    );
  }
}
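
// Illustrative only: a minimal sketch of how a screen could embed the player,
// assuming the audio URL is already known. `ArticleAudioDemo` and `audioUrl`
// are hypothetical names, not part of the app.
class ArticleAudioDemo extends StatelessWidget {
  const ArticleAudioDemo({Key? key, required this.audioUrl}) : super(key: key);

  final String audioUrl;

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      body: const Center(child: Text('Article content')),
      // No local file or bytes: the player falls back to streaming the URL.
      floatingActionButton: AudioPlayerFloatingContainer(
        file: null,
        audioBytes: null,
        resourceURl: audioUrl,
        isAuto: false,
      ),
    );
  }
}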

/// Feeds an in-memory byte buffer to the player as a [StreamAudioSource].
class LoadedSource extends StreamAudioSource {
  final List<int> bytes;

  LoadedSource(this.bytes);

  @override
  Future<StreamAudioResponse> request([int? start, int? end]) async {
    start ??= 0;
    end ??= bytes.length;
    return StreamAudioResponse(
      sourceLength: bytes.length,
      contentLength: end - start,
      offset: start,
      stream: Stream.value(bytes.sublist(start, end)),
      // Assumes MP3 content; adjust for other audio formats.
      contentType: 'audio/mpeg',
    );
  }
}
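
// Illustrative only: a minimal sketch of playing raw bytes through
// LoadedSource, e.g. after downloading an audio resource yourself.
// `playBytes` is a hypothetical helper, not used elsewhere in the app.
Future<void> playBytes(Uint8List bytes) async {
  final player = AudioPlayer();
  await player.setAudioSource(LoadedSource(bytes));
  player.play();
  // Dispose the player once playback has run to completion.
  player.processingStateStream
      .firstWhere((s) => s == ProcessingState.completed)
      .then((_) => player.dispose());
}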