manager-app/lib/Components/audio_player.dart
2023-03-31 16:45:06 +02:00

242 lines
7.6 KiB
Dart

import 'dart:typed_data';
//import 'package:audioplayers/audioplayers.dart';
import 'package:flutter/material.dart';
import 'package:manager_app/app_context.dart';
import 'package:manager_app/constants.dart';
import 'package:provider/provider.dart';
import 'package:just_audio/just_audio.dart';
/// A floating action button that plays back an in-memory audio clip.
///
/// [audioBytes] holds the raw encoded audio data; [isAuto] controls whether
/// playback starts automatically when the widget is first built.
class AudioPlayerFloatingContainer extends StatefulWidget {
  /// Raw encoded audio data to play.
  final Uint8List audioBytes;

  /// Whether playback should begin automatically on first build.
  final bool isAuto;

  const AudioPlayerFloatingContainer({Key key, this.audioBytes, this.isAuto})
      : super(key: key);

  @override
  State<AudioPlayerFloatingContainer> createState() {
    return _AudioPlayerFloatingContainerState();
  }
}
/// State for [AudioPlayerFloatingContainer]: owns the [AudioPlayer], tracks
/// position/duration, and renders a play/pause FAB with a remaining-time label.
class _AudioPlayerFloatingContainerState extends State<AudioPlayerFloatingContainer> {
  /// Underlying just_audio player driving playback.
  final AudioPlayer player = AudioPlayer();

  Uint8List audiobytes;      // local copy of widget.audioBytes
  bool isplaying = false;    // true while audio is actively playing
  bool audioplayed = false;  // true once playback has started at least once
  int currentpos = 0;        // current playback position in milliseconds
  int maxduration = 100;     // total duration in seconds (updated from durationStream)
  Duration durationAudio;    // full duration of the loaded audio; null until known
  String currentpostlabel = "00:00"; // remaining-time label shown on the button

  @override
  void initState() {
    // Defer to after the first frame so setState is safe and widget.* is ready.
    Future.delayed(Duration.zero, () async {
      audiobytes = widget.audioBytes;

      // Track the total duration of whatever source gets loaded.
      player.durationStream.listen((Duration d) {
        if (d == null) return; // duration is unknown until a source is loaded
        maxduration = d.inSeconds;
        durationAudio = d;
      });

      // Track the playback position and derive the remaining-time label.
      player.positionStream.listen((event) {
        // Skip until both a position and a total duration are available;
        // dereferencing durationAudio before durationStream fires would crash.
        if (event == null || durationAudio == null) return;
        currentpos = event.inMilliseconds;

        // Remaining time = total duration minus current position.
        final remaining =
            Duration(milliseconds: durationAudio.inMilliseconds - currentpos);
        final shours = remaining.inHours;
        final sminutes = remaining.inMinutes; // includes hours
        final sseconds = remaining.inSeconds; // includes hours and minutes
        final rminutes = sminutes - (shours * 60);
        // Hours are already folded into sminutes, so subtracting
        // sminutes * 60 alone removes both hours and minutes.
        final rseconds = sseconds - (sminutes * 60);
        final minutesToShow =
            rminutes < 10 ? '0$rminutes' : rminutes.toString();
        final secondsToShow =
            rseconds < 10 ? '0$rseconds' : rseconds.toString();
        currentpostlabel = "$minutesToShow:$secondsToShow";

        if (!mounted) return; // stream may fire after this State is disposed
        setState(() {
          // Once playback has run past the end, stop and reset to the start.
          if (currentpos > durationAudio.inMilliseconds) {
            player.stop();
            player.seek(const Duration(seconds: 0));
            isplaying = false;
            audioplayed = false;
            currentpostlabel = "00:00";
          }
        });
      });

      // Optionally start playback immediately.
      if (widget.isAuto) {
        await player.setAudioSource(LoadedSource(audiobytes));
        player.play();
        if (mounted) {
          setState(() {
            isplaying = true;
            audioplayed = true;
          });
        }
      }
    });
    super.initState();
  }

  @override
  void dispose() {
    player.stop();
    player.dispose();
    super.dispose();
  }

  @override
  Widget build(BuildContext context) {
    // Retained to keep this widget subscribed to AppContext changes even
    // though the value is not currently read in this build method.
    final appContext = Provider.of<AppContext>(context);
    return FloatingActionButton(
      backgroundColor: kPrimaryColor.withOpacity(0.7),
      onPressed: () async {
        if (!isplaying && !audioplayed) {
          // First play: load the in-memory byte source, then start.
          await player.setAudioSource(LoadedSource(audiobytes));
          player.play();
          setState(() {
            isplaying = true;
            audioplayed = true;
          });
        } else if (audioplayed && !isplaying) {
          // Resume from pause (source already loaded).
          player.play();
          setState(() {
            isplaying = true;
            audioplayed = true;
          });
        } else {
          player.pause();
          setState(() {
            isplaying = false;
          });
        }
      },
      // Icon reflects the playback state; the countdown label only appears
      // once playback has started at least once.
      child: isplaying
          ? Column(
              mainAxisAlignment: MainAxisAlignment.center,
              children: [
                const Icon(Icons.pause),
                Text(currentpostlabel),
              ],
            )
          : audioplayed
              ? Column(
                  mainAxisAlignment: MainAxisAlignment.center,
                  children: [
                    const Icon(Icons.play_arrow),
                    Text(currentpostlabel),
                  ],
                )
              : const Icon(Icons.play_arrow),
    );
  }
}
/// Feeds an in-memory byte buffer into the player as a [StreamAudioSource].
class LoadedSource extends StreamAudioSource {
  /// The raw encoded audio bytes to serve.
  final List<int> bytes;

  /// MIME type reported to the player; defaults to MP3.
  final String contentType;

  LoadedSource(this.bytes, {this.contentType = 'audio/mpeg'});

  /// Serves the byte range [start, end) of [bytes].
  ///
  /// Both bounds default to the whole buffer and are clamped to valid
  /// positions so an out-of-range request cannot make [List.sublist] throw.
  @override
  Future<StreamAudioResponse> request([int start, int end]) async {
    int from = start ?? 0;
    int to = end ?? bytes.length;
    if (from < 0) from = 0;
    if (from > bytes.length) from = bytes.length;
    if (to > bytes.length) to = bytes.length;
    if (to < from) to = from;
    return StreamAudioResponse(
      sourceLength: bytes.length,
      contentLength: to - from,
      offset: from,
      stream: Stream.value(bytes.sublist(from, to)),
      contentType: contentType,
    );
  }
}