// mirror of https://bitbucket.org/FransoletThomas/tablet-app.git
// synced 2025-12-06 08:31:19 +00:00
import 'dart:typed_data';

import 'package:flutter/material.dart';
import 'package:just_audio/just_audio.dart';
import 'package:just_audio_cache/just_audio_cache.dart';
import 'package:provider/provider.dart';

import 'package:tablet_app/Models/tabletContext.dart';
import 'package:tablet_app/app_context.dart';
/// A floating action button that plays, pauses and resumes a single audio
/// resource, showing the remaining time while playback is underway.
class AudioPlayerFloatingContainer extends StatefulWidget {
  const AudioPlayerFloatingContainer({
    Key? key,
    required this.audioBytes,
    required this.resourceURl,
    required this.isAuto,
  }) : super(key: key);

  /// Raw audio bytes, if the caller already downloaded them.
  /// NOTE(review): the state currently streams from [resourceURl] and only
  /// stores these bytes — confirm they are still needed.
  final Uint8List? audioBytes;

  /// URL of the audio resource to stream (and cache).
  final String resourceURl;

  /// Whether playback should start automatically once the player is ready.
  final bool isAuto;

  @override
  State<AudioPlayerFloatingContainer> createState() =>
      _AudioPlayerFloatingContainerState();
}
|
|
|
|
class _AudioPlayerFloatingContainerState extends State<AudioPlayerFloatingContainer> {
  /// Backing audio player; owned by this state and released in [dispose].
  AudioPlayer player = AudioPlayer();

  /// Copy of [AudioPlayerFloatingContainer.audioBytes] when provided.
  late Uint8List audiobytes;

  /// Whether audio is currently playing.
  bool isplaying = false;

  /// Whether playback has been started at least once
  /// (distinguishes "never played" from "paused").
  bool audioplayed = false;

  /// Current playback position, in milliseconds.
  int currentpos = 0;

  /// Total duration in seconds (placeholder 100 until the real duration
  /// is reported by the player).
  int maxduration = 100;

  /// Full duration of the loaded audio, once known.
  Duration? durationAudio;

  /// Remaining-time label rendered next to the play/pause icon.
  String currentpostlabel = "00:00";

  @override
  void initState() {
    super.initState();

    // Defer async setup until after the first frame is scheduled.
    Future.delayed(Duration.zero, () async {
      if (widget.audioBytes != null) {
        audiobytes = widget.audioBytes!;
      }

      // Record the total duration of the audio once the player reports it.
      player.durationStream.listen((Duration? d) {
        if (d != null) {
          maxduration = d.inSeconds;
          durationAudio = d;
        }
      });

      // Track the playback position and derive the remaining-time label.
      player.positionStream.listen((event) {
        final total = durationAudio;
        if (total == null) return;

        currentpos = event.inMilliseconds;

        // Remaining time, clamped so a late position event can never
        // produce a negative duration.
        var remainingMs = total.inMilliseconds - currentpos;
        if (remainingMs < 0) remainingMs = 0;
        final remaining = Duration(milliseconds: remainingMs);

        // Minutes/seconds within their unit. Fixes the previous math that
        // subtracted the hour component twice from the seconds, yielding
        // negative seconds for audio longer than one hour.
        final rminutes = remaining.inMinutes.remainder(60);
        final rseconds = remaining.inSeconds.remainder(60);
        currentpostlabel =
            "${rminutes.toString().padLeft(2, '0')}:${rseconds.toString().padLeft(2, '0')}";

        // Stream events may arrive after the widget has been removed;
        // calling setState then would throw.
        if (!mounted) return;

        setState(() {
          // Reset to the initial state once playback runs past the end.
          // Uses the null-checked `total` instead of `player.duration!`,
          // which could throw if the duration was not yet available.
          if (currentpos > total.inMilliseconds) {
            player.stop();
            player.seek(const Duration(seconds: 0));
            isplaying = false;
            audioplayed = false;
            currentpostlabel = "00:00";
          }
        });
      });

      // Load (and cache) the audio from its URL.
      await player.dynamicSet(url: widget.resourceURl);

      if (widget.isAuto) {
        player.play();
        if (!mounted) return;
        setState(() {
          isplaying = true;
          audioplayed = true;
        });
      }
    });
  }

  @override
  void dispose() {
    player.stop();
    player.dispose();
    super.dispose();
  }

  @override
  Widget build(BuildContext context) {
    final appContext = Provider.of<AppContext>(context);
    TabletAppContext tabletAppContext = appContext.getContext();

    // The configured primary color is stored as a string such as
    // "Color(0xAARRGGBB)"; extract the hex value between "(0x" and ")".
    final backgroundColor = Color(int.parse(
      tabletAppContext.configuration!.primaryColor!
          .split('(0x')[1]
          .split(')')[0],
      radix: 16,
    )).withOpacity(0.7);

    return FloatingActionButton(
      backgroundColor: backgroundColor,
      onPressed: () async {
        if (!isplaying && !audioplayed) {
          // First play.
          player.play();
          setState(() {
            isplaying = true;
            audioplayed = true;
          });
        } else if (audioplayed && !isplaying) {
          // Resume after a pause.
          player.play();
          setState(() {
            isplaying = true;
            audioplayed = true;
          });
        } else {
          player.pause();
          setState(() {
            isplaying = false;
          });
        }
      },
      child: isplaying
          ? Column(
              mainAxisAlignment: MainAxisAlignment.center,
              children: [
                const Icon(Icons.pause),
                Text(currentpostlabel),
              ],
            )
          : audioplayed
              ? Column(
                  mainAxisAlignment: MainAxisAlignment.center,
                  children: [
                    const Icon(Icons.play_arrow),
                    Text(currentpostlabel),
                  ],
                )
              : const Icon(Icons.play_arrow),
    );
  }
}
|
|
|
|
/// Feeds an in-memory byte buffer into a just_audio player.
class LoadedSource extends StreamAudioSource {
  /// The complete encoded audio payload.
  final List<int> bytes;

  /// MIME type reported to the player.
  ///
  /// Defaults to `'audio/mpeg'` (the previous hard-coded value), but can
  /// now be overridden for other encodings.
  final String contentType;

  LoadedSource(this.bytes, {this.contentType = 'audio/mpeg'});

  @override
  Future<StreamAudioResponse> request([int? start, int? end]) async {
    // Clamp the requested range to the buffer bounds so a malformed range
    // request cannot make sublist throw a RangeError.
    var from = start ?? 0;
    if (from < 0) from = 0;
    if (from > bytes.length) from = bytes.length;
    var to = end ?? bytes.length;
    if (to < from) to = from;
    if (to > bytes.length) to = bytes.length;

    return StreamAudioResponse(
      sourceLength: bytes.length,
      contentLength: to - from,
      offset: from,
      stream: Stream.value(bytes.sublist(from, to)),
      contentType: contentType,
    );
  }
}
|