diff --git a/lib/Screens/Article/article_page.dart b/lib/Screens/Article/article_page.dart
index 2928aa0..6e2ac6d 100644
--- a/lib/Screens/Article/article_page.dart
+++ b/lib/Screens/Article/article_page.dart
@@ -18,6 +18,8 @@ import 'package:mymuseum_visitapp/client.dart';
 import 'package:mymuseum_visitapp/constants.dart';
 import 'package:provider/provider.dart';
 
+import 'audio_player_floating.dart';
+
 class ArticlePage extends StatefulWidget {
   const ArticlePage({Key? key, required this.visitAppContextIn, required this.articleId}) : super(key: key);
 
@@ -65,7 +67,7 @@ class _ArticlePageState extends State<ArticlePage> {
         isTextSizeButton: true,
       ),
       body: FutureBuilder(
-        future: getArticle(appContext, appContext.clientAPI, widget.articleId),
+        future: getArticle(appContext, appContext.clientAPI, widget.articleId, false),
         builder: (context, AsyncSnapshot snapshot) {
           if(articleDTO != null && sectionDTO != null) {
             if(size.height > size.width) {
@@ -81,8 +83,8 @@ class _ArticlePageState extends State<ArticlePage> {
                   if(!articleDTO!.isContentTop!)
                     getContent(size, appContext),
 
-                  if(audioResourceModel != null)
-                    AudioPlayerContainer(audioBytes: audiobytes, isAuto: articleDTO!.isReadAudioAuto!),
+                  /*if(audioResourceModel != null)
+                    AudioPlayerContainer(audioBytes: audiobytes, isAuto: articleDTO!.isReadAudioAuto!),*/
                 ],
               );
             } else {
@@ -105,8 +107,8 @@ class _ArticlePageState extends State<ArticlePage> {
                       getContent(size, appContext),
                   ],
                 ),
-            if(audioResourceModel != null)
-              AudioPlayerContainer(audioBytes: audiobytes, isAuto: articleDTO!.isReadAudioAuto!)
+            /*if(audioResourceModel != null)
+              AudioPlayerContainer(audioBytes: audiobytes, isAuto: articleDTO!.isReadAudioAuto!)*/
             ],
           ),
         );
@@ -116,7 +118,16 @@ class _ArticlePageState extends State<ArticlePage> {
           }
         }
       ),
-      //floatingActionButton: const ScannerBouton(isReplacement: true),
+      floatingActionButton: FutureBuilder(
+        future: getArticle(appContext, appContext.clientAPI, widget.articleId, true),
+        builder: (context, AsyncSnapshot snapshot) {
+          return Padding(
+            padding: EdgeInsets.only(right: 0, top: 0), //size.height*0.1
+            child: audioResourceModel != null ? AudioPlayerFloatingContainer(audioBytes: audiobytes, isAuto: articleDTO!.isReadAudioAuto!) : null,
+          );
+        }
+      ),
+      floatingActionButtonLocation: FloatingActionButtonLocation.miniEndFloat, //miniEndTop
     );
   }
 
@@ -244,7 +255,7 @@ class _ArticlePageState extends State<ArticlePage> {
     }
   }
 
-  Future getArticle(AppContext appContext, Client client, String articleId) async {
+  Future getArticle(AppContext appContext, Client client, String articleId, bool isAudio) async {
     try {
       if(sectionDTO == null || articleDTO == null) {
         bool isConfigOffline = (appContext.getContext() as VisitAppContext).configuration!.isOffline!;
@@ -282,7 +293,7 @@ class _ArticlePageState extends State<ArticlePage> {
         if(sectionDTO!.type == SectionType.Article) {
           articleDTO = ArticleDTO.fromJson(jsonDecode(sectionDTO!.data!));
         }
-        if(articleDTO != null) {
+        if(articleDTO != null && isAudio) {
           var audioIdArticle = articleDTO!.audioIds!.where((audioId) => audioId.language == (appContext.getContext() as VisitAppContext).language);
           if(audioIdArticle.isNotEmpty && audioIdArticle.first.value != null) {
             if(isConfigOffline)
@@ -319,7 +330,7 @@ class _ArticlePageState extends State<ArticlePage> {
           }
         }
 
-        if(articleDTO!.images!.isNotEmpty) {
+        if(articleDTO!.images!.isNotEmpty && !isAudio) {
           for (var image in articleDTO!.images!) {
             if(image.resourceId != null) {
               if(isConfigOffline)
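In the change above, getArticle gains an isAudio flag so each of the two FutureBuilders fetches only what it renders: the body loads the article text and images (isAudio = false) while the floatingActionButton loads only the audio bytes (isAudio = true). A minimal sketch of that split, with hypothetical _loadTextAndImages/_loadAudioBytes helpers and buildArticleBody standing in for the real getArticle calls and body widgets:

  Scaffold(
    body: FutureBuilder(
      // getArticle(appContext, appContext.clientAPI, widget.articleId, false) in the diff above
      future: _loadTextAndImages(),
      builder: (context, snapshot) => buildArticleBody(snapshot),
    ),
    floatingActionButton: FutureBuilder<Uint8List>(
      // getArticle(appContext, appContext.clientAPI, widget.articleId, true) in the diff above
      future: _loadAudioBytes(),
      builder: (context, snapshot) => snapshot.hasData
          ? AudioPlayerFloatingContainer(audioBytes: snapshot.data!, isAuto: false)
          : const SizedBox.shrink(),
    ),
    floatingActionButtonLocation: FloatingActionButtonLocation.miniEndFloat,
  )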
diff --git a/lib/Screens/Article/audio_player_floating.dart b/lib/Screens/Article/audio_player_floating.dart
new file mode 100644
index 0000000..912844a
--- /dev/null
+++ b/lib/Screens/Article/audio_player_floating.dart
@@ -0,0 +1,231 @@
+import 'dart:typed_data';
+
+//import 'package:audioplayers/audioplayers.dart';
+import 'package:flutter/material.dart';
+import 'package:mymuseum_visitapp/Helpers/translationHelper.dart';
+import 'package:mymuseum_visitapp/app_context.dart';
+import 'package:mymuseum_visitapp/constants.dart';
+import 'package:provider/provider.dart';
+
+import 'package:just_audio/just_audio.dart';
+
+
+class AudioPlayerFloatingContainer extends StatefulWidget {
+  const AudioPlayerFloatingContainer({Key? key, required this.audioBytes, required this.isAuto}) : super(key: key);
+
+  final Uint8List audioBytes;
+  final bool isAuto;
+
+  @override
+  State<AudioPlayerFloatingContainer> createState() => _AudioPlayerFloatingContainerState();
+}
+
+class _AudioPlayerFloatingContainerState extends State<AudioPlayerFloatingContainer> {
+  AudioPlayer player = AudioPlayer();
+  late Uint8List audiobytes;
+  bool isplaying = false;
+  bool audioplayed = false;
+  int currentpos = 0;
+  int maxduration = 100;
+  String currentpostlabel = "00:00";
+
+  @override
+  void initState() {
+    Future.delayed(Duration.zero, () async {
+
+      audiobytes = widget.audioBytes;
+      /*player.onDurationChanged.listen((Duration d) { //get the duration of audio
+        maxduration = d.inSeconds;
+        setState(() {
+        });
+      });*/
+
+      //player.bufferedPositionStream
+
+      player.positionStream.listen((event) {
+        if(event != null) {
+
+          currentpos = event.inMilliseconds; //get the current position of playing audio
+
+          //generating the duration label
+          int shours = Duration(milliseconds:currentpos).inHours;
+          int sminutes = Duration(milliseconds:currentpos).inMinutes;
+          int sseconds = Duration(milliseconds:currentpos).inSeconds;
+
+          int rminutes = sminutes - (shours * 60);
+          int rseconds = sseconds - (sminutes * 60 + shours * 60 * 60);
+
+          String minutesToShow = rminutes < 10 ? '0$rminutes': rminutes.toString();
+          String secondsToShow = rseconds < 10 ? '0$rseconds': rseconds.toString();
+
+          currentpostlabel = "$minutesToShow:$secondsToShow";
+
+          setState(() {
+            //refresh the UI
+            if(currentpos > player.duration!.inMilliseconds) {
+              print("RESET ALL");
+              player.stop();
+              player.seek(const Duration(seconds: 0));
+              isplaying = false;
+              audioplayed = false;
+              currentpostlabel = "00:00";
+            }
+          });
+        }
+      });
+
+
+
+      /*player.onPositionChanged.listen((Duration p){
+        currentpos = p.inMilliseconds; //get the current position of playing audio
+
+        //generating the duration label
+        int shours = Duration(milliseconds:currentpos).inHours;
+        int sminutes = Duration(milliseconds:currentpos).inMinutes;
+        int sseconds = Duration(milliseconds:currentpos).inSeconds;
+
+        int rminutes = sminutes - (shours * 60);
+        int rseconds = sseconds - (sminutes * 60 + shours * 60 * 60);
+
+        String minutesToShow = rminutes < 10 ? '0$rminutes': rminutes.toString();
+        String secondsToShow = rseconds < 10 ? '0$rseconds': rseconds.toString();
+
+        currentpostlabel = "$minutesToShow:$secondsToShow";
+
+        setState(() {
+          //refresh the UI
+        });
+      });*/
+
+      if(widget.isAuto) {
+        //player.play(BytesSource(audiobytes));
+        await player.setAudioSource(LoadedSource(audiobytes));
+        player.play();
+        setState(() {
+          isplaying = true;
+          audioplayed = true;
+        });
+      }
+    });
+    super.initState();
+  }
+
+  @override
+  void dispose() {
+    player.stop();
+    player.dispose();
+    super.dispose();
+  }
+
+  @override
+  Widget build(BuildContext context) {
+    final appContext = Provider.of<AppContext>(context);
+
+
+    return FloatingActionButton(
+      backgroundColor: kBlue1.withOpacity(0.7),
+      onPressed: () async {
+        print("TODO");
+        if(!isplaying && !audioplayed){
+          //player.play(BytesSource(audiobytes));
+          await player.setAudioSource(LoadedSource(audiobytes));
+          player.play();
+          setState(() {
+            isplaying = true;
+            audioplayed = true;
+          });
+        }else if(audioplayed && !isplaying){
+          //player.resume();
+          player.play();
+          setState(() {
+            isplaying = true;
+            audioplayed = true;
+          });
+        }else{
+          player.pause();
+          setState(() {
+            isplaying = false;
+          });
+        }
+      },
+      child: isplaying ? const Icon(Icons.pause) : const Icon(Icons.play_arrow),
+
+      /*Column(
+        children: [
+          //Text(currentpostlabel, style: const TextStyle(fontSize: 25)),
+          Wrap(
+            spacing: 10,
+            children: [
+              ElevatedButton.icon(
+                style: ElevatedButton.styleFrom(
+                  backgroundColor: kSecondColor, // Background color
+                ),
+                onPressed: () async {
+                  if(!isplaying && !audioplayed){
+                    //player.play(BytesSource(audiobytes));
+                    await player.setAudioSource(LoadedSource(audiobytes));
+                    player.play();
+                    setState(() {
+                      isplaying = true;
+                      audioplayed = true;
+                    });
+                  }else if(audioplayed && !isplaying){
+                    //player.resume();
+                    player.play();
+                    setState(() {
+                      isplaying = true;
+                      audioplayed = true;
+                    });
+                  }else{
+                    player.pause();
+                    setState(() {
+                      isplaying = false;
+                    });
+                  }
+                },
+                icon: Icon(isplaying?Icons.pause:Icons.play_arrow),
+                //label:Text(isplaying?TranslationHelper.getFromLocale("pause", appContext.getContext()):TranslationHelper.getFromLocale("play", appContext.getContext()))
+              ),
+
+              /*ElevatedButton.icon(
+                style: ElevatedButton.styleFrom(
+                  backgroundColor: kSecondColor, // Background color
+                ),
+                onPressed: () async {
+                  player.stop();
+                  player.seek(const Duration(seconds: 0));
+                  setState(() {
+                    isplaying = false;
+                    audioplayed = false;
+                    currentpostlabel = "00:00";
+                  });
+                },
+                icon: const Icon(Icons.stop),
+                //label: Text(TranslationHelper.getFromLocale("stop", appContext.getContext()))
+              ),*/
+            ],
+          )
+        ],
+      ),*/
+    );
+  }
+}
+
+// Feed your own stream of bytes into the player
+class LoadedSource extends StreamAudioSource {
+  final List<int> bytes;
+  LoadedSource(this.bytes);
+
+  @override
+  Future<StreamAudioResponse> request([int? start, int? end]) async {
+    start ??= 0;
+    end ??= bytes.length;
+    return StreamAudioResponse(
+      sourceLength: bytes.length,
+      contentLength: end - start,
+      offset: start,
+      stream: Stream.value(bytes.sublist(start, end)),
+      contentType: 'audio/mpeg',
+    );
+  }
+}
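LoadedSource above is just_audio's StreamAudioSource pattern for playing audio that is already in memory: the player issues byte-range requests and request() answers them from the list, defaulting to the whole buffer when start/end are null. A minimal usage sketch, assuming audioBytes holds a complete MP3 file (matching the 'audio/mpeg' contentType):

  final player = AudioPlayer();
  await player.setAudioSource(LoadedSource(audioBytes)); // player pulls byte ranges from memory
  await player.play();                                   // also resumes after pause()
  await player.seek(Duration.zero);                      // rewind to the start
  await player.dispose();                                // release platform resources when done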
diff --git a/pubspec.yaml b/pubspec.yaml
index 22487cc..de090c3 100644
--- a/pubspec.yaml
+++ b/pubspec.yaml
@@ -15,7 +15,7 @@ publish_to: 'none' # Remove this line if you wish to publish to pub.dev
 # In iOS, build-name is used as CFBundleShortVersionString while build-number used as CFBundleVersion.
 # Read more about iOS versioning at
 # https://developer.apple.com/library/archive/documentation/General/Reference/InfoPlistKeyReference/Articles/CoreFoundationKeys.html
-version: 1.0.5+13
+version: 1.0.6+14
 
 environment:
   sdk: ">=2.16.2 <3.0.0"
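The new widget imports package:just_audio, while the pubspec.yaml hunk above only bumps the app version, so just_audio is presumably already declared in this project's dependencies. If it were not, the dependencies section would need an entry along these lines (the version constraint is illustrative, not taken from the repository):

  dependencies:
    just_audio: ^0.9.30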