196 lines
5.0 KiB
Dart
196 lines
5.0 KiB
Dart
|
|
import 'package:audioplayers/audioplayers.dart' as audioplayers;
|
|
import 'package:flutter/foundation.dart';
|
|
import 'package:just_audio/just_audio.dart' as justaudio;
|
|
|
|
/// Selects which audio backend [AudioWrapper] delegates to.
///
/// NOTE(review): values keep their existing UpperCamelCase names to avoid
/// breaking callers, although Dart style prefers lowerCamelCase enum values.
enum AudioWrapper_Mode {
  /// Delegate to the `just_audio` player (chosen when [kIsWeb] is true).
  Web,

  /// Delegate to the `audioplayers` player (non-web platforms).
  Mobile
}
|
|
|
|
/// Coarse playback state reported by [AudioWrapper.state].
enum AudioWrapper_State {
  /// The active backend reports that audio is currently playing.
  Playing,

  /// Anything else (stopped, paused, idle, or completed).
  NotPlaying
}
|
|
|
|
/// Facade that hides the platform split between two audio plugins.
///
/// On the web this delegates to `just_audio`; on mobile/desktop it delegates
/// to `audioplayers`. The active backend is chosen once per instance via
/// [mode], and every public method branches on it.
class AudioWrapper {
  // Backend players. Kept as plain (non-final) fields because [stop]
  // disposes and recreates the web player.
  audioplayers.AudioPlayer _audioPlayer_AudioPlayer = audioplayers.AudioPlayer();
  justaudio.AudioPlayer _justAudio_AudioPlayer = justaudio.AudioPlayer();

  /// Which backend is in use, derived from the platform at construction.
  AudioWrapper_Mode mode = kIsWeb ? AudioWrapper_Mode.Web : AudioWrapper_Mode.Mobile;

  AudioWrapper() {
    // FIX: a leftover debug line (`mode = AudioWrapper_Mode.Web;`) used to run
    // *after* the kIsWeb detection above, forcing every platform onto the
    // just_audio path and leaving the detection dead code. It has been removed
    // so the platform-derived value stands.
    print("AudioWrapper mode: $mode");
  }

  /// Converts an [AudioWrapperSource] into a `just_audio` source.
  ///
  /// Throws an [Exception] for unrecognized subclasses.
  justaudio.AudioSource _convertSource_JustAudio(AudioWrapperSource source) {
    if (source is AudioWrapperByteSource) {
      return _ByteSource(source.bytes);
    } else if (source is AudioWrapperAssetSource) {
      // just_audio expects the full asset key, hence the "assets/" prefix.
      return justaudio.AudioSource.asset("assets/" + source.assetPath);
    } else {
      throw Exception("Unknown source type");
    }
  }

  /// Converts an [AudioWrapperSource] into an `audioplayers` source.
  ///
  /// Throws an [Exception] for unrecognized subclasses.
  audioplayers.Source _convertSource_AudioPlayers(AudioWrapperSource source) {
    if (source is AudioWrapperByteSource) {
      return audioplayers.BytesSource(source.bytes);
    } else if (source is AudioWrapperAssetSource) {
      // audioplayers resolves its own asset prefix, so no "assets/" here.
      return audioplayers.AssetSource(source.assetPath);
    } else {
      throw Exception("Unknown source type");
    }
  }

  /// Loads [source] into the active backend without starting playback.
  Future<void> loadSource(AudioWrapperSource source) async {
    if (mode == AudioWrapper_Mode.Web) {
      justaudio.AudioSource audioSource = _convertSource_JustAudio(source);
      await _justAudio_AudioPlayer.setAudioSource(audioSource);
    } else if (mode == AudioWrapper_Mode.Mobile) {
      audioplayers.Source audioSource = _convertSource_AudioPlayers(source);
      await _audioPlayer_AudioPlayer.setSource(audioSource);
    }
  }

  /// Returns the duration of the loaded source, or `null` when unknown.
  Future<Duration?> getDuration() async {
    if (mode == AudioWrapper_Mode.Web) {
      return _justAudio_AudioPlayer.duration;
    } else if (mode == AudioWrapper_Mode.Mobile) {
      return await _audioPlayer_AudioPlayer.getDuration();
    }
    // FIX: the old body fell off the end here (implicit null); be explicit.
    return null;
  }

  /// Starts (or resumes) playback and returns the duration, if known.
  Future<Duration?> play() async {
    Duration? duration;
    if (mode == AudioWrapper_Mode.Web) {
      // Deliberately not awaited: just_audio's play() future only completes
      // when playback finishes or is stopped, so awaiting would block here.
      _justAudio_AudioPlayer.play();
      duration = _justAudio_AudioPlayer.duration;
    } else if (mode == AudioWrapper_Mode.Mobile) {
      // FIX: resume() was fire-and-forget; await it so errors surface to the
      // caller instead of vanishing as an unhandled future.
      await _audioPlayer_AudioPlayer.resume();
      duration = await _audioPlayer_AudioPlayer.getDuration();
    }
    return duration;
  }

  /// Stops playback.
  ///
  /// On the web the just_audio player is additionally disposed and recreated
  /// (dispose errors are swallowed on purpose, matching prior behavior).
  void stop() {
    if (mode == AudioWrapper_Mode.Web) {
      _justAudio_AudioPlayer.stop();
      try {
        _justAudio_AudioPlayer.dispose();
      } catch (e) {}
      _justAudio_AudioPlayer = justaudio.AudioPlayer();
    } else if (mode == AudioWrapper_Mode.Mobile) {
      // FIX: this branch was entirely commented out, making stop() a silent
      // no-op on mobile (likely masked by the old forced-Web debug mode).
      // Restore the stop call; unlike the web branch we keep the player
      // instance alive, as the commented-out code suggested dispose/recreate
      // was not wanted here.
      _audioPlayer_AudioPlayer.stop();
    }
  }

  /// Whether the active backend is currently playing.
  AudioWrapper_State get state {
    if (mode == AudioWrapper_Mode.Web) {
      return _justAudio_AudioPlayer.playing
          ? AudioWrapper_State.Playing
          : AudioWrapper_State.NotPlaying;
    } else {
      // FIX: the mobile check was commented out, so state always reported
      // NotPlaying on mobile. Restore it using audioplayers' PlayerState.
      return _audioPlayer_AudioPlayer.state == audioplayers.PlayerState.playing
          ? AudioWrapper_State.Playing
          : AudioWrapper_State.NotPlaying;
    }
  }
}
|
|
|
|
/// Marker base class for the audio inputs [AudioWrapper] accepts.
///
/// Concrete variants: [AudioWrapperByteSource] (in-memory bytes) and
/// [AudioWrapperAssetSource] (bundled asset path). [AudioWrapper] converts
/// these to backend-specific source types internally.
class AudioWrapperSource {
}
|
|
|
|
/// An [AudioWrapperSource] backed by raw, in-memory encoded audio bytes.
class AudioWrapperByteSource extends AudioWrapperSource {
  /// The encoded audio data to play.
  ///
  /// FIX: uses an initializing formal instead of a throwaway `Uint8List(0)`
  /// default that was immediately overwritten in the constructor body.
  /// The field stays mutable so the implicit public setter remains available
  /// to existing callers.
  Uint8List bytes;

  AudioWrapperByteSource(this.bytes);
}
|
|
|
|
/// An [AudioWrapperSource] referring to an audio file bundled as an asset.
class AudioWrapperAssetSource extends AudioWrapperSource {
  /// Asset path *without* the leading "assets/" segment; [AudioWrapper]
  /// adds whatever prefix each backend requires.
  ///
  /// FIX: uses an initializing formal instead of an empty-string default that
  /// was immediately overwritten in the constructor body. The field stays
  /// mutable so the implicit public setter remains available to callers.
  String assetPath;

  AudioWrapperAssetSource(this.assetPath);
}
|
|
|
|
/// Streams an in-memory byte buffer to `just_audio`.
class _ByteSource extends justaudio.StreamAudioSource {
  /// The complete encoded audio payload.
  final List<int> bytes;

  /// MIME type reported to the player.
  ///
  /// Generalized from a hard-coded 'audio/mpeg' so callers can stream other
  /// codecs; the default preserves the previous behavior.
  final String contentType;

  _ByteSource(this.bytes, {this.contentType = 'audio/mpeg'});

  /// Serves the byte range [start, end) of [bytes], defaulting to the whole
  /// buffer when the range is omitted.
  @override
  Future<justaudio.StreamAudioResponse> request([int? start, int? end]) async {
    final from = start ?? 0;
    // FIX: clamp the upper bound so a range request past the end of the
    // buffer can't make sublist throw a RangeError.
    var to = end ?? bytes.length;
    if (to > bytes.length) to = bytes.length;
    if (to < from) to = from;
    return justaudio.StreamAudioResponse(
      sourceLength: bytes.length,
      contentLength: to - from,
      offset: from,
      stream: Stream.value(bytes.sublist(from, to)),
      contentType: contentType,
    );
  }
}