import 'dart:async';
import 'dart:io';
import 'dart:typed_data';

import 'package:bus_infotainment/audio_cache.dart';
import 'package:bus_infotainment/backend/live_information.dart';
import 'package:bus_infotainment/backend/modules/tube_info.dart';
import 'package:bus_infotainment/tfl_datasets.dart';
import 'package:bus_infotainment/utils/audio wrapper.dart';
import 'package:bus_infotainment/utils/delegates.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/services.dart';
import 'package:shared_preferences/shared_preferences.dart';

import 'info_module.dart';

class AnnouncementModule extends InfoModule {

  AnnouncementCache announcementCache = AnnouncementCache();

  // Constructor
  AnnouncementModule() {
    refreshTimer();
  }

  // Files
  String _bundleLocation = "assets/ibus_recordings.zip";
  Uint8List? _bundleBytes;

  void setBundleBytes(Uint8List bytes) {
    _bundleBytes = bytes;
  }

  Future<Uint8List> getBundleBytes() async {
    if (_bundleBytes != null) {
      return _bundleBytes!;
    } else {

      // Try to load them from the file location stored in shared preferences
      try {
        SharedPreferences prefs = await SharedPreferences.getInstance();
        String fileLocation = prefs.getString("AnnouncementsFileLocation")!;

        File file = File(fileLocation);
        setBundleBytes(file.readAsBytesSync());
        return _bundleBytes!;
      } catch (e) {
        throw Exception("Could not load the announcement bundle from the stored file location (loading from bundled assets has been deprecated).");
      }

      // if (kIsWeb) {
      //   throw Exception("Cannot load bundle bytes on web");
      // }
      //
      // final bytes = await rootBundle.load(_bundleLocation);
      // return bytes.buffer.asUint8List();
    }
  }
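
  // Usage sketch (illustrative only; the path below is a placeholder): the
  // recordings archive is supplied at runtime rather than shipped as an asset,
  // either directly
  //
  //   final module = AnnouncementModule();
  //   module.setBundleBytes(await File("/some/path/ibus_recordings.zip").readAsBytes());
  //
  // or indirectly, by storing a file path under the "AnnouncementsFileLocation"
  // shared-preferences key before getBundleBytes() is first called.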

  // Queue
  List<AnnouncementQueueEntry> queue = [];
  AnnouncementQueueEntry? currentAnnouncement;
  DateTime? currentAnnouncementTimeStamp;
  String defaultText = "*** NO MESSAGE ***";
  bool isPlaying = false;

  // Audio
  AudioWrapper audioPlayer = AudioWrapper();

  // Events
  final EventDelegate<AnnouncementQueueEntry> onAnnouncement = EventDelegate();

  // Timer: polls the queue every 10 ms and plays announcements one at a time.
  // Called once from the constructor; the returned Timer is not retained, so the
  // periodic callback runs for the lifetime of the module.
  Timer refreshTimer() => Timer.periodic(const Duration(milliseconds: 10), (timer) async {

    if (!isPlaying) {

      if (queue.isNotEmpty) {
        isPlaying = true;
        AnnouncementQueueEntry nextAnnouncement = queue.first;

        bool proceeding = await _internalAccountForInconsistentTime(
          announcement: nextAnnouncement,
          timerInterval: const Duration(milliseconds: 10),
          callback: () {
            queue.removeAt(0);
            print("Announcement proceeding");
          }
        );

        if (!proceeding) {
          isPlaying = false;
          print("Announcement not proceeding");
          print("Queue: ${queue.length}");
          return;
        }

        currentAnnouncement = nextAnnouncement;
        currentAnnouncementTimeStamp = liveInformation.syncedTimeModule.Now();

        onAnnouncement.trigger(currentAnnouncement!);

        if (currentAnnouncement!.audioSources.isNotEmpty) {

          // Play each audio source in turn, with a short gap between clips
          for (AudioWrapperSource source in currentAnnouncement!.audioSources) {
            try {
              await audioPlayer.loadSource(source);
              await Future.delayed((await audioPlayer.play())!);
              audioPlayer.stop();

              if (currentAnnouncement?.audioSources.last != source) {
                await Future.delayed(const Duration(milliseconds: 100));
              }
            } catch (e) {
              await Future.delayed(const Duration(seconds: 1));
            }
          }

        } else {
          // No audio: wait briefly before allowing the next announcement
          if (queue.isNotEmpty) {
            await Future.delayed(const Duration(seconds: 5));
          }
        }

        isPlaying = false;
      }
    }
  });

  // Calls [callback] and returns true only if the announcement should proceed now
  Future<bool> _internalAccountForInconsistentTime({
    required AnnouncementQueueEntry announcement,
    required Duration timerInterval,
    required Function() callback
  }) async {
    DateTime now = liveInformation.syncedTimeModule.Now();
    if (announcement.scheduledTime != null) {

      if (now.isAfter(announcement.scheduledTime!)) {
        callback();
        return true;
      }

      int millisecondDifference = (now.millisecondsSinceEpoch - announcement.scheduledTime!.millisecondsSinceEpoch).abs();
      if (millisecondDifference <= timerInterval.inMilliseconds) {
        // Account for the time lost by the periodic timer
        callback();
        await Future.delayed(Duration(milliseconds: timerInterval.inMilliseconds - millisecondDifference));
        return true;
      } else {
        return false;
      }
    } else {
      callback();
      return true;
    }
  }
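
  // Worked example of the scheduling logic above, assuming the 10 ms interval
  // passed in by refreshTimer: if the head of the queue is scheduled 4 ms in the
  // future when a tick fires, the difference (4 ms) is within the interval, so
  // the callback dequeues the entry immediately and the method then waits a
  // further 10 - 4 = 6 ms before returning true. If the entry is scheduled more
  // than one interval ahead, false is returned and it is retried on a later tick.
  // Entries with no scheduled time, or whose time has already passed, proceed
  // straight away.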

  // Configuration
  int get defaultAnnouncementDelay => liveInformation.auth.isAuthenticated() ? 1 : 0;
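
  // Assumption (not stated in the original code): the one-second delay when
  // authenticated is presumably there to give the "announce ..." command sent via
  // liveInformation.SendCommand(...) time to reach other clients before the
  // scheduled playback time arrives.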

  // Methods
  Future<void> queueAnnounceByAudioName({
    required String displayText,
    List<String> audioNames = const [],
    DateTime? scheduledTime,
    bool sendToServer = true
  }) async {

    if (sendToServer) {

      scheduledTime ??= liveInformation.syncedTimeModule.Now().add(Duration(seconds: defaultAnnouncementDelay));

      String audioNamesString = "";
      for (var audioName in audioNames) {
        audioNamesString += "\"$audioName\" ";
      }

      liveInformation.SendCommand("announce manual \"$displayText\" $audioNamesString ${scheduledTime.millisecondsSinceEpoch}");
      queueAnnounceByAudioName(
        displayText: displayText,
        audioNames: audioNames,
        scheduledTime: scheduledTime,
        sendToServer: false
      );

      return;
    }

    // Cache the announcements
    await announcementCache.loadAnnouncementsFromBytes(await getBundleBytes(), audioNames);

    List<AudioWrapperSource> sources = [];

    print("Audio names: $audioNames");

    for (var audioName in audioNames) {
      Uint8List? audioData = announcementCache[audioName];

      if (audioData == null) {
        continue;
      }

      sources.add(AudioWrapperByteSource(audioData));
    }

    queue.add(
      AnnouncementQueueEntry(
        displayText: displayText,
        audioSources: sources,
        scheduledTime: scheduledTime
      )
    );
  }
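
  // Usage sketch (the display text and clip name are placeholders, not entries
  // known to exist in the bundle):
  //
  //   await module.queueAnnounceByAudioName(
  //     displayText: "Please hold on, the bus is about to move",
  //     audioNames: ["EXAMPLE_CLIP_001.mp3"],
  //   );
  //
  // With sendToServer left at true, the call first broadcasts an
  // "announce manual ..." command and then re-enters itself with
  // sendToServer: false to queue the announcement locally.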

  void queueAnnouncementByInfoIndex({
    int infoIndex = -1,
    DateTime? scheduledTime,
    bool sendToServer = true
  }) {

    if (sendToServer) {

      scheduledTime ??= liveInformation.syncedTimeModule.Now().add(Duration(seconds: defaultAnnouncementDelay));

      liveInformation.SendCommand("announce info $infoIndex ${scheduledTime.millisecondsSinceEpoch}");
      queueAnnouncementByInfoIndex(
        infoIndex: infoIndex,
        scheduledTime: scheduledTime,
        sendToServer: false
      );
      print("Announcement sent to server");
      return;
    }

    NamedAnnouncementQueueEntry clone = NamedAnnouncementQueueEntry(
      shortName: manualAnnouncements[infoIndex].shortName,
      displayText: manualAnnouncements[infoIndex].displayText,
      audioSources: manualAnnouncements[infoIndex].audioSources,
      scheduledTime: scheduledTime
    );

    queue.add(clone);
  }
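
  // Example (indices refer to the manualAnnouncements list at the bottom of this
  // file; index 0 is "Driver Change"):
  //
  //   module.queueAnnouncementByInfoIndex(infoIndex: 0);
  //
  // Note that the default infoIndex of -1 is not a valid index into
  // manualAnnouncements, so callers should always pass a real index.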

  Future<void> queueAnnouncementByRouteVariant({
    required BusRouteVariant routeVariant,
    DateTime? scheduledTime,
    bool sendToServer = true
  }) async {

    if (sendToServer) {

      scheduledTime ??= liveInformation.syncedTimeModule.Now().add(Duration(seconds: defaultAnnouncementDelay));

      String routeNumber = routeVariant.busRoute.routeNumber;
      int routeVariantIndex = routeVariant.busRoute.routeVariants.values.toList().indexOf(routeVariant);

      liveInformation.SendCommand("announce dest $routeNumber $routeVariantIndex ${scheduledTime.millisecondsSinceEpoch}");
      queueAnnouncementByRouteVariant(
        routeVariant: routeVariant,
        scheduledTime: scheduledTime,
        sendToServer: false
      );

      return;
    }

    print("Checkpoint 4");
    print(routeVariant);
    print("Checkpoint 4.1");

    String routeNumber = routeVariant.busRoute.routeNumber;
    print("Checkpoint 4.2");

    String destination = routeVariant.destination?.destination ?? "NullPointerException";
    print("Destination: $destination");
    print("Checkpoint 4.3");

    // Route-number recording inside the bundle, e.g. "R_<routeNumber>_001.mp3"
    String audioRoute = "R_${routeVariant.busRoute.routeNumber}_001.mp3";
    print("Checkpoint 5");
    await announcementCache.loadAnnouncementsFromBytes(await getBundleBytes(), [audioRoute]);
    print("Checkpoint 6");

    late AudioWrapperSource sourceRoute;
    try {
      // "UL"-prefixed routes are announced as a rail replacement service; other
      // routes use their own recording, falling back to "special service" if the
      // recording is missing from the cache
      sourceRoute = !routeNumber.toLowerCase().startsWith("ul") ?
        AudioWrapperByteSource(announcementCache[audioRoute]!) :
        AudioWrapperAssetSource("audio/R_RAIL_REPLACEMENT_SERVICE_001.mp3");
    } catch (e) {
      sourceRoute = AudioWrapperAssetSource("audio/R_SPECIAL_SERVICE_001.mp3");
    }

    if (routeNumber.toLowerCase().startsWith("ul")) {

      TubeLine? closestLine = liveInformation.tubeStations.getClosestLine(routeVariant);

      sourceRoute = closestLine?.getAudio() ?? sourceRoute;
      routeNumber = closestLine?.getShortName() ?? routeNumber;
    }

    print("Checkpoint 6.1");
    AudioWrapperSource sourceDestination = AudioWrapperByteSource(await routeVariant.destination!.getAudioBytes());
    print("Checkpoint 7");
    AnnouncementQueueEntry announcement = AnnouncementQueueEntry(
      displayText: "$routeNumber to $destination",
      audioSources: [sourceRoute, AudioWrapperAssetSource("audio/to_destination.wav"), sourceDestination],
      scheduledTime: scheduledTime
    );

    print("Checkpoint 8");
    queue.add(announcement);
  }
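
  // The resulting announcement plays three clips back to back: the route
  // recording (or the tube-line / rail-replacement audio for "UL" routes),
  // the shared "to_destination.wav" connective, and the destination audio
  // fetched from the route variant.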

  // Constants

  final List<NamedAnnouncementQueueEntry> manualAnnouncements = [
    NamedAnnouncementQueueEntry(
      shortName: "Driver Change",
      displayText: "Driver Change",
      audioSources: [AudioWrapperAssetSource("audio/manual_announcements/driverchange.mp3")],
    ),
    NamedAnnouncementQueueEntry(
      shortName: "No Standing Upr Deck",
      displayText: "No standing on the upper deck",
      audioSources: [AudioWrapperAssetSource("audio/manual_announcements/nostanding.mp3")],
    ),
    NamedAnnouncementQueueEntry(
      shortName: "Face Covering",
      displayText: "Please wear a face covering!",
      audioSources: [AudioWrapperAssetSource("audio/manual_announcements/facecovering.mp3")],
    ),
    NamedAnnouncementQueueEntry(
      shortName: "Seats Upstairs",
      displayText: "Seats are available upstairs",
      audioSources: [AudioWrapperAssetSource("audio/manual_announcements/seatsupstairs.mp3")],
    ),
    NamedAnnouncementQueueEntry(
      shortName: "Bus Terminates Here",
      displayText: "Bus terminates here. Please take your belongings with you",
      audioSources: [AudioWrapperAssetSource("audio/manual_announcements/busterminateshere.mp3")],
    ),
    NamedAnnouncementQueueEntry(
      shortName: "Bus On Diversion",
      displayText: "Bus on diversion. Please listen for further announcements",
      audioSources: [AudioWrapperAssetSource("audio/manual_announcements/busondiversion.mp3")],
    ),
    NamedAnnouncementQueueEntry(
      shortName: "Destination Change",
      displayText: "Destination Changed - please listen for further instructions",
      audioSources: [AudioWrapperAssetSource("audio/manual_announcements/destinationchange.mp3")],
    ),
    NamedAnnouncementQueueEntry(
      shortName: "Wheelchair Space",
      displayText: "Wheelchair space requested",
      audioSources: [AudioWrapperAssetSource("audio/manual_announcements/wheelchairspace1.mp3")],
    ),
    NamedAnnouncementQueueEntry(
      shortName: "Move Down The Bus",
      displayText: "Please move down the bus",
      audioSources: [AudioWrapperAssetSource("audio/manual_announcements/movedownthebus.mp3")],
    ),
    NamedAnnouncementQueueEntry(
      shortName: "Next Stop Closed",
      displayText: "The next bus stop is closed",
      audioSources: [AudioWrapperAssetSource("audio/manual_announcements/nextstopclosed.wav")],
    ),
    NamedAnnouncementQueueEntry(
      shortName: "CCTV In Operation",
      displayText: "CCTV is in operation on this bus",
      audioSources: [AudioWrapperAssetSource("audio/manual_announcements/cctvoperation.mp3")],
    ),
    NamedAnnouncementQueueEntry(
      shortName: "Safe Door Opening",
      displayText: "Driver will open the doors when it is safe to do so",
      audioSources: [AudioWrapperAssetSource("audio/manual_announcements/safedooropening.mp3")],
    ),
    NamedAnnouncementQueueEntry(
      shortName: "Buggy Safety",
      displayText: "For your child's safety, please remain with your buggy",
      audioSources: [AudioWrapperAssetSource("audio/manual_announcements/buggysafety.mp3")],
    ),
    NamedAnnouncementQueueEntry(
      shortName: "Wheelchair Space 2",
      displayText: "Wheelchair priority space required",
      audioSources: [AudioWrapperAssetSource("audio/manual_announcements/wheelchairspace2.mp3")],
    ),
    NamedAnnouncementQueueEntry(
      shortName: "Service Regulation",
      displayText: "Regulating service - please listen for further information",
      audioSources: [AudioWrapperAssetSource("audio/manual_announcements/serviceregulation.mp3")],
    ),
    NamedAnnouncementQueueEntry(
      shortName: "Bus Ready To Depart",
      displayText: "This bus is ready to depart",
      audioSources: [AudioWrapperAssetSource("audio/manual_announcements/readytodepart.mp3")],
    ),
  ];
}