import 'dart:async';
import 'dart:io';

import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';

import 'package:async/async.dart';
import 'package:flutter_linkify/flutter_linkify.dart';
import 'package:just_audio/just_audio.dart';
import 'package:matrix/matrix.dart';
import 'package:opus_caf_converter_dart/opus_caf_converter_dart.dart';
import 'package:path_provider/path_provider.dart';

import 'package:fluffychat/config/app_config.dart';
import 'package:fluffychat/config/themes.dart';
import 'package:fluffychat/utils/error_reporter.dart';
import 'package:fluffychat/utils/file_description.dart';
import 'package:fluffychat/utils/localized_exception_extension.dart';
import 'package:fluffychat/utils/url_launcher.dart';
import '../../../utils/matrix_sdk_extensions/event_extension.dart';
import '../../../widgets/fluffy_chat_app.dart';
import '../../../widgets/matrix.dart';

class AudioPlayerWidget extends StatefulWidget {
  final Color color;
  final Color linkColor;
  final double fontSize;
  final Event event;

  static const int wavesCount = 40;

  const AudioPlayerWidget(
    this.event, {
    required this.color,
    required this.linkColor,
    required this.fontSize,
    super.key,
  });

  @override
  AudioPlayerState createState() => AudioPlayerState();
}

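// Usage sketch (illustrative only, values are hypothetical): embedding the
// player in a message bubble, where `event` is an audio Event from the
// timeline:
//
//   AudioPlayerWidget(
//     event,
//     color: Colors.white,
//     linkColor: Colors.lightBlueAccent,
//     fontSize: 14,
//   )
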
enum AudioPlayerStatus { notDownloaded, downloading, downloaded }

class AudioPlayerState extends State<AudioPlayerWidget> {
  static const double buttonSize = 36;

  AudioPlayerStatus status = AudioPlayerStatus.notDownloaded;

  late final MatrixState matrix;
  List<int>? _waveform;
  String? _durationString;

  /// If this event is loaded into the global audio player and still playing,
  /// keep playback running and show a MaterialBanner as a mini player.
  /// Otherwise pause and dispose the player and clear the global references.
  @override
  void dispose() {
    super.dispose();
    final audioPlayer = matrix.voiceMessageEventId.value != widget.event.eventId
        ? null
        : matrix.audioPlayer;
    if (audioPlayer != null) {
      if (audioPlayer.playing && !audioPlayer.isAtEndPosition) {
        WidgetsBinding.instance.addPostFrameCallback((_) {
          ScaffoldMessenger.of(matrix.context).showMaterialBanner(
            MaterialBanner(
              padding: EdgeInsets.zero,
              leading: StreamBuilder(
                stream: audioPlayer.playerStateStream.asBroadcastStream(),
                builder: (context, _) => IconButton(
                  onPressed: () {
                    if (audioPlayer.isAtEndPosition) {
                      audioPlayer.seek(Duration.zero);
                    } else if (audioPlayer.playing) {
                      audioPlayer.pause();
                    } else {
                      audioPlayer.play();
                    }
                  },
                  icon: audioPlayer.playing && !audioPlayer.isAtEndPosition
                      ? const Icon(Icons.pause_outlined)
                      : const Icon(Icons.play_arrow_outlined),
                ),
              ),
              content: StreamBuilder(
                stream: audioPlayer.positionStream.asBroadcastStream(),
                builder: (context, _) => GestureDetector(
                  onTap: () => FluffyChatApp.router.go(
                    '/rooms/${widget.event.room.id}?event=${widget.event.eventId}',
                  ),
                  child: Text(
                    '🎙️ ${audioPlayer.position.minuteSecondString} / ${audioPlayer.duration?.minuteSecondString} - ${widget.event.senderFromMemoryOrFallback.calcDisplayname()}',
                    maxLines: 1,
                    overflow: TextOverflow.ellipsis,
                  ),
                ),
              ),
              actions: [
                IconButton(
                  onPressed: () {
                    audioPlayer.pause();
                    audioPlayer.dispose();
                    matrix.voiceMessageEventId.value =
                        matrix.audioPlayer = null;

                    WidgetsBinding.instance.addPostFrameCallback((_) {
                      ScaffoldMessenger.of(matrix.context)
                          .clearMaterialBanners();
                    });
                  },
                  icon: const Icon(Icons.close_outlined),
                ),
              ],
            ),
          );
        });
        return;
      }
      audioPlayer.pause();
      audioPlayer.dispose();
      matrix.voiceMessageEventId.value = matrix.audioPlayer = null;
    }
  }

  /// Play/pause handler: reuses the already loaded player for this event if
  /// there is one, otherwise downloads the attachment (converting Ogg audio
  /// to CAF on iOS) and starts a new player.
  void _onButtonTap() async {
    WidgetsBinding.instance.addPostFrameCallback((_) {
      ScaffoldMessenger.of(matrix.context).clearMaterialBanners();
    });
    final currentPlayer =
        matrix.voiceMessageEventId.value != widget.event.eventId
            ? null
            : matrix.audioPlayer;
    if (currentPlayer != null) {
      if (currentPlayer.isAtEndPosition) {
        currentPlayer.seek(Duration.zero);
      } else if (currentPlayer.playing) {
        currentPlayer.pause();
      } else {
        currentPlayer.play();
      }
      return;
    }

    matrix.voiceMessageEventId.value = widget.event.eventId;
    matrix.audioPlayer
      ?..stop()
      ..dispose();
    File? file;
    MatrixFile? matrixFile;

    setState(() => status = AudioPlayerStatus.downloading);
    try {
      matrixFile = await widget.event.downloadAndDecryptAttachment();

      if (!kIsWeb) {
        final tempDir = await getTemporaryDirectory();
        final fileName = Uri.encodeComponent(
          widget.event.attachmentOrThumbnailMxcUrl()!.pathSegments.last,
        );
        file = File('${tempDir.path}/${fileName}_${matrixFile.name}');

        await file.writeAsBytes(matrixFile.bytes);

        if (Platform.isIOS &&
            matrixFile.mimeType.toLowerCase() == 'audio/ogg') {
          Logs().v('Convert ogg audio file for iOS...');
          final convertedFile = File('${file.path}.caf');
          if (await convertedFile.exists() == false) {
            OpusCaf().convertOpusToCaf(file.path, convertedFile.path);
          }
          file = convertedFile;
        }
      }

      setState(() {
        status = AudioPlayerStatus.downloaded;
      });
    } catch (e, s) {
      Logs().v('Could not download audio file', e, s);
      ScaffoldMessenger.of(context).showSnackBar(
        SnackBar(
          content: Text(e.toLocalizedString(context)),
        ),
      );
      rethrow;
    }
    if (!context.mounted) return;
    if (matrix.voiceMessageEventId.value != widget.event.eventId) return;

    final audioPlayer = matrix.audioPlayer = AudioPlayer();

    if (file != null) {
      audioPlayer.setFilePath(file.path);
    } else {
      await audioPlayer.setAudioSource(MatrixFileAudioSource(matrixFile));
    }

    audioPlayer.play().onError(
      ErrorReporter(context, 'Unable to play audio message')
          .onErrorCallback,
    );
  }

  /// Cycles the playback speed: 1.0 → 1.25 → 1.5 → 2.0 → 0.5 → back to 1.0.
  void _toggleSpeed() async {
    final audioPlayer = matrix.audioPlayer;
    if (audioPlayer == null) return;
    switch (audioPlayer.speed) {
      case 1.0:
        await audioPlayer.setSpeed(1.25);
        break;
      case 1.25:
        await audioPlayer.setSpeed(1.5);
        break;
      case 1.5:
        await audioPlayer.setSpeed(2.0);
        break;
      case 2.0:
        await audioPlayer.setSpeed(0.5);
        break;
      case 0.5:
      default:
        await audioPlayer.setSpeed(1.0);
        break;
    }
    setState(() {});
  }

  /// Reads the MSC1767 waveform from the event content and normalises it to
  /// exactly [AudioPlayerWidget.wavesCount] samples, capping each value at
  /// 1024.
  List<int>? _getWaveform() {
    final eventWaveForm = widget.event.content
        .tryGetMap<String, dynamic>('org.matrix.msc1767.audio')
        ?.tryGetList<int>('waveform');
    if (eventWaveForm == null || eventWaveForm.isEmpty) {
      return null;
    }
    while (eventWaveForm.length < AudioPlayerWidget.wavesCount) {
      for (var i = 0; i < eventWaveForm.length; i = i + 2) {
        eventWaveForm.insert(i, eventWaveForm[i]);
      }
    }
    var i = 0;
    final step = (eventWaveForm.length / AudioPlayerWidget.wavesCount).round();
    while (eventWaveForm.length > AudioPlayerWidget.wavesCount) {
      eventWaveForm.removeAt(i);
      i = (i + step) % AudioPlayerWidget.wavesCount;
    }
    return eventWaveForm.map((i) => i > 1024 ? 1024 : i).toList();
  }

  @override
  void initState() {
    super.initState();
    matrix = Matrix.of(context);
    _waveform = _getWaveform();

    if (matrix.voiceMessageEventId.value == widget.event.eventId &&
        matrix.audioPlayer != null) {
      WidgetsBinding.instance.addPostFrameCallback((_) {
        ScaffoldMessenger.of(matrix.context).clearMaterialBanners();
      });
    }

    final durationInt = widget.event.content
        .tryGetMap<String, dynamic>('info')
        ?.tryGet<int>('duration');
    if (durationInt != null) {
      final duration = Duration(milliseconds: durationInt);
      _durationString = duration.minuteSecondString;
    }
  }

  @override
  Widget build(BuildContext context) {
    final theme = Theme.of(context);
    final waveform = _waveform;

    return ValueListenableBuilder(
      valueListenable: matrix.voiceMessageEventId,
      builder: (context, eventId, _) {
        final audioPlayer =
            eventId != widget.event.eventId ? null : matrix.audioPlayer;

        final fileDescription = widget.event.fileDescription;

        return StreamBuilder<Object>(
          stream: audioPlayer == null
              ? null
              : StreamGroup.merge([
                  audioPlayer.positionStream.asBroadcastStream(),
                  audioPlayer.playerStateStream.asBroadcastStream(),
                ]),
          builder: (context, _) {
            final maxPosition =
                audioPlayer?.duration?.inMilliseconds.toDouble() ?? 1.0;
            var currentPosition =
                audioPlayer?.position.inMilliseconds.toDouble() ?? 0.0;
            if (currentPosition > maxPosition) currentPosition = maxPosition;

            final wavePosition =
                (currentPosition / maxPosition) * AudioPlayerWidget.wavesCount;

            final statusText = audioPlayer == null
                ? _durationString ?? '00:00'
                : audioPlayer.position.minuteSecondString;
            return Padding(
              padding: const EdgeInsets.all(12.0),
              child: Column(
                mainAxisSize: MainAxisSize.min,
                crossAxisAlignment: CrossAxisAlignment.start,
                children: [
                  ConstrainedBox(
                    constraints: const BoxConstraints(
                      maxWidth: FluffyThemes.columnWidth,
                    ),
                    child: Row(
                      mainAxisSize: MainAxisSize.min,
                      children: <Widget>[
                        SizedBox(
                          width: buttonSize,
                          height: buttonSize,
                          child: status == AudioPlayerStatus.downloading
                              ? CircularProgressIndicator(
                                  strokeWidth: 2,
                                  color: widget.color,
                                )
                              : InkWell(
                                  borderRadius: BorderRadius.circular(64),
                                  onLongPress: () =>
                                      widget.event.saveFile(context),
                                  onTap: _onButtonTap,
                                  child: Material(
                                    color: widget.color.withAlpha(64),
                                    borderRadius: BorderRadius.circular(64),
                                    child: Icon(
                                      audioPlayer?.playing == true &&
                                              audioPlayer?.isAtEndPosition ==
                                                  false
                                          ? Icons.pause_outlined
                                          : Icons.play_arrow_outlined,
                                      color: widget.color,
                                    ),
                                  ),
                                ),
                        ),
                        const SizedBox(width: 8),
                        Expanded(
                          child: Stack(
                            children: [
                              if (waveform != null)
                                Padding(
                                  padding: const EdgeInsets.symmetric(
                                    horizontal: 16.0,
                                  ),
                                  child: Row(
                                    children: [
                                      for (var i = 0;
                                          i < AudioPlayerWidget.wavesCount;
                                          i++)
                                        Expanded(
                                          child: Container(
                                            height: 32,
                                            alignment: Alignment.center,
                                            child: Container(
                                              margin:
                                                  const EdgeInsets.symmetric(
                                                horizontal: 1,
                                              ),
                                              decoration: BoxDecoration(
                                                color: i < wavePosition
                                                    ? widget.color
                                                    : widget.color
                                                        .withAlpha(128),
                                                borderRadius:
                                                    BorderRadius.circular(64),
                                              ),
                                              height:
                                                  32 * (waveform[i] / 1024),
                                            ),
                                          ),
                                        ),
                                    ],
                                  ),
                                ),
                              SizedBox(
                                height: 32,
                                child: Slider(
                                  thumbColor: widget.event.senderId ==
                                          widget.event.room.client.userID
                                      ? theme.colorScheme.onPrimary
                                      : theme.colorScheme.primary,
                                  activeColor: waveform == null
                                      ? widget.color
                                      : Colors.transparent,
                                  inactiveColor: waveform == null
                                      ? widget.color.withAlpha(128)
                                      : Colors.transparent,
                                  max: maxPosition,
                                  value: currentPosition,
                                  onChanged: (position) => audioPlayer == null
                                      ? _onButtonTap()
                                      : audioPlayer.seek(
                                          Duration(
                                            milliseconds: position.round(),
                                          ),
                                        ),
                                ),
                              ),
                            ],
                          ),
                        ),
                        const SizedBox(width: 8),
                        SizedBox(
                          width: 36,
                          child: Text(
                            statusText,
                            style: TextStyle(
                              color: widget.color,
                              fontSize: 12,
                            ),
                          ),
                        ),
                        const SizedBox(width: 8),
                        AnimatedCrossFade(
                          firstChild: Padding(
                            padding: const EdgeInsets.only(right: 8.0),
                            child: Icon(
                              Icons.mic_none_outlined,
                              color: widget.color,
                            ),
                          ),
                          secondChild: Material(
                            color: widget.color.withAlpha(64),
                            borderRadius:
                                BorderRadius.circular(AppConfig.borderRadius),
                            child: InkWell(
                              borderRadius: BorderRadius.circular(
                                AppConfig.borderRadius,
                              ),
                              onTap: _toggleSpeed,
                              child: SizedBox(
                                width: 32,
                                height: 20,
                                child: Center(
                                  child: Text(
                                    '${audioPlayer?.speed.toString()}x',
                                    style: TextStyle(
                                      color: widget.color,
                                      fontSize: 9,
                                    ),
                                  ),
                                ),
                              ),
                            ),
                          ),
                          alignment: Alignment.center,
                          crossFadeState: audioPlayer == null
                              ? CrossFadeState.showFirst
                              : CrossFadeState.showSecond,
                          duration: FluffyThemes.animationDuration,
                        ),
                      ],
                    ),
                  ),
                  if (fileDescription != null) ...[
                    const SizedBox(height: 8),
                    Padding(
                      padding: const EdgeInsets.symmetric(
                        horizontal: 16,
                        vertical: 8,
                      ),
                      child: Linkify(
                        text: fileDescription,
                        textScaleFactor:
                            MediaQuery.textScalerOf(context).scale(1),
                        style: TextStyle(
                          color: widget.color,
                          fontSize: widget.fontSize,
                        ),
                        options: const LinkifyOptions(humanize: false),
                        linkStyle: TextStyle(
                          color: widget.linkColor,
                          fontSize: widget.fontSize,
                          decoration: TextDecoration.underline,
                          decorationColor: widget.linkColor,
                        ),
                        onOpen: (url) =>
                            UrlLauncher(context, url.url).launchUrl(),
                      ),
                    ),
                  ],
                ],
              ),
            );
          },
        );
      },
    );
  }
}

/// Wraps a [MatrixFile] so it can be used as an [AudioSource] for the
/// just_audio package.
class MatrixFileAudioSource extends StreamAudioSource {
  final MatrixFile file;

  MatrixFileAudioSource(this.file);

  @override
  Future<StreamAudioResponse> request([int? start, int? end]) async {
    start ??= 0;
    end ??= file.bytes.length;
    return StreamAudioResponse(
      sourceLength: file.bytes.length,
      contentLength: end - start,
      offset: start,
      stream: Stream.value(file.bytes.sublist(start, end)),
      contentType: file.mimeType,
    );
  }
}
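// Usage sketch (illustrative only, mirroring the web code path in
// _onButtonTap above); `event` is assumed to be an audio message Event:
//
//   final matrixFile = await event.downloadAndDecryptAttachment();
//   final player = AudioPlayer();
//   await player.setAudioSource(MatrixFileAudioSource(matrixFile));
//   await player.play();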

extension on AudioPlayer {
  bool get isAtEndPosition {
    final duration = this.duration;
    if (duration == null) return true;
    return position >= duration;
  }
}

extension on Duration {
  String get minuteSecondString =>
      '${inMinutes.toString().padLeft(2, '0')}:${(inSeconds % 60).toString().padLeft(2, '0')}';
}