feat: Nicer record voice message UI with pause function

pull/2234/head
Christian Kußowski 2 months ago
parent 4482b37feb
commit e86deb99c9
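This change replaces the modal RecordingDialog with an inline recording row: tapping the microphone button shows a hint, long-pressing it starts recording, and the row offers pause, resume, cancel and send while drawing a live waveform. The recording state moves into a new RecordingViewModel widget so ChatInputRow can swap its content while a recording is running. Condensed sketch of the pattern as introduced in the chat_input_row.dart hunk below (buildRegularInputRow is a hypothetical stand-in for the unchanged input row):

    return RecordingViewModel(
      builder: (context, recording) {
        // While a recording is in progress, the whole input row is replaced.
        if (recording.isRecording) {
          return RecordingInputRow(
            state: recording,
            onSend: controller.onVoiceMessageSend,
          );
        }
        return buildRegularInputRow(context); // hypothetical: the normal chat input row
      },
    );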

@@ -3375,5 +3375,8 @@
"@moreEvents": {},
"declineInvitation": "Decline invitation",
"@declineInvitation": {},
"noMessagesYet": "No messages yet"
"noMessagesYet": "No messages yet",
"longPressToRecordVoiceMessage": "Long press to record voice message.",
"pause": "Pause",
"resume": "Resume"
}
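The new strings are read through the generated L10n class. For example, the tap handler on the microphone button (see the chat_input_row.dart hunk below) surfaces the long-press hint roughly like this, while pause and resume back the tooltips of the new pause/resume buttons:

    ScaffoldMessenger.of(context).showSnackBar(
      SnackBar(
        content: Text(L10n.of(context).longPressToRecordVoiceMessage),
      ),
    );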

@@ -12,7 +12,6 @@ import 'package:emoji_picker_flutter/emoji_picker_flutter.dart';
import 'package:go_router/go_router.dart';
import 'package:image_picker/image_picker.dart';
import 'package:matrix/matrix.dart';
import 'package:record/record.dart';
import 'package:scroll_to_index/scroll_to_index.dart';
import 'package:shared_preferences/shared_preferences.dart';
import 'package:universal_html/html.dart' as html;
@@ -23,7 +22,6 @@ import 'package:fluffychat/config/themes.dart';
import 'package:fluffychat/l10n/l10n.dart';
import 'package:fluffychat/pages/chat/chat_view.dart';
import 'package:fluffychat/pages/chat/event_info_dialog.dart';
import 'package:fluffychat/pages/chat/recording_dialog.dart';
import 'package:fluffychat/pages/chat_details/chat_details.dart';
import 'package:fluffychat/utils/error_reporter.dart';
import 'package:fluffychat/utils/file_selector.dart';
@@ -648,31 +646,14 @@ class ChatController extends State<ChatPageWithRoom>
);
}
void voiceMessageAction() async {
room.client.getConfig(); // Preload server file configuration.
Future<void> onVoiceMessageSend(
String path,
int duration,
List<int> waveform,
String? fileName,
) async {
final scaffoldMessenger = ScaffoldMessenger.of(context);
if (PlatformInfos.isAndroid) {
final info = await DeviceInfoPlugin().androidInfo;
if (info.version.sdkInt < 19) {
showOkAlertDialog(
context: context,
title: L10n.of(context).unsupportedAndroidVersion,
message: L10n.of(context).unsupportedAndroidVersionLong,
okLabel: L10n.of(context).close,
);
return;
}
}
if (await AudioRecorder().hasPermission() == false) return;
final result = await showDialog<RecordingResult>(
context: context,
barrierDismissible: false,
builder: (c) => const RecordingDialog(),
);
if (result == null) return;
final audioFile = XFile(result.path);
final audioFile = XFile(path);
final bytesResult = await showFutureLoadingDialog(
context: context,
@@ -683,7 +664,7 @@ class ChatController extends State<ChatPageWithRoom>
final file = MatrixAudioFile(
bytes: bytes,
name: result.fileName ?? audioFile.path,
name: fileName ?? audioFile.path,
);
await room.sendFileEvent(
@@ -692,12 +673,12 @@ class ChatController extends State<ChatPageWithRoom>
extraContent: {
'info': {
...file.info,
'duration': result.duration,
'duration': duration,
},
'org.matrix.msc3245.voice': {},
'org.matrix.msc1767.audio': {
'duration': result.duration,
'waveform': result.waveform,
'duration': duration,
'waveform': waveform,
},
},
).catchError((e) {
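In short, the controller no longer drives the recording flow itself: voiceMessageAction() and its RecordingDialog call are gone, and what remains is a plain send callback that RecordingViewModelState.stopAndSend invokes once recording stops. Its signature, restated from the hunk above (not additional API):

    Future<void> onVoiceMessageSend(
      String path,        // temporary file written by the recorder
      int duration,       // recording length in milliseconds
      List<int> waveform, // samples for the org.matrix.msc1767.audio waveform
      String? fileName,
    ) async {
      // Reads the file and calls room.sendFileEvent as shown above.
    }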

@@ -5,6 +5,8 @@ import 'package:matrix/matrix.dart';
import 'package:fluffychat/config/app_config.dart';
import 'package:fluffychat/l10n/l10n.dart';
import 'package:fluffychat/pages/chat/recording_input_row.dart';
import 'package:fluffychat/pages/chat/recording_view_model.dart';
import 'package:fluffychat/utils/other_party_can_receive.dart';
import 'package:fluffychat/utils/platform_infos.dart';
import 'package:fluffychat/widgets/avatar.dart';
@@ -41,297 +43,324 @@ class ChatInputRow extends StatelessWidget {
foregroundColor: theme.colorScheme.onTertiaryContainer,
);
return Row(
crossAxisAlignment: CrossAxisAlignment.end,
mainAxisAlignment: MainAxisAlignment.spaceBetween,
children: controller.selectMode
? <Widget>[
if (controller.selectedEvents
.every((event) => event.status == EventStatus.error))
SizedBox(
height: height,
child: TextButton(
style: TextButton.styleFrom(
foregroundColor: theme.colorScheme.error,
),
onPressed: controller.deleteErrorEventsAction,
child: Row(
children: <Widget>[
const Icon(Icons.delete_forever_outlined),
Text(L10n.of(context).delete),
],
),
),
)
else
SizedBox(
height: height,
child: TextButton(
style: selectedTextButtonStyle,
onPressed: controller.forwardEventsAction,
child: Row(
children: <Widget>[
const Icon(Icons.keyboard_arrow_left_outlined),
Text(L10n.of(context).forward),
],
return RecordingViewModel(
builder: (context, recordingViewModel) {
if (recordingViewModel.isRecording) {
return RecordingInputRow(
state: recordingViewModel,
onSend: controller.onVoiceMessageSend,
);
}
return Row(
crossAxisAlignment: CrossAxisAlignment.end,
mainAxisAlignment: MainAxisAlignment.spaceBetween,
children: controller.selectMode
? <Widget>[
if (controller.selectedEvents
.every((event) => event.status == EventStatus.error))
SizedBox(
height: height,
child: TextButton(
style: TextButton.styleFrom(
foregroundColor: theme.colorScheme.error,
),
onPressed: controller.deleteErrorEventsAction,
child: Row(
children: <Widget>[
const Icon(Icons.delete_forever_outlined),
Text(L10n.of(context).delete),
],
),
),
)
else
SizedBox(
height: height,
child: TextButton(
style: selectedTextButtonStyle,
onPressed: controller.forwardEventsAction,
child: Row(
children: <Widget>[
const Icon(Icons.keyboard_arrow_left_outlined),
Text(L10n.of(context).forward),
],
),
),
),
),
),
controller.selectedEvents.length == 1
? controller.selectedEvents.first
.getDisplayEvent(controller.timeline!)
.status
.isSent
? SizedBox(
height: height,
child: TextButton(
style: selectedTextButtonStyle,
onPressed: controller.replyAction,
child: Row(
children: <Widget>[
Text(L10n.of(context).reply),
const Icon(Icons.keyboard_arrow_right),
],
controller.selectedEvents.length == 1
? controller.selectedEvents.first
.getDisplayEvent(controller.timeline!)
.status
.isSent
? SizedBox(
height: height,
child: TextButton(
style: selectedTextButtonStyle,
onPressed: controller.replyAction,
child: Row(
children: <Widget>[
Text(L10n.of(context).reply),
const Icon(Icons.keyboard_arrow_right),
],
),
),
)
: SizedBox(
height: height,
child: TextButton(
style: selectedTextButtonStyle,
onPressed: controller.sendAgainAction,
child: Row(
children: <Widget>[
Text(L10n.of(context).tryToSendAgain),
const SizedBox(width: 4),
const Icon(Icons.send_outlined, size: 16),
],
),
),
)
: const SizedBox.shrink(),
]
: <Widget>[
const SizedBox(width: 4),
AnimatedContainer(
duration: FluffyThemes.animationDuration,
curve: FluffyThemes.animationCurve,
width:
controller.sendController.text.isNotEmpty ? 0 : height,
height: height,
alignment: Alignment.center,
decoration: const BoxDecoration(),
clipBehavior: Clip.hardEdge,
child: PopupMenuButton<String>(
useRootNavigator: true,
icon: const Icon(Icons.add_circle_outline),
iconColor: theme.colorScheme.onPrimaryContainer,
onSelected: controller.onAddPopupMenuButtonSelected,
itemBuilder: (BuildContext context) =>
<PopupMenuEntry<String>>[
if (PlatformInfos.isMobile)
PopupMenuItem<String>(
value: 'location',
child: ListTile(
leading: CircleAvatar(
backgroundColor:
theme.colorScheme.onPrimaryContainer,
foregroundColor:
theme.colorScheme.primaryContainer,
child: const Icon(Icons.gps_fixed_outlined),
),
title: Text(L10n.of(context).shareLocation),
contentPadding: const EdgeInsets.all(0),
),
),
)
: SizedBox(
height: height,
child: TextButton(
style: selectedTextButtonStyle,
onPressed: controller.sendAgainAction,
child: Row(
children: <Widget>[
Text(L10n.of(context).tryToSendAgain),
const SizedBox(width: 4),
const Icon(Icons.send_outlined, size: 16),
],
PopupMenuItem<String>(
value: 'image',
child: ListTile(
leading: CircleAvatar(
backgroundColor:
theme.colorScheme.onPrimaryContainer,
foregroundColor:
theme.colorScheme.primaryContainer,
child: const Icon(Icons.photo_outlined),
),
title: Text(L10n.of(context).sendImage),
contentPadding: const EdgeInsets.all(0),
),
)
: const SizedBox.shrink(),
]
: <Widget>[
const SizedBox(width: 4),
AnimatedContainer(
duration: FluffyThemes.animationDuration,
curve: FluffyThemes.animationCurve,
width: controller.sendController.text.isNotEmpty ? 0 : height,
height: height,
alignment: Alignment.center,
decoration: const BoxDecoration(),
clipBehavior: Clip.hardEdge,
child: PopupMenuButton<String>(
useRootNavigator: true,
icon: const Icon(Icons.add_circle_outline),
iconColor: theme.colorScheme.onPrimaryContainer,
onSelected: controller.onAddPopupMenuButtonSelected,
itemBuilder: (BuildContext context) =>
<PopupMenuEntry<String>>[
if (PlatformInfos.isMobile)
PopupMenuItem<String>(
value: 'location',
child: ListTile(
leading: CircleAvatar(
backgroundColor:
theme.colorScheme.onPrimaryContainer,
foregroundColor: theme.colorScheme.primaryContainer,
child: const Icon(Icons.gps_fixed_outlined),
),
PopupMenuItem<String>(
value: 'video',
child: ListTile(
leading: CircleAvatar(
backgroundColor:
theme.colorScheme.onPrimaryContainer,
foregroundColor:
theme.colorScheme.primaryContainer,
child:
const Icon(Icons.video_camera_back_outlined),
),
title: Text(L10n.of(context).sendVideo),
contentPadding: const EdgeInsets.all(0),
),
title: Text(L10n.of(context).shareLocation),
contentPadding: const EdgeInsets.all(0),
),
),
PopupMenuItem<String>(
value: 'image',
child: ListTile(
leading: CircleAvatar(
backgroundColor: theme.colorScheme.onPrimaryContainer,
foregroundColor: theme.colorScheme.primaryContainer,
child: const Icon(Icons.photo_outlined),
PopupMenuItem<String>(
value: 'file',
child: ListTile(
leading: CircleAvatar(
backgroundColor:
theme.colorScheme.onPrimaryContainer,
foregroundColor:
theme.colorScheme.primaryContainer,
child: const Icon(Icons.attachment_outlined),
),
title: Text(L10n.of(context).sendFile),
contentPadding: const EdgeInsets.all(0),
),
),
title: Text(L10n.of(context).sendImage),
contentPadding: const EdgeInsets.all(0),
),
],
),
PopupMenuItem<String>(
value: 'video',
child: ListTile(
leading: CircleAvatar(
backgroundColor: theme.colorScheme.onPrimaryContainer,
foregroundColor: theme.colorScheme.primaryContainer,
child: const Icon(Icons.video_camera_back_outlined),
),
title: Text(L10n.of(context).sendVideo),
contentPadding: const EdgeInsets.all(0),
),
if (PlatformInfos.isMobile)
AnimatedContainer(
duration: FluffyThemes.animationDuration,
curve: FluffyThemes.animationCurve,
width: controller.sendController.text.isNotEmpty
? 0
: height,
height: height,
alignment: Alignment.center,
decoration: const BoxDecoration(),
clipBehavior: Clip.hardEdge,
child: PopupMenuButton(
useRootNavigator: true,
icon: const Icon(Icons.camera_alt_outlined),
onSelected: controller.onAddPopupMenuButtonSelected,
iconColor: theme.colorScheme.onPrimaryContainer,
itemBuilder: (context) => [
PopupMenuItem<String>(
value: 'camera-video',
child: ListTile(
leading: CircleAvatar(
backgroundColor:
theme.colorScheme.onPrimaryContainer,
foregroundColor:
theme.colorScheme.primaryContainer,
child: const Icon(Icons.videocam_outlined),
),
title: Text(L10n.of(context).recordAVideo),
contentPadding: const EdgeInsets.all(0),
),
),
PopupMenuItem<String>(
value: 'camera',
child: ListTile(
leading: CircleAvatar(
backgroundColor:
theme.colorScheme.onPrimaryContainer,
foregroundColor:
theme.colorScheme.primaryContainer,
child: const Icon(Icons.camera_alt_outlined),
),
title: Text(L10n.of(context).takeAPhoto),
contentPadding: const EdgeInsets.all(0),
),
),
],
),
),
PopupMenuItem<String>(
value: 'file',
child: ListTile(
leading: CircleAvatar(
backgroundColor: theme.colorScheme.onPrimaryContainer,
foregroundColor: theme.colorScheme.primaryContainer,
child: const Icon(Icons.attachment_outlined),
Container(
height: height,
width: height,
alignment: Alignment.center,
child: IconButton(
tooltip: L10n.of(context).emojis,
color: theme.colorScheme.onPrimaryContainer,
icon: PageTransitionSwitcher(
transitionBuilder: (
Widget child,
Animation<double> primaryAnimation,
Animation<double> secondaryAnimation,
) {
return SharedAxisTransition(
animation: primaryAnimation,
secondaryAnimation: secondaryAnimation,
transitionType: SharedAxisTransitionType.scaled,
fillColor: Colors.transparent,
child: child,
);
},
child: Icon(
controller.showEmojiPicker
? Icons.keyboard
: Icons.add_reaction_outlined,
key: ValueKey(controller.showEmojiPicker),
),
title: Text(L10n.of(context).sendFile),
contentPadding: const EdgeInsets.all(0),
),
onPressed: controller.emojiPickerAction,
),
],
),
),
if (PlatformInfos.isMobile)
AnimatedContainer(
duration: FluffyThemes.animationDuration,
curve: FluffyThemes.animationCurve,
width: controller.sendController.text.isNotEmpty ? 0 : height,
height: height,
alignment: Alignment.center,
decoration: const BoxDecoration(),
clipBehavior: Clip.hardEdge,
child: PopupMenuButton(
useRootNavigator: true,
icon: const Icon(Icons.camera_alt_outlined),
onSelected: controller.onAddPopupMenuButtonSelected,
iconColor: theme.colorScheme.onPrimaryContainer,
itemBuilder: (context) => [
PopupMenuItem<String>(
value: 'camera-video',
child: ListTile(
leading: CircleAvatar(
backgroundColor:
theme.colorScheme.onPrimaryContainer,
foregroundColor: theme.colorScheme.primaryContainer,
child: const Icon(Icons.videocam_outlined),
),
title: Text(L10n.of(context).recordAVideo),
contentPadding: const EdgeInsets.all(0),
),
),
PopupMenuItem<String>(
value: 'camera',
child: ListTile(
leading: CircleAvatar(
backgroundColor:
theme.colorScheme.onPrimaryContainer,
foregroundColor: theme.colorScheme.primaryContainer,
child: const Icon(Icons.camera_alt_outlined),
),
title: Text(L10n.of(context).takeAPhoto),
contentPadding: const EdgeInsets.all(0),
),
),
],
),
),
Container(
height: height,
width: height,
alignment: Alignment.center,
child: IconButton(
tooltip: L10n.of(context).emojis,
color: theme.colorScheme.onPrimaryContainer,
icon: PageTransitionSwitcher(
transitionBuilder: (
Widget child,
Animation<double> primaryAnimation,
Animation<double> secondaryAnimation,
) {
return SharedAxisTransition(
animation: primaryAnimation,
secondaryAnimation: secondaryAnimation,
transitionType: SharedAxisTransitionType.scaled,
fillColor: Colors.transparent,
child: child,
);
},
child: Icon(
controller.showEmojiPicker
? Icons.keyboard
: Icons.add_reaction_outlined,
key: ValueKey(controller.showEmojiPicker),
if (Matrix.of(context).isMultiAccount &&
Matrix.of(context).hasComplexBundles &&
Matrix.of(context).currentBundle!.length > 1)
Container(
height: height,
width: height,
alignment: Alignment.center,
child: _ChatAccountPicker(controller),
),
),
onPressed: controller.emojiPickerAction,
),
),
if (Matrix.of(context).isMultiAccount &&
Matrix.of(context).hasComplexBundles &&
Matrix.of(context).currentBundle!.length > 1)
Container(
height: height,
width: height,
alignment: Alignment.center,
child: _ChatAccountPicker(controller),
),
Expanded(
child: Padding(
padding: const EdgeInsets.symmetric(vertical: 0.0),
child: InputBar(
room: controller.room,
minLines: 1,
maxLines: 8,
autofocus: !PlatformInfos.isMobile,
keyboardType: TextInputType.multiline,
textInputAction:
AppConfig.sendOnEnter == true && PlatformInfos.isMobile
Expanded(
child: Padding(
padding: const EdgeInsets.symmetric(vertical: 0.0),
child: InputBar(
room: controller.room,
minLines: 1,
maxLines: 8,
autofocus: !PlatformInfos.isMobile,
keyboardType: TextInputType.multiline,
textInputAction: AppConfig.sendOnEnter == true &&
PlatformInfos.isMobile
? TextInputAction.send
: null,
onSubmitted: controller.onInputBarSubmitted,
onSubmitImage: controller.sendImageFromClipBoard,
focusNode: controller.inputFocus,
controller: controller.sendController,
decoration: InputDecoration(
contentPadding: const EdgeInsets.only(
left: 6.0,
right: 6.0,
bottom: 6.0,
top: 3.0,
onSubmitted: controller.onInputBarSubmitted,
onSubmitImage: controller.sendImageFromClipBoard,
focusNode: controller.inputFocus,
controller: controller.sendController,
decoration: InputDecoration(
contentPadding: const EdgeInsets.only(
left: 6.0,
right: 6.0,
bottom: 6.0,
top: 3.0,
),
counter: const SizedBox.shrink(),
hintText: L10n.of(context).writeAMessage,
hintMaxLines: 1,
border: InputBorder.none,
enabledBorder: InputBorder.none,
filled: false,
),
onChanged: controller.onInputBarChanged,
),
counter: const SizedBox.shrink(),
hintText: L10n.of(context).writeAMessage,
hintMaxLines: 1,
border: InputBorder.none,
enabledBorder: InputBorder.none,
filled: false,
),
onChanged: controller.onInputBarChanged,
),
),
),
Container(
height: height,
width: height,
alignment: Alignment.center,
child: PlatformInfos.platformCanRecord &&
controller.sendController.text.isEmpty
? FloatingActionButton.small(
tooltip: L10n.of(context).voiceMessage,
onPressed: controller.voiceMessageAction,
elevation: 0,
heroTag: null,
shape: RoundedRectangleBorder(
borderRadius: BorderRadius.circular(height),
),
backgroundColor: theme.bubbleColor,
foregroundColor: theme.onBubbleColor,
child: const Icon(Icons.mic_none_outlined),
)
: FloatingActionButton.small(
tooltip: L10n.of(context).send,
onPressed: controller.send,
elevation: 0,
heroTag: null,
shape: RoundedRectangleBorder(
borderRadius: BorderRadius.circular(height),
),
backgroundColor: theme.bubbleColor,
foregroundColor: theme.onBubbleColor,
child: const Icon(Icons.send_outlined),
),
),
],
Container(
height: height,
width: height,
alignment: Alignment.center,
child: PlatformInfos.platformCanRecord &&
controller.sendController.text.isEmpty
? IconButton(
tooltip: L10n.of(context).voiceMessage,
onPressed: () =>
ScaffoldMessenger.of(context).showSnackBar(
SnackBar(
content: Text(
L10n.of(context)
.longPressToRecordVoiceMessage,
),
),
),
onLongPress: () => recordingViewModel
.startRecording(controller.room),
style: IconButton.styleFrom(
backgroundColor: theme.bubbleColor,
foregroundColor: theme.onBubbleColor,
),
icon: const Icon(Icons.mic_none_outlined),
)
: IconButton(
tooltip: L10n.of(context).send,
onPressed: controller.send,
style: IconButton.styleFrom(
backgroundColor: theme.bubbleColor,
foregroundColor: theme.onBubbleColor,
),
icon: const Icon(Icons.send_outlined),
),
),
],
);
},
);
}
}

@@ -1,256 +0,0 @@
import 'dart:async';
import 'package:flutter/cupertino.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:path/path.dart' as path_lib;
import 'package:path_provider/path_provider.dart';
import 'package:record/record.dart';
import 'package:wakelock_plus/wakelock_plus.dart';
import 'package:fluffychat/config/app_config.dart';
import 'package:fluffychat/config/setting_keys.dart';
import 'package:fluffychat/l10n/l10n.dart';
import 'package:fluffychat/utils/platform_infos.dart';
import 'package:fluffychat/widgets/matrix.dart';
import 'events/audio_player.dart';
class RecordingDialog extends StatefulWidget {
const RecordingDialog({
super.key,
});
@override
RecordingDialogState createState() => RecordingDialogState();
}
class RecordingDialogState extends State<RecordingDialog> {
Timer? _recorderSubscription;
Duration _duration = Duration.zero;
bool error = false;
final _audioRecorder = AudioRecorder();
final List<double> amplitudeTimeline = [];
String? fileName;
Future<void> startRecording() async {
final store = Matrix.of(context).store;
try {
final codec = kIsWeb
// Web seems to create webm instead of ogg when using opus encoder
// which does not play on iOS right now. So we use wav for now:
? AudioEncoder.wav
// Everywhere else we use opus if supported by the platform:
: await _audioRecorder.isEncoderSupported(AudioEncoder.opus)
? AudioEncoder.opus
: AudioEncoder.aacLc;
fileName =
'recording${DateTime.now().microsecondsSinceEpoch}.${codec.fileExtension}';
String? path;
if (!kIsWeb) {
final tempDir = await getTemporaryDirectory();
path = path_lib.join(tempDir.path, fileName);
}
final result = await _audioRecorder.hasPermission();
if (result != true) {
setState(() => error = true);
return;
}
await WakelockPlus.enable();
await _audioRecorder.start(
RecordConfig(
bitRate: AppSettings.audioRecordingBitRate.getItem(store),
sampleRate: AppSettings.audioRecordingSamplingRate.getItem(store),
numChannels: AppSettings.audioRecordingNumChannels.getItem(store),
autoGain: AppSettings.audioRecordingAutoGain.getItem(store),
echoCancel: AppSettings.audioRecordingEchoCancel.getItem(store),
noiseSuppress: AppSettings.audioRecordingNoiseSuppress.getItem(store),
encoder: codec,
),
path: path ?? '',
);
setState(() => _duration = Duration.zero);
_recorderSubscription?.cancel();
_recorderSubscription =
Timer.periodic(const Duration(milliseconds: 100), (_) async {
final amplitude = await _audioRecorder.getAmplitude();
var value = 100 + amplitude.current * 2;
value = value < 1 ? 1 : value;
amplitudeTimeline.add(value);
setState(() {
_duration += const Duration(milliseconds: 100);
});
});
} catch (_) {
setState(() => error = true);
rethrow;
}
}
@override
void initState() {
super.initState();
startRecording();
}
@override
void dispose() {
WakelockPlus.disable();
_recorderSubscription?.cancel();
_audioRecorder.stop();
super.dispose();
}
void _stopAndSend() async {
_recorderSubscription?.cancel();
final path = await _audioRecorder.stop();
if (path == null) throw ('Recording failed!');
const waveCount = AudioPlayerWidget.wavesCount;
final step = amplitudeTimeline.length < waveCount
? 1
: (amplitudeTimeline.length / waveCount).round();
final waveform = <int>[];
for (var i = 0; i < amplitudeTimeline.length; i += step) {
waveform.add((amplitudeTimeline[i] / 100 * 1024).round());
}
Navigator.of(context, rootNavigator: false).pop<RecordingResult>(
RecordingResult(
path: path,
duration: _duration.inMilliseconds,
waveform: waveform,
fileName: fileName,
),
);
}
@override
Widget build(BuildContext context) {
final theme = Theme.of(context);
const maxDecibalWidth = 64.0;
final time =
'${_duration.inMinutes.toString().padLeft(2, '0')}:${(_duration.inSeconds % 60).toString().padLeft(2, '0')}';
final content = error
? Text(L10n.of(context).oopsSomethingWentWrong)
: Row(
children: [
Container(
width: 16,
height: 16,
decoration: BoxDecoration(
borderRadius: BorderRadius.circular(32),
color: Colors.red,
),
),
Expanded(
child: Row(
mainAxisSize: MainAxisSize.min,
mainAxisAlignment: MainAxisAlignment.end,
children: amplitudeTimeline.reversed
.take(26)
.toList()
.reversed
.map(
(amplitude) => Container(
margin: const EdgeInsets.only(left: 2),
width: 4,
decoration: BoxDecoration(
color: theme.colorScheme.primary,
borderRadius:
BorderRadius.circular(AppConfig.borderRadius),
),
height: maxDecibalWidth * (amplitude / 100),
),
)
.toList(),
),
),
const SizedBox(width: 8),
SizedBox(
width: 48,
child: Text(time),
),
],
);
if (PlatformInfos.isCupertinoStyle) {
return CupertinoAlertDialog(
content: content,
actions: [
CupertinoDialogAction(
onPressed: () => Navigator.of(context, rootNavigator: false).pop(),
child: Text(
L10n.of(context).cancel,
style: TextStyle(
color: theme.textTheme.bodyMedium?.color?.withAlpha(150),
),
),
),
if (error != true)
CupertinoDialogAction(
onPressed: _stopAndSend,
child: Text(L10n.of(context).send),
),
],
);
}
return AlertDialog(
content: content,
actions: [
TextButton(
onPressed: () => Navigator.of(context, rootNavigator: false).pop(),
child: Text(
L10n.of(context).cancel,
style: TextStyle(
color: theme.colorScheme.error,
),
),
),
if (error != true)
TextButton(
onPressed: _stopAndSend,
child: Text(L10n.of(context).send),
),
],
);
}
}
class RecordingResult {
final String path;
final int duration;
final List<int> waveform;
final String? fileName;
const RecordingResult({
required this.path,
required this.duration,
required this.waveform,
required this.fileName,
});
}
extension on AudioEncoder {
String get fileExtension {
switch (this) {
case AudioEncoder.aacLc:
case AudioEncoder.aacEld:
case AudioEncoder.aacHe:
return 'm4a';
case AudioEncoder.opus:
return 'ogg';
case AudioEncoder.wav:
return 'wav';
case AudioEncoder.amrNb:
case AudioEncoder.amrWb:
case AudioEncoder.flac:
case AudioEncoder.pcm16bits:
throw UnsupportedError('Not yet used');
}
}
}

@@ -0,0 +1,89 @@
import 'package:flutter/material.dart';
import 'package:fluffychat/config/themes.dart';
import 'package:fluffychat/l10n/l10n.dart';
import 'package:fluffychat/pages/chat/recording_view_model.dart';
class RecordingInputRow extends StatelessWidget {
final RecordingViewModelState state;
final Future<void> Function(String, int, List<int>, String?) onSend;
const RecordingInputRow({
required this.state,
required this.onSend,
super.key,
});
@override
Widget build(BuildContext context) {
final theme = Theme.of(context);
const maxDecibalWidth = 36.0;
final time =
'${state.duration.inMinutes.toString().padLeft(2, '0')}:${(state.duration.inSeconds % 60).toString().padLeft(2, '0')}';
return Row(
children: [
IconButton(
tooltip: L10n.of(context).cancel,
icon: const Icon(Icons.delete_outlined),
color: theme.colorScheme.error,
onPressed: state.cancel,
),
if (state.isPaused)
IconButton(
tooltip: L10n.of(context).resume,
icon: const Icon(Icons.play_circle_outline_outlined),
onPressed: state.resume,
)
else
IconButton(
tooltip: L10n.of(context).pause,
icon: const Icon(Icons.pause_circle_outline_outlined),
onPressed: state.pause,
),
Text(time),
const SizedBox(width: 8),
Expanded(
child: LayoutBuilder(
builder: (context, constraints) {
const width = 4;
return Row(
mainAxisSize: MainAxisSize.min,
mainAxisAlignment: MainAxisAlignment.end,
children: state.amplitudeTimeline.reversed
.take((constraints.maxWidth / (width + 2)).floor())
.toList()
.reversed
.map(
(amplitude) => Container(
margin: const EdgeInsets.only(left: 2),
width: width.toDouble(),
decoration: BoxDecoration(
color: theme.colorScheme.primary,
borderRadius: BorderRadius.circular(2),
),
height: maxDecibalWidth * (amplitude / 100),
),
)
.toList(),
);
},
),
),
IconButton(
style: IconButton.styleFrom(
disabledBackgroundColor: theme.bubbleColor.withAlpha(128),
backgroundColor: theme.bubbleColor,
foregroundColor: theme.onBubbleColor,
),
tooltip: L10n.of(context).sendAudio,
icon: state.isSending
? const SizedBox.square(
dimension: 24,
child: CircularProgressIndicator.adaptive(),
)
: const Icon(Icons.send_outlined),
onPressed: state.isSending ? null : () => state.stopAndSend(onSend),
),
],
);
}
}
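For reference, the number of visible waveform bars follows directly from the layout constraint used above: each bar is 4 px wide plus a 2 px left margin, so an area of width w shows floor(w / 6) of the most recent amplitude samples. A tiny worked example (the 300 px width is an assumed value, purely for illustration):

    void main() {
      const barWidth = 4; // px, matches the widget above
      const spacing = 2; // px left margin per bar
      const availableWidth = 300.0; // assumed example constraint
      print((availableWidth / (barWidth + spacing)).floor()); // 50 bars
    }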

@@ -0,0 +1,228 @@
import 'dart:async';
import 'package:flutter/cupertino.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:device_info_plus/device_info_plus.dart';
import 'package:matrix/matrix.dart';
import 'package:path/path.dart' as path_lib;
import 'package:path_provider/path_provider.dart';
import 'package:record/record.dart';
import 'package:wakelock_plus/wakelock_plus.dart';
import 'package:fluffychat/config/setting_keys.dart';
import 'package:fluffychat/l10n/l10n.dart';
import 'package:fluffychat/utils/platform_infos.dart';
import 'package:fluffychat/widgets/adaptive_dialogs/show_ok_cancel_alert_dialog.dart';
import 'package:fluffychat/widgets/matrix.dart';
import 'events/audio_player.dart';
class RecordingViewModel extends StatefulWidget {
final Widget Function(BuildContext, RecordingViewModelState) builder;
const RecordingViewModel({
required this.builder,
super.key,
});
@override
RecordingViewModelState createState() => RecordingViewModelState();
}
class RecordingViewModelState extends State<RecordingViewModel> {
Timer? _recorderSubscription;
Duration duration = Duration.zero;
bool error = false;
bool isSending = false;
bool get isRecording => _audioRecorder != null;
AudioRecorder? _audioRecorder;
final List<double> amplitudeTimeline = [];
String? fileName;
bool isPaused = false;
Future<void> startRecording(Room room) async {
room.client.getConfig(); // Preload server file configuration.
if (PlatformInfos.isAndroid) {
final info = await DeviceInfoPlugin().androidInfo;
if (info.version.sdkInt < 19) {
showOkAlertDialog(
context: context,
title: L10n.of(context).unsupportedAndroidVersion,
message: L10n.of(context).unsupportedAndroidVersionLong,
okLabel: L10n.of(context).close,
);
return;
}
}
if (await AudioRecorder().hasPermission() == false) return;
final store = Matrix.of(context).store;
final audioRecorder = _audioRecorder ??= AudioRecorder();
setState(() {});
try {
final codec = kIsWeb
// Web seems to create webm instead of ogg when using opus encoder
// which does not play on iOS right now. So we use wav for now:
? AudioEncoder.wav
// Everywhere else we use opus if supported by the platform:
: await audioRecorder.isEncoderSupported(AudioEncoder.opus)
? AudioEncoder.opus
: AudioEncoder.aacLc;
fileName =
'recording${DateTime.now().microsecondsSinceEpoch}.${codec.fileExtension}';
String? path;
if (!kIsWeb) {
final tempDir = await getTemporaryDirectory();
path = path_lib.join(tempDir.path, fileName);
}
final result = await audioRecorder.hasPermission();
if (result != true) {
setState(() => error = true);
return;
}
await WakelockPlus.enable();
await audioRecorder.start(
RecordConfig(
bitRate: AppSettings.audioRecordingBitRate.getItem(store),
sampleRate: AppSettings.audioRecordingSamplingRate.getItem(store),
numChannels: AppSettings.audioRecordingNumChannels.getItem(store),
autoGain: AppSettings.audioRecordingAutoGain.getItem(store),
echoCancel: AppSettings.audioRecordingEchoCancel.getItem(store),
noiseSuppress: AppSettings.audioRecordingNoiseSuppress.getItem(store),
encoder: codec,
),
path: path ?? '',
);
setState(() => duration = Duration.zero);
_subscribe();
} catch (_) {
setState(() => error = true);
rethrow;
}
}
@override
void dispose() {
_reset();
super.dispose();
}
void _subscribe() {
_recorderSubscription?.cancel();
_recorderSubscription =
Timer.periodic(const Duration(milliseconds: 100), (_) async {
final amplitude = await _audioRecorder!.getAmplitude();
var value = 100 + amplitude.current * 2;
value = value < 1 ? 1 : value;
amplitudeTimeline.add(value);
setState(() {
duration += const Duration(milliseconds: 100);
});
});
}
void _reset() {
WakelockPlus.disable();
_recorderSubscription?.cancel();
_audioRecorder?.stop();
_audioRecorder = null;
isSending = false;
error = false;
fileName = null;
duration = Duration.zero;
amplitudeTimeline.clear();
isPaused = false;
}
void cancel() {
setState(() {
_reset();
});
}
void pause() {
_audioRecorder?.pause();
_recorderSubscription?.cancel();
setState(() {
isPaused = true;
});
}
void resume() {
_audioRecorder?.resume();
_subscribe();
setState(() {
isPaused = false;
});
}
void stopAndSend(
Future<void> Function(
String path,
int duration,
List<int> waveform,
String? fileName,
) onSend,
) async {
_recorderSubscription?.cancel();
final path = await _audioRecorder?.stop();
if (path == null) throw ('Recording failed!');
const waveCount = AudioPlayerWidget.wavesCount;
final step = amplitudeTimeline.length < waveCount
? 1
: (amplitudeTimeline.length / waveCount).round();
final waveform = <int>[];
for (var i = 0; i < amplitudeTimeline.length; i += step) {
waveform.add((amplitudeTimeline[i] / 100 * 1024).round());
}
setState(() {
isSending = true;
});
try {
await onSend(path, duration.inMilliseconds, waveform, fileName);
} catch (e, s) {
Logs().e('Unable to send voice message', e, s);
setState(() {
isSending = false;
});
return;
}
cancel();
}
@override
Widget build(BuildContext context) => widget.builder(context, this);
}
extension on AudioEncoder {
String get fileExtension {
switch (this) {
case AudioEncoder.aacLc:
case AudioEncoder.aacEld:
case AudioEncoder.aacHe:
return 'm4a';
case AudioEncoder.opus:
return 'ogg';
case AudioEncoder.wav:
return 'wav';
case AudioEncoder.amrNb:
case AudioEncoder.amrWb:
case AudioEncoder.flac:
case AudioEncoder.pcm16bits:
throw UnsupportedError('Not yet used');
}
}
}
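Putting the amplitude handling together: every 100 ms the recorder amplitude (a dBFS value, usually negative) is mapped to roughly 0..100 via 100 + current * 2 and floored at 1, and on send the timeline is downsampled to about AudioPlayerWidget.wavesCount entries and rescaled to the 0..1024 range used by the org.matrix.msc1767.audio waveform. A standalone sketch of that downsampling step, lifted from stopAndSend above (the default of 100 only stands in for AudioPlayerWidget.wavesCount, whose actual value is not part of this diff):

    List<int> buildWaveform(List<double> amplitudeTimeline, {int waveCount = 100}) {
      // Keep roughly waveCount samples by striding over the 100 ms amplitude timeline.
      final step = amplitudeTimeline.length < waveCount
          ? 1
          : (amplitudeTimeline.length / waveCount).round();
      final waveform = <int>[];
      for (var i = 0; i < amplitudeTimeline.length; i += step) {
        // Rescale the 0..100 amplitude to the 0..1024 scale expected by clients.
        waveform.add((amplitudeTimeline[i] / 100 * 1024).round());
      }
      return waveform;
    }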