feat: Nicer record voice message UI with pause function
parent 4482b37feb
commit e86deb99c9
@@ -1,256 +0,0 @@
import 'dart:async';

import 'package:flutter/cupertino.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';

import 'package:path/path.dart' as path_lib;
import 'package:path_provider/path_provider.dart';
import 'package:record/record.dart';
import 'package:wakelock_plus/wakelock_plus.dart';

import 'package:fluffychat/config/app_config.dart';
import 'package:fluffychat/config/setting_keys.dart';
import 'package:fluffychat/l10n/l10n.dart';
import 'package:fluffychat/utils/platform_infos.dart';
import 'package:fluffychat/widgets/matrix.dart';
import 'events/audio_player.dart';

class RecordingDialog extends StatefulWidget {
  const RecordingDialog({
    super.key,
  });

  @override
  RecordingDialogState createState() => RecordingDialogState();
}

class RecordingDialogState extends State<RecordingDialog> {
  Timer? _recorderSubscription;
  Duration _duration = Duration.zero;

  bool error = false;

  final _audioRecorder = AudioRecorder();
  final List<double> amplitudeTimeline = [];

  String? fileName;

  Future<void> startRecording() async {
    final store = Matrix.of(context).store;
    try {
      final codec = kIsWeb
          // Web seems to create webm instead of ogg when using opus encoder
          // which does not play on iOS right now. So we use wav for now:
          ? AudioEncoder.wav
          // Everywhere else we use opus if supported by the platform:
          : await _audioRecorder.isEncoderSupported(AudioEncoder.opus)
              ? AudioEncoder.opus
              : AudioEncoder.aacLc;
      fileName =
          'recording${DateTime.now().microsecondsSinceEpoch}.${codec.fileExtension}';
      String? path;
      if (!kIsWeb) {
        final tempDir = await getTemporaryDirectory();
        path = path_lib.join(tempDir.path, fileName);
      }

      final result = await _audioRecorder.hasPermission();
      if (result != true) {
        setState(() => error = true);
        return;
      }
      await WakelockPlus.enable();

      await _audioRecorder.start(
        RecordConfig(
          bitRate: AppSettings.audioRecordingBitRate.getItem(store),
          sampleRate: AppSettings.audioRecordingSamplingRate.getItem(store),
          numChannels: AppSettings.audioRecordingNumChannels.getItem(store),
          autoGain: AppSettings.audioRecordingAutoGain.getItem(store),
          echoCancel: AppSettings.audioRecordingEchoCancel.getItem(store),
          noiseSuppress: AppSettings.audioRecordingNoiseSuppress.getItem(store),
          encoder: codec,
        ),
        path: path ?? '',
      );
      setState(() => _duration = Duration.zero);
      _recorderSubscription?.cancel();
      _recorderSubscription =
          Timer.periodic(const Duration(milliseconds: 100), (_) async {
        final amplitude = await _audioRecorder.getAmplitude();
        var value = 100 + amplitude.current * 2;
        value = value < 1 ? 1 : value;
        amplitudeTimeline.add(value);
        setState(() {
          _duration += const Duration(milliseconds: 100);
        });
      });
    } catch (_) {
      setState(() => error = true);
      rethrow;
    }
  }

  @override
  void initState() {
    super.initState();
    startRecording();
  }

  @override
  void dispose() {
    WakelockPlus.disable();
    _recorderSubscription?.cancel();
    _audioRecorder.stop();
    super.dispose();
  }

  void _stopAndSend() async {
    _recorderSubscription?.cancel();
    final path = await _audioRecorder.stop();

    if (path == null) throw ('Recording failed!');
    const waveCount = AudioPlayerWidget.wavesCount;
    final step = amplitudeTimeline.length < waveCount
        ? 1
        : (amplitudeTimeline.length / waveCount).round();
    final waveform = <int>[];
    for (var i = 0; i < amplitudeTimeline.length; i += step) {
      waveform.add((amplitudeTimeline[i] / 100 * 1024).round());
    }
    Navigator.of(context, rootNavigator: false).pop<RecordingResult>(
      RecordingResult(
        path: path,
        duration: _duration.inMilliseconds,
        waveform: waveform,
        fileName: fileName,
      ),
    );
  }

  @override
  Widget build(BuildContext context) {
    final theme = Theme.of(context);

    const maxDecibalWidth = 64.0;
    final time =
        '${_duration.inMinutes.toString().padLeft(2, '0')}:${(_duration.inSeconds % 60).toString().padLeft(2, '0')}';
    final content = error
        ? Text(L10n.of(context).oopsSomethingWentWrong)
        : Row(
            children: [
              Container(
                width: 16,
                height: 16,
                decoration: BoxDecoration(
                  borderRadius: BorderRadius.circular(32),
                  color: Colors.red,
                ),
              ),
              Expanded(
                child: Row(
                  mainAxisSize: MainAxisSize.min,
                  mainAxisAlignment: MainAxisAlignment.end,
                  children: amplitudeTimeline.reversed
                      .take(26)
                      .toList()
                      .reversed
                      .map(
                        (amplitude) => Container(
                          margin: const EdgeInsets.only(left: 2),
                          width: 4,
                          decoration: BoxDecoration(
                            color: theme.colorScheme.primary,
                            borderRadius:
                                BorderRadius.circular(AppConfig.borderRadius),
                          ),
                          height: maxDecibalWidth * (amplitude / 100),
                        ),
                      )
                      .toList(),
                ),
              ),
              const SizedBox(width: 8),
              SizedBox(
                width: 48,
                child: Text(time),
              ),
            ],
          );
    if (PlatformInfos.isCupertinoStyle) {
      return CupertinoAlertDialog(
        content: content,
        actions: [
          CupertinoDialogAction(
            onPressed: () => Navigator.of(context, rootNavigator: false).pop(),
            child: Text(
              L10n.of(context).cancel,
              style: TextStyle(
                color: theme.textTheme.bodyMedium?.color?.withAlpha(150),
              ),
            ),
          ),
          if (error != true)
            CupertinoDialogAction(
              onPressed: _stopAndSend,
              child: Text(L10n.of(context).send),
            ),
        ],
      );
    }
    return AlertDialog(
      content: content,
      actions: [
        TextButton(
          onPressed: () => Navigator.of(context, rootNavigator: false).pop(),
          child: Text(
            L10n.of(context).cancel,
            style: TextStyle(
              color: theme.colorScheme.error,
            ),
          ),
        ),
        if (error != true)
          TextButton(
            onPressed: _stopAndSend,
            child: Text(L10n.of(context).send),
          ),
      ],
    );
  }
}

class RecordingResult {
  final String path;
  final int duration;
  final List<int> waveform;
  final String? fileName;

  const RecordingResult({
    required this.path,
    required this.duration,
    required this.waveform,
    required this.fileName,
  });
}

extension on AudioEncoder {
  String get fileExtension {
    switch (this) {
      case AudioEncoder.aacLc:
      case AudioEncoder.aacEld:
      case AudioEncoder.aacHe:
        return 'm4a';
      case AudioEncoder.opus:
        return 'ogg';
      case AudioEncoder.wav:
        return 'wav';
      case AudioEncoder.amrNb:
      case AudioEncoder.amrWb:
      case AudioEncoder.flac:
      case AudioEncoder.pcm16bits:
        throw UnsupportedError('Not yet used');
    }
  }
}
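Note on the deleted file above: RecordingDialog handed its RecordingResult back through Navigator.pop, so the caller presumably awaited it from showDialog. A minimal sketch of such a call site, for orientation only (openRecorder and the send step are assumptions, not part of this diff):

import 'package:flutter/material.dart';

// Hypothetical call site for the old modal flow (not part of this diff).
Future<void> openRecorder(BuildContext context) async {
  final result = await showDialog<RecordingResult>(
    context: context,
    barrierDismissible: false, // recording should not stop on a stray tap
    builder: (context) => const RecordingDialog(),
  );
  if (result == null) return; // cancelled: the dialog popped with no result
  // result.path, result.duration (ms) and result.waveform would be sent
  // as a voice message event from here.
}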
@@ -0,0 +1,89 @@
import 'package:flutter/material.dart';

import 'package:fluffychat/config/themes.dart';
import 'package:fluffychat/l10n/l10n.dart';
import 'package:fluffychat/pages/chat/recording_view_model.dart';

class RecordingInputRow extends StatelessWidget {
  final RecordingViewModelState state;
  final Future<void> Function(String, int, List<int>, String?) onSend;
  const RecordingInputRow({
    required this.state,
    required this.onSend,
    super.key,
  });

  @override
  Widget build(BuildContext context) {
    final theme = Theme.of(context);
    const maxDecibalWidth = 36.0;
    final time =
        '${state.duration.inMinutes.toString().padLeft(2, '0')}:${(state.duration.inSeconds % 60).toString().padLeft(2, '0')}';
    return Row(
      children: [
        IconButton(
          tooltip: L10n.of(context).cancel,
          icon: const Icon(Icons.delete_outlined),
          color: theme.colorScheme.error,
          onPressed: state.cancel,
        ),
        if (state.isPaused)
          IconButton(
            tooltip: L10n.of(context).resume,
            icon: const Icon(Icons.play_circle_outline_outlined),
            onPressed: state.resume,
          )
        else
          IconButton(
            tooltip: L10n.of(context).pause,
            icon: const Icon(Icons.pause_circle_outline_outlined),
            onPressed: state.pause,
          ),
        Text(time),
        const SizedBox(width: 8),
        Expanded(
          child: LayoutBuilder(
            builder: (context, constraints) {
              const width = 4;
              return Row(
                mainAxisSize: MainAxisSize.min,
                mainAxisAlignment: MainAxisAlignment.end,
                children: state.amplitudeTimeline.reversed
                    .take((constraints.maxWidth / (width + 2)).floor())
                    .toList()
                    .reversed
                    .map(
                      (amplitude) => Container(
                        margin: const EdgeInsets.only(left: 2),
                        width: width.toDouble(),
                        decoration: BoxDecoration(
                          color: theme.colorScheme.primary,
                          borderRadius: BorderRadius.circular(2),
                        ),
                        height: maxDecibalWidth * (amplitude / 100),
                      ),
                    )
                    .toList(),
              );
            },
          ),
        ),
        IconButton(
          style: IconButton.styleFrom(
            disabledBackgroundColor: theme.bubbleColor.withAlpha(128),
            backgroundColor: theme.bubbleColor,
            foregroundColor: theme.onBubbleColor,
          ),
          tooltip: L10n.of(context).sendAudio,
          icon: state.isSending
              ? const SizedBox.square(
                  dimension: 24,
                  child: CircularProgressIndicator.adaptive(),
                )
              : const Icon(Icons.send_outlined),
          onPressed: state.isSending ? null : () => state.stopAndSend(onSend),
        ),
      ],
    );
  }
}
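How this widget fits with the view model added in the next hunk: RecordingViewModel owns the recorder state and rebuilds its builder callback on every change, while RecordingInputRow is one face for that state. The actual chat input integration is outside this diff, so the wiring below is a sketch under that assumption (buildRecordingArea and sendVoiceMessage are invented names):

import 'package:flutter/material.dart';

// Hypothetical wiring (the real chat input integration is not shown in
// this diff): the view model hands its state object to the UI via `builder`.
Widget buildRecordingArea(
  Future<void> Function(String, int, List<int>, String?) sendVoiceMessage,
) =>
    RecordingViewModel(
      builder: (context, state) => state.isRecording
          // While a recording is active, show the recording controls:
          ? RecordingInputRow(state: state, onSend: sendVoiceMessage)
          // Otherwise render nothing; the regular text input lives here.
          : const SizedBox.shrink(),
    );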
@@ -0,0 +1,228 @@
import 'dart:async';

import 'package:flutter/cupertino.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';

import 'package:device_info_plus/device_info_plus.dart';
import 'package:matrix/matrix.dart';
import 'package:path/path.dart' as path_lib;
import 'package:path_provider/path_provider.dart';
import 'package:record/record.dart';
import 'package:wakelock_plus/wakelock_plus.dart';

import 'package:fluffychat/config/setting_keys.dart';
import 'package:fluffychat/l10n/l10n.dart';
import 'package:fluffychat/utils/platform_infos.dart';
import 'package:fluffychat/widgets/adaptive_dialogs/show_ok_cancel_alert_dialog.dart';
import 'package:fluffychat/widgets/matrix.dart';
import 'events/audio_player.dart';

class RecordingViewModel extends StatefulWidget {
  final Widget Function(BuildContext, RecordingViewModelState) builder;

  const RecordingViewModel({
    required this.builder,
    super.key,
  });

  @override
  RecordingViewModelState createState() => RecordingViewModelState();
}

class RecordingViewModelState extends State<RecordingViewModel> {
  Timer? _recorderSubscription;
  Duration duration = Duration.zero;

  bool error = false;
  bool isSending = false;

  bool get isRecording => _audioRecorder != null;

  AudioRecorder? _audioRecorder;
  final List<double> amplitudeTimeline = [];

  String? fileName;

  bool isPaused = false;

  Future<void> startRecording(Room room) async {
    room.client.getConfig(); // Preload server file configuration.
    if (PlatformInfos.isAndroid) {
      final info = await DeviceInfoPlugin().androidInfo;
      if (info.version.sdkInt < 19) {
        showOkAlertDialog(
          context: context,
          title: L10n.of(context).unsupportedAndroidVersion,
          message: L10n.of(context).unsupportedAndroidVersionLong,
          okLabel: L10n.of(context).close,
        );
        return;
      }
    }
    if (await AudioRecorder().hasPermission() == false) return;

    final store = Matrix.of(context).store;

    final audioRecorder = _audioRecorder ??= AudioRecorder();
    setState(() {});

    try {
      final codec = kIsWeb
          // Web seems to create webm instead of ogg when using opus encoder
          // which does not play on iOS right now. So we use wav for now:
          ? AudioEncoder.wav
          // Everywhere else we use opus if supported by the platform:
          : await audioRecorder.isEncoderSupported(AudioEncoder.opus)
              ? AudioEncoder.opus
              : AudioEncoder.aacLc;
      fileName =
          'recording${DateTime.now().microsecondsSinceEpoch}.${codec.fileExtension}';
      String? path;
      if (!kIsWeb) {
        final tempDir = await getTemporaryDirectory();
        path = path_lib.join(tempDir.path, fileName);
      }

      final result = await audioRecorder.hasPermission();
      if (result != true) {
        setState(() => error = true);
        return;
      }
      await WakelockPlus.enable();

      await audioRecorder.start(
        RecordConfig(
          bitRate: AppSettings.audioRecordingBitRate.getItem(store),
          sampleRate: AppSettings.audioRecordingSamplingRate.getItem(store),
          numChannels: AppSettings.audioRecordingNumChannels.getItem(store),
          autoGain: AppSettings.audioRecordingAutoGain.getItem(store),
          echoCancel: AppSettings.audioRecordingEchoCancel.getItem(store),
          noiseSuppress: AppSettings.audioRecordingNoiseSuppress.getItem(store),
          encoder: codec,
        ),
        path: path ?? '',
      );
      setState(() => duration = Duration.zero);
      _subscribe();
    } catch (_) {
      setState(() => error = true);
      rethrow;
    }
  }

  @override
  void dispose() {
    _reset();
    super.dispose();
  }

  void _subscribe() {
    _recorderSubscription?.cancel();
    _recorderSubscription =
        Timer.periodic(const Duration(milliseconds: 100), (_) async {
      final amplitude = await _audioRecorder!.getAmplitude();
      var value = 100 + amplitude.current * 2;
      value = value < 1 ? 1 : value;
      amplitudeTimeline.add(value);
      setState(() {
        duration += const Duration(milliseconds: 100);
      });
    });
  }

  void _reset() {
    WakelockPlus.disable();
    _recorderSubscription?.cancel();
    _audioRecorder?.stop();
    _audioRecorder = null;
    isSending = false;
    error = false;
    fileName = null;
    duration = Duration.zero;
    amplitudeTimeline.clear();
    isPaused = false;
  }

  void cancel() {
    setState(() {
      _reset();
    });
  }

  void pause() {
    _audioRecorder?.pause();
    _recorderSubscription?.cancel();
    setState(() {
      isPaused = true;
    });
  }

  void resume() {
    _audioRecorder?.resume();
    _subscribe();
    setState(() {
      isPaused = false;
    });
  }

  void stopAndSend(
    Future<void> Function(
      String path,
      int duration,
      List<int> waveform,
      String? fileName,
    ) onSend,
  ) async {
    _recorderSubscription?.cancel();
    final path = await _audioRecorder?.stop();

    if (path == null) throw ('Recording failed!');
    const waveCount = AudioPlayerWidget.wavesCount;
    final step = amplitudeTimeline.length < waveCount
        ? 1
        : (amplitudeTimeline.length / waveCount).round();
    final waveform = <int>[];
    for (var i = 0; i < amplitudeTimeline.length; i += step) {
      waveform.add((amplitudeTimeline[i] / 100 * 1024).round());
    }

    setState(() {
      isSending = true;
    });
    try {
      await onSend(path, duration.inMilliseconds, waveform, fileName);
    } catch (e, s) {
      Logs().e('Unable to send voice message', e, s);
      setState(() {
        isSending = false;
      });
      return;
    }

    cancel();
  }

  @override
  Widget build(BuildContext context) => widget.builder(context, this);
}

extension on AudioEncoder {
  String get fileExtension {
    switch (this) {
      case AudioEncoder.aacLc:
      case AudioEncoder.aacEld:
      case AudioEncoder.aacHe:
        return 'm4a';
      case AudioEncoder.opus:
        return 'ogg';
      case AudioEncoder.wav:
        return 'wav';
      case AudioEncoder.amrNb:
      case AudioEncoder.amrWb:
      case AudioEncoder.flac:
      case AudioEncoder.pcm16bits:
        throw UnsupportedError('Not yet used');
    }
  }
}
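The waveform downsampling in stopAndSend is carried over unchanged from the deleted dialog. As a standalone reference, the same logic reads as follows (the function name is ours; the 0..1024 target range matches what the amplitude values are scaled to above):

// Standalone restatement of the downsampling step in stopAndSend: keep
// roughly `waveCount` evenly spaced samples and rescale the 0..100
// amplitude values onto a 0..1024 integer range for the audio waveform.
List<int> downsampleWaveform(List<double> amplitudes, int waveCount) {
  final step = amplitudes.length < waveCount
      ? 1
      : (amplitudes.length / waveCount).round();
  final waveform = <int>[];
  for (var i = 0; i < amplitudes.length; i += step) {
    waveform.add((amplitudes[i] / 100 * 1024).round());
  }
  return waveform;
}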