refactor: Recording dialog

pull/1224/head^2
Krille 1 year ago
parent be0fbd5ab0
commit 3286b19387
No known key found for this signature in database
GPG Key ID: E067ECD60F1A0652

@@ -621,10 +621,10 @@ class ChatController extends State<ChatPageWithRoom>
       builder: (c) => const RecordingDialog(),
     );
     if (result == null) return;
-    final audioFile = File(result.path);
+    final audioFile = XFile(result.path);
     final file = MatrixAudioFile(
-      bytes: audioFile.readAsBytesSync(),
-      name: audioFile.path,
+      bytes: await audioFile.readAsBytes(),
+      name: result.fileName ?? audioFile.path,
     );
     await room.sendFileEvent(
       file,
@@ -1,9 +1,11 @@
 import 'dart:async';
 import 'package:flutter/cupertino.dart';
+import 'package:flutter/foundation.dart';
 import 'package:flutter/material.dart';
 import 'package:flutter_gen/gen_l10n/l10n.dart';
+import 'package:path/path.dart' as path_lib;
 import 'package:path_provider/path_provider.dart';
 import 'package:record/record.dart';
 import 'package:wakelock_plus/wakelock_plus.dart';
@@ -26,10 +28,12 @@ class RecordingDialogState extends State<RecordingDialog> {
   Duration _duration = Duration.zero;
   bool error = false;
-  String? _recordedPath;
   final _audioRecorder = AudioRecorder();
   final List<double> amplitudeTimeline = [];
+  String? fileName;
   static const int bitRate = 64000;
   static const int samplingRate = 44100;
@ -37,9 +41,13 @@ class RecordingDialogState extends State<RecordingDialog> {
try {
final useOpus =
await _audioRecorder.isEncoderSupported(AudioEncoder.opus);
final tempDir = await getTemporaryDirectory();
final path = _recordedPath =
'${tempDir.path}/recording${DateTime.now().microsecondsSinceEpoch}.${useOpus ? 'ogg' : 'm4a'}';
fileName =
'recording${DateTime.now().microsecondsSinceEpoch}.${useOpus ? 'ogg' : 'm4a'}';
String? path;
if (!kIsWeb) {
final tempDir = await getTemporaryDirectory();
path = path_lib.join(tempDir.path, fileName);
}
final result = await _audioRecorder.hasPermission();
if (result != true) {
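The `kIsWeb` branch is needed because `path_provider`'s `getTemporaryDirectory()` has no web implementation; on web the `record` plugin keeps the audio in a browser blob and hands back its URL from `stop()`, so no target path can (or needs to) be chosen up front. A hedged sketch of that branching, with an illustrative helper name:

```dart
import 'package:flutter/foundation.dart' show kIsWeb;
import 'package:path/path.dart' as path_lib;
import 'package:path_provider/path_provider.dart';

/// Hypothetical helper: resolves where a new recording should be written.
/// Returns null on web, where the recorder manages its own blob storage.
Future<String?> resolveRecordingPath(String fileName) async {
  if (kIsWeb) return null; // getTemporaryDirectory() is unsupported on web
  final tempDir = await getTemporaryDirectory();
  return path_lib.join(tempDir.path, fileName);
}
```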
@@ -58,7 +66,7 @@ class RecordingDialogState extends State<RecordingDialog> {
           noiseSuppress: true,
           encoder: useOpus ? AudioEncoder.opus : AudioEncoder.aacLc,
         ),
-        path: path,
+        path: path ?? '',
       );
       setState(() => _duration = Duration.zero);
       _recorderSubscription?.cancel();
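The fallback to `''` is there because `AudioRecorder.start()` in the `record` package takes a required, non-nullable `path`; on web, where no path was resolved, the value is effectively ignored in favour of the blob handled by the plugin. A tiny sketch of that call under those assumptions:

```dart
import 'package:record/record.dart';

/// Hypothetical wrapper: starts recording, tolerating a missing path on web.
Future<void> startRecorder(
  AudioRecorder recorder,
  RecordConfig config,
  String? path,
) =>
    recorder.start(config, path: path ?? '');
```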
@@ -94,8 +102,8 @@ class RecordingDialogState extends State<RecordingDialog> {
   void _stopAndSend() async {
     _recorderSubscription?.cancel();
-    await _audioRecorder.stop();
-    final path = _recordedPath;
+    final path = await _audioRecorder.stop();
     if (path == null) throw ('Recording failed!');
     const waveCount = AudioPlayerWidget.wavesCount;
     final step = amplitudeTimeline.length < waveCount
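Dropping `_recordedPath` works because, in current versions of the `record` package, `AudioRecorder.stop()` returns a `Future<String?>` with the output location: the file path on native platforms, a blob URL on web, or null if nothing was recorded. Roughly, under those assumptions:

```dart
import 'package:record/record.dart';

/// Hypothetical helper: stops the recorder and returns the output location,
/// throwing if the plugin reports no recording.
Future<String> finishRecording(AudioRecorder recorder) async {
  final path = await recorder.stop(); // file path, or blob URL on web
  if (path == null) {
    throw Exception('Recording failed!');
  }
  return path;
}
```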
@@ -110,6 +118,7 @@ class RecordingDialogState extends State<RecordingDialog> {
         path: path,
         duration: _duration.inMilliseconds,
         waveform: waveform,
+        fileName: fileName,
       ),
     );
   }
@@ -220,23 +229,12 @@ class RecordingResult {
   final String path;
   final int duration;
   final List<int> waveform;
+  final String? fileName;
   const RecordingResult({
     required this.path,
     required this.duration,
     required this.waveform,
+    required this.fileName,
   });
-  factory RecordingResult.fromJson(Map<String, dynamic> json) =>
-      RecordingResult(
-        path: json['path'],
-        duration: json['duration'],
-        waveform: List<int>.from(json['waveform']),
-      );
-  Map<String, dynamic> toJson() => {
-        'path': path,
-        'duration': duration,
-        'waveform': waveform,
-      };
 }
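With `fileName` carried on the result, the caller no longer has to derive a display name from a path that may be a blob URL (see the first hunk), and the `fromJson`/`toJson` round-trip goes away, presumably because the result is only ever handed back in memory through `Navigator.pop` rather than serialized. An illustrative construction of the new shape, with placeholder values:

```dart
// Placeholder values, purely for illustration of the new RecordingResult.
const result = RecordingResult(
  path: '/tmp/recording1700000000000000.ogg',
  duration: 2300, // milliseconds
  waveform: [12, 48, 36, 20],
  fileName: 'recording1700000000000000.ogg',
);
```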
