@@ -42,10 +42,11 @@ class _RecordingDialogState extends State<RecordingDialog> {
           path: _recordedPath, encoder: AudioEncoder.AAC);
       setState(() => _duration = Duration.zero);
       _recorderSubscription?.cancel();
-      _recorderSubscription = Timer.periodic(Duration(seconds: 1), (_) async {
+      _recorderSubscription =
+          Timer.periodic(Duration(milliseconds: 100), (_) async {
+        _amplitude = await _audioRecorder.getAmplitude();
         setState(() {
-          _duration += Duration(seconds: 1);
+          _duration += Duration(milliseconds: 100);
         });
       });
     } catch (e, s) {
@@ -71,8 +72,9 @@ class _RecordingDialogState extends State<RecordingDialog> {
   @override
   Widget build(BuildContext context) {
     const maxDecibalWidth = 64.0;
-    final decibalWidth = ((_amplitude.current / _amplitude.max) * 2) *
-        (maxDecibalWidth / 4).toDouble();
+    final decibalWidth =
+        ((_amplitude == null ? 0 : _amplitude.current / _amplitude.max) * 2) *
+            (maxDecibalWidth / 4).toDouble();
     final time =
         '${_duration.inMinutes.toString().padLeft(2, '0')}:${(_duration.inSeconds % 60).toString().padLeft(2, '0')}';
@@ -86,7 +88,7 @@ class _RecordingDialogState extends State<RecordingDialog> {
             height: maxDecibalWidth,
             alignment: Alignment.center,
             child: AnimatedContainer(
-              duration: Duration(seconds: 1),
+              duration: Duration(milliseconds: 100),
               width: decibalWidth,
               height: decibalWidth,
               decoration: BoxDecoration(
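
Taken together, the hunks swap the one-second ticker for a 100 ms one, sample the recorder's amplitude on every tick, and let an `AnimatedContainer` track the resulting `decibalWidth`. Below is a minimal, self-contained sketch of that pattern, assuming the same version of the `record` package the dialog uses (a `Record` recorder with `hasPermission`, `start`, `stop` and `getAmplitude`, and an `Amplitude` exposing `current`/`max`); the widget name, constructor parameter and the clamp guard are illustrative additions, not taken from the dialog.

```dart
import 'dart:async';

import 'package:flutter/material.dart';
import 'package:record/record.dart';

/// Illustrative widget (not part of the dialog): polls the recorder's
/// amplitude every 100 ms and mirrors it with an AnimatedContainer,
/// like the decibalWidth circle in the diff above.
class AmplitudeIndicator extends StatefulWidget {
  const AmplitudeIndicator({Key? key, required this.path}) : super(key: key);

  /// Where the recording is written; the dialog derives this from a temp dir.
  final String path;

  @override
  State<AmplitudeIndicator> createState() => _AmplitudeIndicatorState();
}

class _AmplitudeIndicatorState extends State<AmplitudeIndicator> {
  static const maxDecibalWidth = 64.0;

  final _audioRecorder = Record();
  Amplitude? _amplitude;
  Timer? _timer;

  @override
  void initState() {
    super.initState();
    _startRecording();
  }

  Future<void> _startRecording() async {
    if (!await _audioRecorder.hasPermission()) return;
    await _audioRecorder.start(path: widget.path, encoder: AudioEncoder.AAC);
    // Same 100 ms cadence as the diff: frequent enough to look live,
    // cheap enough not to rebuild excessively.
    _timer = Timer.periodic(const Duration(milliseconds: 100), (_) async {
      final amplitude = await _audioRecorder.getAmplitude();
      if (mounted) setState(() => _amplitude = amplitude);
    });
  }

  @override
  void dispose() {
    _timer?.cancel();
    _audioRecorder.stop();
    super.dispose();
  }

  @override
  Widget build(BuildContext context) {
    final amplitude = _amplitude;
    // Same expression as decibalWidth in the diff (ratio of the dBFS
    // readings, doubled, scaled to a quarter of the outer circle), with a
    // clamp added here as a guard the diff itself does not have.
    final level = amplitude == null ? 0.0 : amplitude.current / amplitude.max;
    final decibalWidth =
        (level * 2 * (maxDecibalWidth / 4)).clamp(0.0, maxDecibalWidth).toDouble();
    return Container(
      width: maxDecibalWidth,
      height: maxDecibalWidth,
      alignment: Alignment.center,
      child: AnimatedContainer(
        duration: const Duration(milliseconds: 100),
        width: decibalWidth,
        height: decibalWidth,
        decoration: BoxDecoration(
          color: Theme.of(context).primaryColor,
          shape: BoxShape.circle,
        ),
      ),
    );
  }
}
```

Giving the `AnimatedContainer` the same 100 ms duration as the sampling timer means the circle glides between consecutive amplitude samples instead of jumping, which appears to be the point of the paired changes in the first and last hunks.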