mirror of https://github.com/mifi/lossless-cut
improve waveform and keyframes
- allow up to 1000 keyframes in buffer before recycling #563
- buffer the last 100 rendered waveform segments #260
also:
- implement timeout/kill for ffprobe after 30 sec
(branch: pull/987/head)
parent
848120da5a
commit
ac127e88ab
@ -1,43 +1,61 @@
|
||||
import { useState, useCallback, useRef, useEffect, useMemo } from 'react';
import sortBy from 'lodash/sortBy';
import useDebounceOld from 'react-use/lib/useDebounce'; // Want to phase out this

import { readFrames, findNearestKeyFrameTime as ffmpegFindNearestKeyFrameTime } from '../ffmpeg';

// Max number of keyframes kept in the buffer before the oldest are recycled (#563)
const maxKeyframes = 1000;
// const maxKeyframes = 100;

/**
 * Hook that reads keyframes around the current playback position (debounced)
 * and keeps a rolling buffer of up to `maxKeyframes` of them, keyed by time.
 *
 * @param {boolean} keyframesEnabled - whether keyframe reading is enabled at all
 * @param {string} filePath - path of the currently opened media file
 * @param {number} commandedTime - current (commanded) playback position in seconds
 * @param {object} mainVideoStream - ffprobe stream object for the main video stream
 * @param {number} detectedFps - detected frame rate, used for nearest-keyframe snapping
 * @param {number} ffmpegExtractWindow - how many seconds around `commandedTime` to scan
 * @returns {{ neighbouringKeyFrames: object[], findNearestKeyFrameTime: Function }}
 */
export default ({ keyframesEnabled, filePath, commandedTime, mainVideoStream, detectedFps, ffmpegExtractWindow }) => {
  // Holds the in-flight readFrames promise so we never run two reads concurrently
  const readingKeyframesPromise = useRef();
  // Buffered keyframes keyed by frame time, so re-reads of overlapping windows dedupe naturally
  const [neighbouringKeyFramesMap, setNeighbouringKeyFrames] = useState({});
  const neighbouringKeyFrames = useMemo(() => Object.values(neighbouringKeyFramesMap), [neighbouringKeyFramesMap]);

  const findNearestKeyFrameTime = useCallback(({ time, direction }) => ffmpegFindNearestKeyFrameTime({ frames: neighbouringKeyFrames, time, direction, fps: detectedFps }), [neighbouringKeyFrames, detectedFps]);

  // Reset the buffer whenever a different file is opened
  useEffect(() => setNeighbouringKeyFrames({}), [filePath]);

  useDebounceOld(() => {
    let aborted = false;

    (async () => {
      // See getIntervalAroundTime
      // We still want to calculate keyframes even if not shouldShowKeyframes because maybe we want to be able to step to the closest keyframe
      const shouldRun = keyframesEnabled && filePath && mainVideoStream && commandedTime != null && !readingKeyframesPromise.current;
      if (!shouldRun) return;

      try {
        const promise = readFrames({ filePath, aroundTime: commandedTime, stream: mainVideoStream.index, window: ffmpegExtractWindow });
        readingKeyframesPromise.current = promise;
        const newFrames = await promise;
        // Effect re-ran (deps changed) while we were reading; discard stale result
        if (aborted) return;
        const newKeyFrames = newFrames.filter((frame) => frame.keyframe);
        // console.log(newFrames);
        setNeighbouringKeyFrames((existingKeyFramesMap) => {
          let existingFrames = Object.values(existingKeyFramesMap);
          // Recycle: drop the oldest entries (by createdAt) to make room for the new ones
          if (existingFrames.length >= maxKeyframes) {
            existingFrames = sortBy(existingFrames, 'createdAt').slice(newKeyFrames.length);
          }
          const toObj = (map) => Object.fromEntries(map.map((frame) => [frame.time, frame]));
          // New frames win over existing ones at the same time
          return {
            ...toObj(existingFrames),
            ...toObj(newKeyFrames),
          };
        });
      } catch (err) {
        console.error('Failed to read keyframes', err);
      } finally {
        readingKeyframesPromise.current = undefined;
      }
    })();

    return () => {
      aborted = true;
    };
  }, 500, [keyframesEnabled, filePath, commandedTime, mainVideoStream, ffmpegExtractWindow]);

  return {
    neighbouringKeyFrames, findNearestKeyFrameTime,
  };
};
|
||||
|
||||
@ -1,40 +1,61 @@
|
||||
import { useState, useRef, useEffect } from 'react';
import sortBy from 'lodash/sortBy';
import useDebounceOld from 'react-use/lib/useDebounce'; // Want to phase out this
import { waveformColor } from '../colors';

import { renderWaveformPng } from '../ffmpeg';

// Max number of rendered waveform segments kept before the oldest are recycled (#260)
const maxWaveforms = 100;
// const maxWaveforms = 3; // testing

/**
 * Hook that renders waveform PNG segments around the current playback position
 * (debounced) and buffers the last `maxWaveforms` of them, revoking the object
 * URLs of segments that get evicted from the buffer.
 *
 * @param {string} filePath - path of the currently opened media file
 * @param {number} commandedTime - current (commanded) playback position in seconds
 * @param {number} zoomedDuration - currently visible (zoomed) duration in seconds
 * @param {boolean} waveformEnabled - whether waveform rendering is enabled
 * @param {object} mainAudioStream - ffprobe stream object for the main audio stream
 * @param {boolean} shouldShowWaveform - whether the waveform is currently visible
 * @param {number} ffmpegExtractWindow - how many seconds around `commandedTime` to render
 * @returns {{ waveforms: object[] }} rendered segments; each presumably has `from`, `to`, `url`, `createdAt` — set by renderWaveformPng (defined elsewhere)
 */
export default ({ filePath, commandedTime, zoomedDuration, waveformEnabled, mainAudioStream, shouldShowWaveform, ffmpegExtractWindow }) => {
  // Holds the in-flight render promise so we never run two renders concurrently
  const creatingWaveformPromise = useRef();
  const [waveforms, setWaveforms] = useState([]);

  useDebounceOld(() => {
    let aborted = false;

    (async () => {
      // Skip if we already have a segment covering the commanded time
      const alreadyHaveWaveformAtCommandedTime = waveforms.some((waveform) => waveform.from < commandedTime && waveform.to > commandedTime);
      const shouldRun = filePath && mainAudioStream && commandedTime != null && shouldShowWaveform && waveformEnabled && !alreadyHaveWaveformAtCommandedTime && !creatingWaveformPromise.current;
      if (!shouldRun) return;

      try {
        const promise = renderWaveformPng({ filePath, aroundTime: commandedTime, window: ffmpegExtractWindow, color: waveformColor });
        creatingWaveformPromise.current = promise;
        const newWaveform = await promise;
        // Effect re-ran (deps changed) while rendering; discard stale result
        if (aborted) return;
        setWaveforms((currentWaveforms) => {
          const waveformsByCreatedAt = sortBy(currentWaveforms, 'createdAt');
          return [
            // cleanup old
            ...(currentWaveforms.length >= maxWaveforms ? waveformsByCreatedAt.slice(1) : waveformsByCreatedAt),
            newWaveform,
          ];
        });
      } catch (err) {
        console.error('Failed to render waveform', err);
      } finally {
        creatingWaveformPromise.current = undefined;
      }
    })();

    return () => {
      aborted = true;
    };
  }, 500, [filePath, commandedTime, zoomedDuration, waveformEnabled, mainAudioStream, shouldShowWaveform, waveforms, ffmpegExtractWindow]);

  // Revoke object URLs of waveforms that have been evicted from the buffer,
  // so the underlying blobs can be garbage collected
  const lastWaveformsRef = useRef([]);
  useEffect(() => {
    const removedWaveforms = lastWaveformsRef.current.filter((wf) => !waveforms.includes(wf));
    // Cleanup old
    // if (removedWaveforms.length > 0) console.log('cleanup waveforms', removedWaveforms.length);
    removedWaveforms.forEach((waveform) => URL.revokeObjectURL(waveform.url));
    lastWaveformsRef.current = waveforms;
  }, [waveforms]);

  // Reset the buffer when a different file is opened, and on unmount
  useEffect(() => setWaveforms([]), [filePath]);
  useEffect(() => () => setWaveforms([]), []);

  return { waveforms };
};
|
||||
|
||||
Loading…
Reference in New Issue