mirror of https://github.com/vector-im/riot-web

Add seeking and notes about clock desync

parent 9c752680ba
commit ebb6f1b602
@@ -31,6 +31,8 @@ limitations under the License.
     outline: none; // remove blue selection border
+    position: relative; // for progress bar support later on
+
     cursor: pointer;
 
     &::-webkit-slider-thumb {
         appearance: none; // default style override
 
@@ -41,6 +41,7 @@ interface IState {
 @replaceableComponent("views.audio_messages.AudioPlayer")
 export default class AudioPlayer extends React.PureComponent<IProps, IState> {
     private playPauseRef: RefObject<PlayPauseButton> = createRef();
+    private seekRef: RefObject<SeekBar> = createRef();
 
     constructor(props: IProps) {
         super(props);
@@ -66,6 +67,12 @@ export default class AudioPlayer extends React.PureComponent<IProps, IState> {
         if (ev.key === Key.SPACE) {
             ev.stopPropagation();
             this.playPauseRef.current?.toggle();
+        } else if (ev.key === Key.ARROW_LEFT) {
+            ev.stopPropagation();
+            this.seekRef.current?.left();
+        } else if (ev.key === Key.ARROW_RIGHT) {
+            ev.stopPropagation();
+            this.seekRef.current?.right();
         }
     };
 
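The shortcuts above only work if both refs are actually attached to rendered children. A minimal sketch of that wiring follows; the element structure, the handler name, and the prop names are assumptions for illustration, not the component's real render output.

public render(): ReactNode {
    // Hypothetical wiring: attach the refs so the key handler can delegate to the children.
    return <div tabIndex={0} onKeyDown={this.onKeyDown}>
        <PlayPauseButton ref={this.playPauseRef} playback={this.props.playback} />
        <SeekBar ref={this.seekRef} playback={this.props.playback} />
    </div>;
}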
@@ -46,6 +46,8 @@ interface ISeekCSS extends CSSProperties {
     '--fillTo': number;
 }
 
+const ARROW_SKIP_SECONDS = 5; // arbitrary
+
 @replaceableComponent("views.audio_messages.SeekBar")
 export default class SeekBar extends React.PureComponent<IProps, IState> {
     // We use an animation frame request to avoid overly spamming prop updates, even if we aren't
@@ -80,15 +82,21 @@ export default class SeekBar extends React.PureComponent<IProps, IState> {
     }
 
+    public left() {
+        console.log("@@ LEFT");
+        // noinspection JSIgnoredPromiseFromCall
+        this.props.playback.skipTo(this.props.playback.clockInfo.timeSeconds - ARROW_SKIP_SECONDS);
+    }
+
+    public right() {
+        console.log("@@ RIGHT");
+        // noinspection JSIgnoredPromiseFromCall
+        this.props.playback.skipTo(this.props.playback.clockInfo.timeSeconds + ARROW_SKIP_SECONDS);
+    }
+
     private onChange = (ev: ChangeEvent<HTMLInputElement>) => {
+        console.log('@@ CHANGE', ev.target.value);
         // Thankfully, onChange is only called when the user changes the value, not when we
         // change the value on the component. We can use this as a reliable "skip to X" function.
         //
         // noinspection JSIgnoredPromiseFromCall
         this.props.playback.skipTo(Number(ev.target.value) * this.props.playback.clockInfo.durationSeconds);
     };
 
     public render(): ReactNode {
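Because onChange multiplies the slider value by the clip duration before calling skipTo(), the range input is evidently treated as a 0..1 fraction of the clip. A small worked example of that mapping, with illustrative numbers only:

const sliderValue = 0.25;        // fraction reported by the range input's onChange
const durationSeconds = 120;     // clip length, as exposed by playback.clockInfo
const seekTarget = sliderValue * durationSeconds; // 30 seconds into the clip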
@@ -161,16 +161,9 @@ export class Playback extends EventEmitter implements IDestroyable {
     public async play() {
         // We can't restart a buffer source, so we need to create a new one if we hit the end
         if (this.state === PlaybackState.Stopped) {
-            if (this.source) {
-                this.source.disconnect();
-                this.source.removeEventListener("ended", this.onPlaybackEnd);
-            }
-
-            this.source = this.context.createBufferSource();
-            this.source.connect(this.context.destination);
-            this.source.buffer = this.audioBuf;
-            this.source.start(); // start immediately
-            this.source.addEventListener("ended", this.onPlaybackEnd);
+            this.disconnectSource();
+            this.makeNewSourceBuffer();
+            this.source.start();
         }
 
         // We use the context suspend/resume functions because it allows us to pause a source
@@ -180,6 +173,18 @@ export class Playback extends EventEmitter implements IDestroyable {
         this.emit(PlaybackState.Playing);
     }
 
+    private disconnectSource() {
+        this.source?.disconnect();
+        this.source?.removeEventListener("ended", this.onPlaybackEnd);
+    }
+
+    private makeNewSourceBuffer() {
+        this.source = this.context.createBufferSource();
+        this.source.buffer = this.audioBuf;
+        this.source.addEventListener("ended", this.onPlaybackEnd);
+        this.source.connect(this.context.destination);
+    }
+
     public async pause() {
         await this.context.suspend();
         this.emit(PlaybackState.Paused);
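The disconnectSource()/makeNewSourceBuffer() pair exists because an AudioBufferSourceNode is single-use: once start() has been called it cannot be restarted, so every replay (and, later, every seek) needs a fresh node wired back up. A standalone sketch of the same Web Audio pattern, independent of the Playback class and with assumed parameter names:

function makeSource(context: AudioContext, buffer: AudioBuffer, onEnded: () => void): AudioBufferSourceNode {
    const source = context.createBufferSource(); // a brand new single-use node
    source.buffer = buffer;                      // reuse the already-decoded audio data
    source.addEventListener("ended", onEnded);   // re-register the end-of-playback hook
    source.connect(context.destination);         // wire it to the output
    return source;
}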
@@ -194,4 +199,60 @@ export class Playback extends EventEmitter implements IDestroyable {
         if (this.isPlaying) await this.pause();
         else await this.play();
     }
+
+    public async skipTo(timeSeconds: number) {
+        // Dev note: this function talks a lot about clock desyncs. There is a clock running
+        // independently to the audio context and buffer so that accurate human-perceptible
+        // time can be exposed. The PlaybackClock class has more information, but the short
+        // version is that we need to line up the useful time (clip position) with the context
+        // time, and avoid as many deviations as possible as otherwise the user could see the
+        // wrong time, and we stop playback at the wrong time, etc.
+
+        timeSeconds = clamp(timeSeconds, 0, this.clock.durationSeconds);
+
+        // Track playing state so we don't cause seeking to start playing the track.
+        const isPlaying = this.isPlaying;
+
+        if (isPlaying) {
+            // Pause first so we can get an accurate measurement of time
+            await this.context.suspend();
+        }
+
+        // We can't simply tell the context/buffer to jump to a time, so we have to
+        // start a whole new buffer and start it from the new time offset.
+        const now = this.context.currentTime;
+        this.disconnectSource();
+        this.makeNewSourceBuffer();
+
+        // We have to resync the clock because it can get confused about where we're
+        // at in the audio clip.
+        this.clock.syncTo(now, timeSeconds);
+
+        // Always start the source to queue it up. We have to do this now (and pause
+        // quickly if we're not supposed to be playing) as otherwise the clock can desync
+        // when it comes time to the user hitting play. After a couple jumps, the user
+        // will have desynced the clock enough to be about 10-15 seconds off, while this
+        // keeps it as close to perfect as humans can perceive.
+        this.source.start(now, timeSeconds);
+
+        // Dev note: it's critical that the code gap between `this.source.start()` and
+        // `this.pause()` is as small as possible: we do not want to delay *anything*
+        // as that could cause a clock desync, or a buggy feeling as a single note plays
+        // during seeking.
+
+        if (isPlaying) {
+            // If we were playing before, continue the context so the clock doesn't desync.
+            await this.context.resume();
+        } else {
+            // As mentioned above, we'll have to pause the clip if we weren't supposed to
+            // be playing it just yet. If we didn't have this, the audio clip plays but all
+            // the states will be wrong: clock won't advance, pause state doesn't match the
+            // blaring noise leaving the user's speakers, etc.
+            //
+            // Also as mentioned, if the code gap is small enough then this should be
+            // executed immediately after the start time, leaving no feasible time for the
+            // user's speakers to play any sound.
+            await this.pause();
+        }
+    }
 }
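skipTo() relies on a clamp() helper that is not part of this diff; it is assumed to behave like lodash's clamp, keeping the requested position within the clip bounds. A minimal equivalent:

function clamp(value: number, min: number, max: number): number {
    // Keep seek targets inside [0, durationSeconds] so left()/right() near the clip
    // edges cannot produce negative times or overshoot the end of the clip.
    return Math.min(Math.max(value, min), max);
}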
@@ -18,7 +18,42 @@ import { SimpleObservable } from "matrix-widget-api";
 import { IDestroyable } from "../utils/IDestroyable";
 import { MatrixEvent } from "matrix-js-sdk/src/models/event";
 
-// Because keeping track of time is sufficiently complicated...
+/**
+ * Tracks accurate human-perceptible time for an audio clip, as informed
+ * by managed playback. This clock is tightly coupled with the operation
+ * of the Playback class, making assumptions about how the provided
+ * AudioContext will be used (suspended/resumed to preserve time, etc).
+ *
+ * But why do we need a clock? The AudioContext exposes time information,
+ * and so does the audio buffer, but not in a way that is useful for humans
+ * to perceive. The audio buffer time is often lagged behind the context
+ * time due to internal processing delays of the audio API. Additionally,
+ * the context's time is tracked from when it was first initialized/started,
+ * not related to positioning within the clip. However, the context time
+ * is the most accurate time we can use to determine position within the
+ * clip if we're fast enough to track the pauses and stops.
+ *
+ * As a result, we track every play, pause, stop, and seek event from the
+ * Playback class (kinda: it calls us, which is close enough to the same
+ * thing). These events are then tracked on the AudioContext time scale,
+ * with assumptions that code execution will result in negligible desync
+ * of the clock, or at least no perceptible difference in time. It's
+ * extremely important that the calling code, and the clock's own code,
+ * is extremely fast between the event happening and the clock time being
+ * tracked - anything more than a dozen milliseconds is likely to stack up
+ * poorly, leading to clock desync.
+ *
+ * Clock desync can be dangerous for the stability of the playback controls:
+ * if the clock thinks the user is somewhere else in the clip, it could
+ * inform the playback of the wrong place in time, leading to dead air in
+ * the output or, if severe enough, a clock that won't stop running while
+ * the audio is paused/stopped. Other examples include the clip stopping at
+ * 90% time due to playback ending, the clip playing from the wrong spot
+ * relative to the time, and negative clock time.
+ *
+ * Note that the clip duration is fed to the clock: this is to ensure that
+ * we have the most accurate time possible to present.
+ */
 export class PlaybackClock implements IDestroyable {
     private clipStart = 0;
     private stopped = true;
@@ -41,6 +76,12 @@ export class PlaybackClock implements IDestroyable {
     }
 
     public get timeSeconds(): number {
+        // The modulo is to ensure that we're only looking at the most recent clip
+        // time, as the context is long-running and multiple plays might not be
+        // informed to us (if the control is looping, for example). By taking the
+        // remainder of the division operation, we're assuming that playback is
+        // incomplete or stopped, thus giving an accurate position within the active
+        // clip segment.
         return (this.context.currentTime - this.clipStart) % this.clipDuration;
     }
 
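A worked example of the getter's arithmetic, with illustrative numbers: the AudioContext has been running for 95 seconds, the current clip segment started at context time 80, and the clip is 30 seconds long.

const contextCurrentTime = 95;  // AudioContext.currentTime (runs for the life of the context)
const clipStart = 80;           // context time at which this clip segment began
const clipDuration = 30;        // clip length in seconds
const position = (contextCurrentTime - clipStart) % clipDuration; // 15 seconds into the clip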
@@ -49,7 +90,7 @@ export class PlaybackClock implements IDestroyable {
     }
 
     private checkTime = () => {
-        const now = this.timeSeconds;
+        const now = this.timeSeconds; // calculated dynamically
         if (this.lastCheck !== now) {
             this.observable.update([now, this.durationSeconds]);
             this.lastCheck = now;
@@ -82,8 +123,9 @@ export class PlaybackClock implements IDestroyable {
         }
 
         if (!this.timerId) {
-            // case to number because the types are wrong
-            // 100ms interval to make sure the time is as accurate as possible
+            // cast to number because the types are wrong
+            // 100ms interval to make sure the time is as accurate as possible without
+            // being overly insane
             this.timerId = <number><any>setInterval(this.checkTime, 100);
         }
     }
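The double cast on setInterval works around the typings clash between Node's Timeout return type and the browser's numeric timer id. In browser-only code, an alternative with the same effect would be window.setInterval, sketched here with a placeholder callback:

const timerId: number = window.setInterval(() => {
    // poll the clock, as checkTime() does above
}, 100);
clearInterval(timerId); // later, when the clock is torn down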
@@ -92,6 +134,12 @@ export class PlaybackClock implements IDestroyable {
         this.stopped = true;
     }
 
+    public syncTo(contextTime: number, clipTime: number) {
+        this.clipStart = contextTime - clipTime;
+        this.stopped = false; // count as a mid-stream pause (if we were stopped)
+        this.checkTime();
+    }
+
     public destroy() {
         this.observable.close();
         if (this.timerId) clearInterval(this.timerId);
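To see why syncTo() re-bases clipStart, consider a seek to 12 seconds that happens while the context clock reads 200 seconds (numbers are illustrative): the getter above will immediately report the seek target.

const contextTime = 200;                   // AudioContext.currentTime at the moment of the seek
const clipTime = 12;                       // position the user seeked to, in seconds
const clipStart = contextTime - clipTime;  // 188: the clock's new base
const reported = contextTime - clipStart;  // 12: timeSeconds matches the seek target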