Cap recording length, and warn at 10s remaining

See diff for details. Note that this introduces an "Uploading" state which is not currently used.

At the moment, if a user hits the maximum time, their recording will be broken. This is expected to be fixed in a future PR.
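
For illustration only (not part of this commit): a minimal sketch of how a consumer can subscribe to the new recording states. It assumes a VoiceRecording instance obtained from the VoiceRecordingStore, as in the MessageComposer change below; the watchRecording helper is hypothetical.

import {RecordingState, VoiceRecording} from "../../../voice/VoiceRecording";

// Hypothetical helper, mirroring the listener added to MessageComposer below.
function watchRecording(recording: VoiceRecording) {
    recording.on(RecordingState.EndingSoon, ({secondsLeft}) => {
        // Fired once, when TARGET_WARN_TIME_LEFT (10s) of recording time remains.
        console.log(`Recording ends automatically in ${secondsLeft}s`);
    });
    recording.on(RecordingState.Ended, () => {
        // Fired when stop() completes, including the automatic stop at TARGET_MAX_LENGTH.
        console.log("Recording ended");
    });
    // Uploading/Uploaded are emitted around upload(); nothing subscribes to them yet.
    recording.on(RecordingState.Uploading, () => console.log("Uploading voice message..."));
    recording.on(RecordingState.Uploaded, () => console.log("Voice message uploaded"));
}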
pull/21833/head
Travis Ralston 2021-04-14 21:15:06 -06:00
parent 22219e0e80
commit 0677cf866c
4 changed files with 69 additions and 8 deletions

View File

@@ -35,6 +35,8 @@ import ActiveWidgetStore from "../../../stores/ActiveWidgetStore";
import {replaceableComponent} from "../../../utils/replaceableComponent";
import VoiceRecordComposerTile from "./VoiceRecordComposerTile";
import {VoiceRecordingStore} from "../../../stores/VoiceRecordingStore";
import {RecordingState} from "../../../voice/VoiceRecording";
import Tooltip, {Alignment} from "../elements/Tooltip";
function ComposerAvatar(props) {
const MemberStatusMessageAvatar = sdk.getComponent('avatars.MemberStatusMessageAvatar');
@@ -191,6 +193,7 @@ export default class MessageComposer extends React.Component {
joinedConference: WidgetStore.instance.isJoinedToConferenceIn(this.props.room),
isComposerEmpty: true,
haveRecording: false,
recordingTimeLeftSeconds: null, // when set to a number, shows a countdown tooltip
};
}
@@ -331,7 +334,17 @@
}
_onVoiceStoreUpdate = () => {
this.setState({haveRecording: !!VoiceRecordingStore.instance.activeRecording});
const recording = VoiceRecordingStore.instance.activeRecording;
this.setState({haveRecording: !!recording});
if (recording) {
// We show a brief heads-up that the recording will automatically end soon. The 3s
// display time is completely arbitrary. Note that we don't need to deregister the listener
// because the recording instance will clean that up for us.
recording.on(RecordingState.EndingSoon, ({secondsLeft}) => {
this.setState({recordingTimeLeftSeconds: secondsLeft});
setTimeout(() => this.setState({recordingTimeLeftSeconds: null}), 3000);
});
}
};
render() {
@@ -412,8 +425,18 @@ export default class MessageComposer extends React.Component {
);
}
let recordingTooltip;
const secondsLeft = Math.round(this.state.recordingTimeLeftSeconds);
if (secondsLeft) {
recordingTooltip = <Tooltip
label={_t("%(seconds)ss left", {seconds: secondsLeft})}
alignment={Alignment.Top} yOffset={-50}
/>;
}
return (
<div className="mx_MessageComposer mx_GroupLayout">
{recordingTooltip}
<div className="mx_MessageComposer_wrapper">
<ReplyPreview permalinkCreator={this.props.permalinkCreator} />
<div className="mx_MessageComposer_row">

View File

@@ -1473,6 +1473,7 @@
"The conversation continues here.": "The conversation continues here.",
"This room has been replaced and is no longer active.": "This room has been replaced and is no longer active.",
"You do not have permission to post to this room": "You do not have permission to post to this room",
"%(seconds)ss left": "%(seconds)ss left",
"Bold": "Bold",
"Italics": "Italics",
"Strikethrough": "Strikethrough",

View File

@@ -73,9 +73,7 @@ export class VoiceRecordingStore extends AsyncStoreWithClient<IState> {
*/
public disposeRecording(): Promise<void> {
if (this.state.recording) {
// Stop for good measure, but completely async because we're not concerned with this
// passing or failing.
this.state.recording.stop().catch(e => console.error("Error stopping recording", e));
this.state.recording.destroy(); // stops internally
}
return this.updateState({recording: null});
}

View File

@@ -20,17 +20,29 @@ import {MatrixClient} from "matrix-js-sdk/src/client";
import CallMediaHandler from "../CallMediaHandler";
import {SimpleObservable} from "matrix-widget-api";
import {clamp} from "../utils/numbers";
import EventEmitter from "events";
import {IDestroyable} from "../utils/IDestroyable";
const CHANNELS = 1; // stereo isn't important
const SAMPLE_RATE = 48000; // 48kHz is what WebRTC uses. 12kHz is where we lose quality.
const BITRATE = 24000; // 24kbps is pretty high quality for our use case in Opus.
const TARGET_MAX_LENGTH = 120; // 2 minutes in seconds. Somewhat arbitrary, though longer == larger files.
const TARGET_WARN_TIME_LEFT = 10; // 10 seconds, also somewhat arbitrary.
export interface IRecordingUpdate {
waveform: number[]; // floating points between 0 (low) and 1 (high).
timeSeconds: number; // float
}
export class VoiceRecording {
export enum RecordingState {
Started = "started",
EndingSoon = "ending_soon", // emits an object with a single numerical value: secondsLeft
Ended = "ended",
Uploading = "uploading",
Uploaded = "uploaded",
}
export class VoiceRecording extends EventEmitter implements IDestroyable {
private recorder: Recorder;
private recorderContext: AudioContext;
private recorderSource: MediaStreamAudioSourceNode;
@@ -40,9 +52,12 @@ export class VoiceRecording {
private buffer = new Uint8Array(0);
private mxc: string;
private recording = false;
private stopping = false;
private haveWarned = false; // whether or not EndingSoon has been fired
private observable: SimpleObservable<IRecordingUpdate>;
public constructor(private client: MatrixClient) {
super();
}
private async makeRecorder() {
@@ -124,7 +139,7 @@
return this.mxc;
}
private tryUpdateLiveData = (ev: AudioProcessingEvent) => {
private processAudioUpdate = (ev: AudioProcessingEvent) => {
if (!this.recording) return;
// The time domain is the input to the FFT, which means we use an array of the same
@@ -150,6 +165,17 @@
waveform: translatedData,
timeSeconds: ev.playbackTime,
});
// Now that we've updated the data/waveform, let's do a time check. We don't want to
// go horribly over the limit. We also emit a warning state if needed.
const secondsLeft = TARGET_MAX_LENGTH - ev.playbackTime;
if (secondsLeft <= 0) {
// noinspection JSIgnoredPromiseFromCall - we aren't concerned with it overlapping
this.stop();
} else if (secondsLeft <= TARGET_WARN_TIME_LEFT && !this.haveWarned) {
this.emit(RecordingState.EndingSoon, {secondsLeft});
this.haveWarned = true;
}
};
public async start(): Promise<void> {
@@ -164,9 +190,10 @@
}
this.observable = new SimpleObservable<IRecordingUpdate>();
await this.makeRecorder();
this.recorderProcessor.addEventListener("audioprocess", this.tryUpdateLiveData);
this.recorderProcessor.addEventListener("audioprocess", this.processAudioUpdate);
await this.recorder.start();
this.recording = true;
this.emit(RecordingState.Started);
}
public async stop(): Promise<Uint8Array> {
@@ -174,6 +201,9 @@
throw new Error("No recording to stop");
}
if (this.stopping) return;
this.stopping = true;
// Disconnect the source early to start shutting down resources
this.recorderSource.disconnect();
await this.recorder.stop();
@@ -187,12 +217,19 @@
// Finally do our post-processing and clean up
this.recording = false;
this.recorderProcessor.removeEventListener("audioprocess", this.tryUpdateLiveData);
this.recorderProcessor.removeEventListener("audioprocess", this.processAudioUpdate);
await this.recorder.close();
this.emit(RecordingState.Ended);
return this.buffer;
}
public destroy() {
// noinspection JSIgnoredPromiseFromCall - not concerned about stop() being called async here
this.stop();
this.removeAllListeners();
}
public async upload(): Promise<string> {
if (!this.hasRecording) {
throw new Error("No recording available to upload");
@@ -200,11 +237,13 @@
if (this.mxc) return this.mxc;
this.emit(RecordingState.Uploading);
this.mxc = await this.client.uploadContent(new Blob([this.buffer], {
type: "audio/ogg",
}), {
onlyContentUri: false, // to stop the warnings in the console
}).then(r => r['content_uri']);
this.emit(RecordingState.Uploaded);
return this.mxc;
}
}