dont re-prepare voice messages (#7897)
Squashed commits:

* dont reprepare voice messages
* remove debug
* test Playback
* test RecordingPlayback
* forgotten copyright
* add comments

Signed-off-by: Kerry Archibald <kerrya@element.io>
parent 16e67e7716
commit 4bf42babc7
@@ -133,6 +133,13 @@ export class Playback extends EventEmitter implements IDestroyable {
     }
 
     public async prepare() {
+        // don't attempt to decode the media again
+        // AudioContext.decodeAudioData detaches the array buffer `this.buf`
+        // meaning it cannot be re-read
+        if (this.state !== PlaybackState.Decoding) {
+            return;
+        }
+
         // The point where we use an audio element is fairly arbitrary, though we don't want
         // it to be too low. As of writing, voice messages want to show a waveform but audio
         // messages do not. Using an audio element means we can't show a waveform preview, so
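The guard above makes prepare() idempotent: once decoding has finished, a second call returns early instead of handing the already-detached buffer back to decodeAudioData. A minimal usage sketch of the resulting behaviour (names taken from the diff; the state transitions are the ones asserted by the new Playback tests further down, and the helper name here is illustrative):

import { Playback } from "../../src/audio/Playback";

// Illustrative helper (name is ours, not the project's): preparing twice is safe now.
async function prepareVoiceMessage(buffer: ArrayBuffer): Promise<Playback> {
    const playback = new Playback(buffer);   // starts in the Decoding phase
    await playback.prepare();                // decodes once, moves to Stopped
    await playback.prepare();                // returns early; the detached buffer is never re-read
    return playback;
}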
@@ -40,8 +40,9 @@ export default abstract class AudioPlayerBase extends React.PureComponent<IProps
     constructor(props: IProps) {
         super(props);
 
+        // Playback instances can be reused in the composer
         this.state = {
-            playbackPhase: PlaybackState.Decoding, // default assumption
+            playbackPhase: this.props.playback.currentState,
         };
 
         // We don't need to de-register: the class handles this for us internally
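The constructor change matters because a Playback instance created in the composer may already have been prepared by the time a player component mounts for it; hard-coding Decoding left the play button disabled in that case. A simplified, self-contained sketch of the seeding pattern (the props and state shapes here are stand-ins, not the project's real IProps/IState):

import React from 'react';

// Stand-in for the real Playback class; only the field used below.
type PlaybackLike = { currentState: string };

interface ISketchProps { playback: PlaybackLike; }
interface ISketchState { playbackPhase: string; }

class AudioPlayerSketch extends React.PureComponent<ISketchProps, ISketchState> {
    public constructor(props: ISketchProps) {
        super(props);
        // Seed from the instance: if prepare() already ran, this reflects the real
        // phase instead of a hard-coded "decoding" default.
        this.state = { playbackPhase: props.playback.currentState };
    }

    public render(): React.ReactNode {
        return <span>{ this.state.playbackPhase }</span>;
    }
}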
@@ -59,7 +59,9 @@ export default class PlayPauseButton extends React.PureComponent<IProps> {
             'mx_PlayPauseButton_pause': isPlaying,
             'mx_PlayPauseButton_disabled': isDisabled,
         });
+
         return <AccessibleTooltipButton
+            data-test-id='play-pause-button'
             className={classes}
             title={isPlaying ? _t("Pause") : _t("Play")}
             onClick={this.onClick}
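The data-test-id attribute added here is what the new component tests select on, through a findByTestId helper imported from test-utils. That helper is not part of this diff; a plausible minimal shape, assuming enzyme wrappers as used by the tests below, is:

import { ReactWrapper } from 'enzyme';

// Assumed helper (the real test-utils implementation is not shown in this PR):
// find rendered elements by the data-test-id attribute.
export const findByTestId = (component: ReactWrapper, id: string): ReactWrapper =>
    component.find(`[data-test-id="${id}"]`);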
@@ -36,6 +36,7 @@ export default class RecordingPlayback extends AudioPlayerBase {
 
     protected renderComponent(): ReactNode {
         const shapeClass = !this.isWaveformable ? 'mx_VoiceMessagePrimaryContainer_noWaveform' : '';
 
         return (
             <div className={'mx_MediaBody mx_VoiceMessagePrimaryContainer ' + shapeClass}>
                 <PlayPauseButton playback={this.props.playback} playbackPhase={this.state.playbackPhase} />
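isWaveformable is pre-existing and its definition is not part of this hunk. Judging from the waveform tests added below, it gates the waveform on the room's timeline rendering type; a sketch consistent with those tests (an assumption, not necessarily the exact implementation):

import { TimelineRenderingType } from '../../contexts/RoomContext'; // path illustrative; see the test imports below

// Sketch: the waveform is skipped for the compact contexts the it.each() tables below
// mark as "does not render waveform", and shown for the rest.
function isWaveformable(renderingType: TimelineRenderingType): boolean {
    return ![
        TimelineRenderingType.Notification,
        TimelineRenderingType.File,
        TimelineRenderingType.Pinned,
    ].includes(renderingType);
}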
@@ -0,0 +1,165 @@
/*
Copyright 2022 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

import { mocked } from 'jest-mock';
import { logger } from 'matrix-js-sdk/src/logger';

import '../skinned-sdk'; // Must be first for skinning to work

import { createAudioContext, decodeOgg } from '../../src/audio/compat';
import { Playback, PlaybackState } from "../../src/audio/Playback";

jest.mock('../../src/audio/compat', () => ({
    createAudioContext: jest.fn(),
    decodeOgg: jest.fn(),
}));

describe('Playback', () => {
    const mockAudioBufferSourceNode = {
        addEventListener: jest.fn(),
        connect: jest.fn(),
        start: jest.fn(),
    };
    const mockAudioContext = {
        decodeAudioData: jest.fn(),
        suspend: jest.fn(),
        resume: jest.fn(),
        createBufferSource: jest.fn().mockReturnValue(mockAudioBufferSourceNode),
    };

    const mockAudioBuffer = {
        duration: 99,
        getChannelData: jest.fn(),
    };

    const mockChannelData = new Float32Array();

    beforeEach(() => {
        jest.spyOn(logger, 'error').mockRestore();
        mockAudioBuffer.getChannelData.mockClear().mockReturnValue(mockChannelData);
        mockAudioContext.decodeAudioData.mockReset().mockImplementation(
            (_b, callback) => callback(mockAudioBuffer),
        );
        mockAudioContext.resume.mockClear().mockResolvedValue(undefined);
        mockAudioContext.suspend.mockClear().mockResolvedValue(undefined);
        mocked(decodeOgg).mockClear().mockResolvedValue(new ArrayBuffer(1));
        mocked(createAudioContext).mockReturnValue(mockAudioContext as unknown as AudioContext);
    });

    it('initialises correctly', () => {
        const buffer = new ArrayBuffer(8);

        const playback = new Playback(buffer);

        expect(playback.sizeBytes).toEqual(8);
        expect(playback.clockInfo).toBeTruthy();
        expect(playback.currentState).toEqual(PlaybackState.Decoding);
    });

    it('toggles playback on from stopped state', async () => {
        const buffer = new ArrayBuffer(8);
        const playback = new Playback(buffer);
        await playback.prepare();
        // state is Stopped
        await playback.toggle();

        expect(mockAudioBufferSourceNode.start).toHaveBeenCalled();
        expect(mockAudioContext.resume).toHaveBeenCalled();
        expect(playback.currentState).toEqual(PlaybackState.Playing);
    });

    it('toggles playback to paused from playing state', async () => {
        const buffer = new ArrayBuffer(8);
        const playback = new Playback(buffer);
        await playback.prepare();
        await playback.toggle();
        expect(playback.currentState).toEqual(PlaybackState.Playing);

        await playback.toggle();

        expect(mockAudioContext.suspend).toHaveBeenCalled();
        expect(playback.currentState).toEqual(PlaybackState.Paused);
    });

    it('stop playbacks', async () => {
        const buffer = new ArrayBuffer(8);
        const playback = new Playback(buffer);
        await playback.prepare();
        await playback.toggle();
        expect(playback.currentState).toEqual(PlaybackState.Playing);

        await playback.stop();

        expect(mockAudioContext.suspend).toHaveBeenCalled();
        expect(playback.currentState).toEqual(PlaybackState.Stopped);
    });

    describe('prepare()', () => {
        it('decodes audio data when not greater than 5mb', async () => {
            const buffer = new ArrayBuffer(8);

            const playback = new Playback(buffer);

            await playback.prepare();

            expect(mockAudioContext.decodeAudioData).toHaveBeenCalledTimes(1);
            expect(mockAudioBuffer.getChannelData).toHaveBeenCalledWith(0);

            // clock was updated
            expect(playback.clockInfo.durationSeconds).toEqual(mockAudioBuffer.duration);

            expect(playback.currentState).toEqual(PlaybackState.Stopped);
        });

        it('tries to decode ogg when decodeAudioData fails', async () => {
            // stub logger to keep console clean from expected error
            jest.spyOn(logger, 'error').mockReturnValue(undefined);
            jest.spyOn(logger, 'warn').mockReturnValue(undefined);

            const buffer = new ArrayBuffer(8);
            const decodingError = new Error('test');
            mockAudioContext.decodeAudioData.mockImplementationOnce(
                (_b, _callback, error) => error(decodingError),
            ).mockImplementationOnce(
                (_b, callback) => callback(mockAudioBuffer),
            );

            const playback = new Playback(buffer);

            await playback.prepare();

            expect(mockAudioContext.decodeAudioData).toHaveBeenCalledTimes(2);
            expect(decodeOgg).toHaveBeenCalled();

            // clock was updated
            expect(playback.clockInfo.durationSeconds).toEqual(mockAudioBuffer.duration);

            expect(playback.currentState).toEqual(PlaybackState.Stopped);
        });

        it('does not try to re-decode audio', async () => {
            const buffer = new ArrayBuffer(8);
            const playback = new Playback(buffer);
            await playback.prepare();
            expect(playback.currentState).toEqual(PlaybackState.Stopped);

            await playback.prepare();

            // only called once in first prepare
            expect(mockAudioContext.decodeAudioData).toHaveBeenCalledTimes(1);
        });
    });
});
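The component tests below additionally use a flushPromises helper from test-utils to let the asynchronous decode settle after mounting. It is also not part of this diff; a common minimal form (an assumption about the helper, not its actual source) is:

// Assumed helper: wait one macrotask so pending promise chains (e.g. prepare())
// have resolved before making assertions.
export const flushPromises = (): Promise<void> =>
    new Promise(resolve => setTimeout(resolve));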
@@ -0,0 +1,161 @@
/*
Copyright 2022 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

import React from 'react';
import { mount } from 'enzyme';
import { mocked } from 'jest-mock';
import { logger } from 'matrix-js-sdk/src/logger';
import { act } from 'react-dom/test-utils';

import '../../../skinned-sdk';
import RecordingPlayback from '../../../../src/components/views/audio_messages/RecordingPlayback';
import { Playback } from '../../../../src/audio/Playback';
import RoomContext, { TimelineRenderingType } from '../../../../src/contexts/RoomContext';
import { createAudioContext } from '../../../../src/audio/compat';
import { findByTestId, flushPromises } from '../../../test-utils';
import PlaybackWaveform from '../../../../src/components/views/audio_messages/PlaybackWaveform';

jest.mock('../../../../src/audio/compat', () => ({
    createAudioContext: jest.fn(),
    decodeOgg: jest.fn().mockResolvedValue({}),
}));

describe('<RecordingPlayback />', () => {
    const mockAudioBufferSourceNode = {
        addEventListener: jest.fn(),
        connect: jest.fn(),
        start: jest.fn(),
    };

    const mockAudioContext = {
        decodeAudioData: jest.fn(),
        suspend: jest.fn(),
        resume: jest.fn(),
        currentTime: 0,
        createBufferSource: jest.fn().mockReturnValue(mockAudioBufferSourceNode),
    };

    const mockAudioBuffer = {
        duration: 99,
        getChannelData: jest.fn(),
    };

    const mockChannelData = new Float32Array();

    const defaultRoom = { roomId: '!room:server.org', timelineRenderingType: TimelineRenderingType.File };
    const getComponent = (props: { playback: Playback }, room = defaultRoom) =>
        mount(<RecordingPlayback {...props} />, {
            wrappingComponent: RoomContext.Provider,
            wrappingComponentProps: { value: room },
        });

    beforeEach(() => {
        jest.spyOn(logger, 'error').mockRestore();
        mockAudioBuffer.getChannelData.mockClear().mockReturnValue(mockChannelData);
        mockAudioContext.decodeAudioData.mockReset().mockImplementation(
            (_b, callback) => callback(mockAudioBuffer),
        );
        mocked(createAudioContext).mockReturnValue(mockAudioContext as unknown as AudioContext);
    });

    const getPlayButton = component => findByTestId(component, 'play-pause-button').at(0);

    it('renders recording playback', () => {
        const playback = new Playback(new ArrayBuffer(8));
        const component = getComponent({ playback });
        expect(component).toBeTruthy();
    });

    it('disables play button while playback is decoding', async () => {
        const playback = new Playback(new ArrayBuffer(8));
        const component = getComponent({ playback });
        expect(getPlayButton(component).props().disabled).toBeTruthy();
    });

    it('enables play button when playback is finished decoding', async () => {
        const playback = new Playback(new ArrayBuffer(8));
        const component = getComponent({ playback });
        await flushPromises();
        component.setProps({});
        expect(getPlayButton(component).props().disabled).toBeFalsy();
    });

    it('displays error when playback decoding fails', async () => {
        // stub logger to keep console clean from expected error
        jest.spyOn(logger, 'error').mockReturnValue(undefined);
        jest.spyOn(logger, 'warn').mockReturnValue(undefined);
        mockAudioContext.decodeAudioData.mockImplementation(
            (_b, _cb, error) => error(new Error('oh no')),
        );
        const playback = new Playback(new ArrayBuffer(8));
        const component = getComponent({ playback });
        await flushPromises();
        expect(component.find('.text-warning').length).toBeFalsy();
    });

    it('displays pre-prepared playback with correct playback phase', async () => {
        const playback = new Playback(new ArrayBuffer(8));
        await playback.prepare();
        const component = getComponent({ playback });
        // playback already decoded, button is not disabled
        expect(getPlayButton(component).props().disabled).toBeFalsy();
        expect(component.find('.text-warning').length).toBeFalsy();
    });

    it('toggles playback on play pause button click', async () => {
        const playback = new Playback(new ArrayBuffer(8));
        jest.spyOn(playback, 'toggle').mockResolvedValue(undefined);
        await playback.prepare();
        const component = getComponent({ playback });

        act(() => {
            getPlayButton(component).simulate('click');
        });

        expect(playback.toggle).toHaveBeenCalled();
    });

    it.each([
        [TimelineRenderingType.Notification],
        [TimelineRenderingType.File],
        [TimelineRenderingType.Pinned],
    ])('does not render waveform when timeline rendering type for room is %s', (timelineRenderingType) => {
        const playback = new Playback(new ArrayBuffer(8));
        const room = {
            ...defaultRoom,
            timelineRenderingType,
        };
        const component = getComponent({ playback }, room);

        expect(component.find(PlaybackWaveform).length).toBeFalsy();
    });

    it.each([
        [TimelineRenderingType.Room],
        [TimelineRenderingType.Thread],
        [TimelineRenderingType.ThreadsList],
        [TimelineRenderingType.Search],
    ])('renders waveform when timeline rendering type for room is %s', (timelineRenderingType) => {
        const playback = new Playback(new ArrayBuffer(8));
        const room = {
            ...defaultRoom,
            timelineRenderingType,
        };
        const component = getComponent({ playback }, room);

        expect(component.find(PlaybackWaveform).length).toBeTruthy();
    });
});
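One note on the mocks shared by both suites: they stub the callback form of AudioContext.decodeAudioData(buffer, onSuccess, onError), so the success and error paths can be driven synchronously. Presumably the production code wraps that same callback API in a promise (an assumption; the wrapper itself is not part of this diff), roughly:

// Sketch of the assumed call pattern the mocks above are written against:
// the callback form of decodeAudioData wrapped in a promise.
function decodeWithCallbacks(context: AudioContext, buffer: ArrayBuffer): Promise<AudioBuffer> {
    return new Promise((resolve, reject) => context.decodeAudioData(buffer, resolve, reject));
}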