Remove abandoned Voice Broadcasts labs flag (#28548)

* Remove abandoned Voice Broadcasts labs flag

Any existing voice broadcasts will be shown as a series of voice messages, which will play in sequence as normal

Signed-off-by: Michael Telatynski <7t3chguy@gmail.com>

* Remove dead code

Signed-off-by: Michael Telatynski <7t3chguy@gmail.com>

* Update snapshots

Signed-off-by: Michael Telatynski <7t3chguy@gmail.com>

---------

Signed-off-by: Michael Telatynski <7t3chguy@gmail.com>
toger5/guest-link-room-access-prompt
Michael Telatynski 2024-12-02 10:53:27 +00:00 committed by GitHub
parent 5d72735b1f
commit d8ebc68aa8
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
174 changed files with 29 additions and 13632 deletions

View File

@ -592,4 +592,3 @@ The following are undocumented or intended for developer use only.
2. `sync_timeline_limit`
3. `dangerously_allow_unsafe_and_insecure_passwords`
4. `latex_maths_delims`: An optional setting to override the default delimiters used for maths parsing. See https://github.com/matrix-org/matrix-react-sdk/pull/5939 for details. Only used when `feature_latex_maths` is enabled.
5. `voice_broadcast.chunk_length`: Target chunk length in seconds for the Voice Broadcast feature currently under development.

View File

@ -393,9 +393,3 @@
@import "./views/voip/_LegacyCallViewHeader.pcss";
@import "./views/voip/_LegacyCallViewSidebar.pcss";
@import "./views/voip/_VideoFeed.pcss";
@import "./voice-broadcast/atoms/_LiveBadge.pcss";
@import "./voice-broadcast/atoms/_VoiceBroadcastControl.pcss";
@import "./voice-broadcast/atoms/_VoiceBroadcastHeader.pcss";
@import "./voice-broadcast/atoms/_VoiceBroadcastRecordingConnectionError.pcss";
@import "./voice-broadcast/atoms/_VoiceBroadcastRoomSubtitle.pcss";
@import "./voice-broadcast/molecules/_VoiceBroadcastBody.pcss";

View File

@ -22,20 +22,6 @@ Please see LICENSE files in the repository root for full details.
pointer-events: none; /* makes the avatar non-draggable */
}
}
.mx_UserMenu_userAvatarLive {
align-items: center;
background-color: $alert;
border-radius: 6px;
color: $live-badge-color;
display: flex;
height: 12px;
justify-content: center;
left: 25px;
position: absolute;
top: 20px;
width: 12px;
}
}
.mx_UserMenu_contextMenuButton {

View File

@ -256,10 +256,6 @@ Please see LICENSE files in the repository root for full details.
mask-image: url("@vector-im/compound-design-tokens/icons/mic-on-solid.svg");
}
.mx_MessageComposer_voiceBroadcast::before {
mask-image: url("$(res)/img/element-icons/live.svg");
}
.mx_MessageComposer_plain_text::before {
mask-image: url("$(res)/img/element-icons/room/composer/plain_text.svg");
}

View File

@ -1,23 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
.mx_LiveBadge {
align-items: center;
background-color: $alert;
border-radius: 2px;
color: $live-badge-color;
display: inline-flex;
font-size: $font-12px;
font-weight: var(--cpd-font-weight-semibold);
gap: $spacing-4;
padding: 2px 4px;
}
.mx_LiveBadge--grey {
background-color: $quaternary-content;
}

View File

@ -1,28 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
.mx_VoiceBroadcastControl {
align-items: center;
background-color: $background;
border-radius: 50%;
color: $secondary-content;
display: flex;
flex: 0 0 32px;
height: 32px;
justify-content: center;
width: 32px;
}
.mx_VoiceBroadcastControl-recording {
color: $alert;
}
.mx_VoiceBroadcastControl-play .mx_Icon {
left: 1px;
position: relative;
}

View File

@ -1,60 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
.mx_VoiceBroadcastHeader {
align-items: flex-start;
display: flex;
gap: $spacing-8;
line-height: 20px;
margin-bottom: $spacing-16;
min-width: 0;
}
.mx_VoiceBroadcastHeader_content {
flex-grow: 1;
min-width: 0;
}
.mx_VoiceBroadcastHeader_room_wrapper {
align-items: center;
display: flex;
gap: 4px;
justify-content: flex-start;
}
.mx_VoiceBroadcastHeader_room {
font-size: $font-12px;
font-weight: var(--cpd-font-weight-semibold);
min-width: 0;
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
}
.mx_VoiceBroadcastHeader_line {
align-items: center;
color: $secondary-content;
font-size: $font-12px;
display: flex;
gap: $spacing-4;
.mx_Spinner {
flex: 0 0 14px;
padding: 1px;
}
span {
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
}
}
.mx_VoiceBroadcastHeader_mic--clickable {
cursor: pointer;
}

View File

@ -1,18 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2023 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
.mx_VoiceBroadcastRecordingConnectionError {
align-items: center;
color: $alert;
display: flex;
gap: $spacing-12;
svg path {
fill: $alert;
}
}

View File

@ -1,14 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
.mx_RoomTile .mx_RoomTile_titleContainer .mx_RoomTile_subtitle.mx_RoomTile_subtitle--voice-broadcast {
align-items: center;
color: $alert;
display: flex;
gap: $spacing-4;
}

View File

@ -1,75 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
.mx_VoiceBroadcastBody {
background-color: $quinary-content;
border-radius: 8px;
color: $secondary-content;
display: inline-block;
font-size: $font-12px;
padding: $spacing-12;
width: 271px;
.mx_Clock {
line-height: 1;
}
}
.mx_VoiceBroadcastBody--pip {
background-color: $system;
box-shadow: 0 2px 8px 0 #0000004a;
}
.mx_VoiceBroadcastBody--small {
display: flex;
gap: $spacing-8;
width: 192px;
.mx_VoiceBroadcastHeader {
margin-bottom: 0;
}
.mx_VoiceBroadcastControl {
align-self: center;
}
.mx_LiveBadge {
margin-top: 4px;
}
}
.mx_VoiceBroadcastBody_divider {
background-color: $quinary-content;
border: 0;
height: 1px;
margin: $spacing-12 0;
}
.mx_VoiceBroadcastBody_controls {
align-items: center;
display: flex;
gap: $spacing-32;
justify-content: center;
margin-bottom: $spacing-8;
}
.mx_VoiceBroadcastBody_timerow {
display: flex;
justify-content: space-between;
}
.mx_AccessibleButton.mx_VoiceBroadcastBody_blockButton {
display: flex;
gap: $spacing-8;
}
.mx_VoiceBroadcastBody__small-close {
right: 8px;
position: absolute;
top: 8px;
}

View File

@ -240,11 +240,6 @@ $location-live-secondary-color: #deddfd;
}
/* ******************** */
/* Voice Broadcast */
/* ******************** */
$live-badge-color: #ffffff;
/* ******************** */
/* One-off colors */
/* ******************** */
$progressbar-bg-color: var(--cpd-color-gray-200);

View File

@ -226,11 +226,6 @@ $location-live-color: #5c56f5;
$location-live-secondary-color: #deddfd;
/* ******************** */
/* Voice Broadcast */
/* ******************** */
$live-badge-color: #ffffff;
/* ******************** */
body {
color-scheme: dark;
}

View File

@ -325,11 +325,6 @@ $location-live-color: #5c56f5;
$location-live-secondary-color: #deddfd;
/* ******************** */
/* Voice Broadcast */
/* ******************** */
$live-badge-color: #ffffff;
/* ******************** */
body {
color-scheme: light;
}

View File

@ -355,11 +355,6 @@ $location-live-color: var(--cpd-color-purple-900);
$location-live-secondary-color: var(--cpd-color-purple-600);
/* ******************** */
/* Voice Broadcast */
/* ******************** */
$live-badge-color: var(--cpd-color-icon-on-solid-primary);
/* ******************** */
body {
color-scheme: light;
}

View File

@ -10,7 +10,6 @@ import type { IWidget } from "matrix-widget-api";
import type { BLURHASH_FIELD } from "../utils/image-media";
import type { JitsiCallMemberEventType, JitsiCallMemberContent } from "../call-types";
import type { ILayoutStateEvent, WIDGET_LAYOUT_EVENT_TYPE } from "../stores/widgets/types";
import type { VoiceBroadcastInfoEventContent, VoiceBroadcastInfoEventType } from "../voice-broadcast/types";
import type { EncryptedFile } from "matrix-js-sdk/src/types";
// Extend Matrix JS SDK types via Typescript declaration merging to support unspecced event fields and types
@ -37,9 +36,6 @@ declare module "matrix-js-sdk/src/types" {
"im.vector.modular.widgets": IWidget | {};
[WIDGET_LAYOUT_EVENT_TYPE]: ILayoutStateEvent;
// Unstable voice broadcast state events
[VoiceBroadcastInfoEventType]: VoiceBroadcastInfoEventContent;
// Element custom state events
"im.vector.web.settings": Record<string, any>;
"org.matrix.room.preview_urls": { disable: boolean };
@ -78,7 +74,5 @@ declare module "matrix-js-sdk/src/types" {
waveform?: number[];
};
"org.matrix.msc3245.voice"?: {};
"io.element.voice_broadcast_chunk"?: { sequence: number };
}
}

View File

@ -175,13 +175,6 @@ export interface IConfigOptions {
sync_timeline_limit?: number;
dangerously_allow_unsafe_and_insecure_passwords?: boolean; // developer option
voice_broadcast?: {
// length per voice chunk in seconds
chunk_length?: number;
// max voice broadcast length in seconds
max_length?: number;
};
user_notice?: {
title: string;
description: string;

View File

@ -55,8 +55,6 @@ import { OpenInviteDialogPayload } from "./dispatcher/payloads/OpenInviteDialogP
import { findDMForUser } from "./utils/dm/findDMForUser";
import { getJoinedNonFunctionalMembers } from "./utils/room/getJoinedNonFunctionalMembers";
import { localNotificationsAreSilenced } from "./utils/notifications";
import { SdkContextClass } from "./contexts/SDKContext";
import { showCantStartACallDialog } from "./voice-broadcast/utils/showCantStartACallDialog";
import { isNotNull } from "./Typeguards";
import { BackgroundAudio } from "./audio/BackgroundAudio";
import { Jitsi } from "./widgets/Jitsi.ts";
@ -859,15 +857,6 @@ export default class LegacyCallHandler extends EventEmitter {
return;
}
// Pause current broadcast, if any
SdkContextClass.instance.voiceBroadcastPlaybacksStore.getCurrent()?.pause();
if (SdkContextClass.instance.voiceBroadcastRecordingsStore.getCurrent()) {
// Do not start a call, if recording a broadcast
showCantStartACallDialog();
return;
}
// We might be using managed hybrid widgets
if (isManagedHybridWidgetEnabled(room)) {
await addManagedHybridWidget(room);

View File

@ -49,8 +49,6 @@ import { SdkContextClass } from "./contexts/SDKContext";
import { localNotificationsAreSilenced, createLocalNotificationSettingsIfNeeded } from "./utils/notifications";
import { getIncomingCallToastKey, IncomingCallToast } from "./toasts/IncomingCallToast";
import ToastStore from "./stores/ToastStore";
import { VoiceBroadcastChunkEventType, VoiceBroadcastInfoEventType } from "./voice-broadcast";
import { getSenderName } from "./utils/event/getSenderName";
import { stripPlainReply } from "./utils/Reply";
import { BackgroundAudio } from "./audio/BackgroundAudio";
@ -81,17 +79,6 @@ const msgTypeHandlers: Record<string, (event: MatrixEvent) => string | null> = {
return TextForEvent.textForLocationEvent(event)();
},
[MsgType.Audio]: (event: MatrixEvent): string | null => {
if (event.getContent()?.[VoiceBroadcastChunkEventType]) {
if (event.getContent()?.[VoiceBroadcastChunkEventType]?.sequence === 1) {
// Show a notification for the first broadcast chunk.
// At this point a user received something to listen to.
return _t("notifier|io.element.voice_broadcast_chunk", { senderName: getSenderName(event) });
}
// Mute other broadcast chunks
return null;
}
return TextForEvent.textForEvent(event, MatrixClientPeg.safeGet());
},
};
@ -460,8 +447,6 @@ class NotifierClass extends TypedEventEmitter<keyof EmittedEvents, EmittedEvents
// XXX: exported for tests
public evaluateEvent(ev: MatrixEvent): void {
// Mute notifications for broadcast info events
if (ev.getType() === VoiceBroadcastInfoEventType) return;
let roomId = ev.getRoomId()!;
if (LegacyCallHandler.instance.getSupportsVirtualRooms()) {
// Attempt to translate a virtual room to a native one

View File

@ -46,10 +46,6 @@ export const DEFAULTS: DeepReadonly<IConfigOptions> = {
logo: require("../res/img/element-desktop-logo.svg").default,
url: "https://element.io/get-started",
},
voice_broadcast: {
chunk_length: 2 * 60, // two minutes
max_length: 4 * 60 * 60, // four hours
},
feedback: {
existing_issues_url:

View File

@ -36,7 +36,6 @@ import AccessibleButton from "./components/views/elements/AccessibleButton";
import RightPanelStore from "./stores/right-panel/RightPanelStore";
import { highlightEvent, isLocationEvent } from "./utils/EventUtils";
import { ElementCall } from "./models/Call";
import { textForVoiceBroadcastStoppedEvent, VoiceBroadcastInfoEventType } from "./voice-broadcast";
import { getSenderName } from "./utils/event/getSenderName";
import PosthogTrackers from "./PosthogTrackers.ts";
@ -906,7 +905,6 @@ const stateHandlers: IHandlers = {
// TODO: Enable support for m.widget event type (https://github.com/vector-im/element-web/issues/13111)
"im.vector.modular.widgets": textForWidgetEvent,
[WIDGET_LAYOUT_EVENT_TYPE]: textForWidgetLayoutEvent,
[VoiceBroadcastInfoEventType]: textForVoiceBroadcastStoppedEvent,
};
// Add all the Mjolnir stuff to the renderer

View File

@ -119,7 +119,6 @@ import { ValidatedServerConfig } from "../../utils/ValidatedServerConfig";
import { isLocalRoom } from "../../utils/localRoom/isLocalRoom";
import { SDKContext, SdkContextClass } from "../../contexts/SDKContext";
import { viewUserDeviceSettings } from "../../actions/handlers/viewUserDeviceSettings";
import { cleanUpBroadcasts, VoiceBroadcastResumer } from "../../voice-broadcast";
import GenericToast from "../views/toasts/GenericToast";
import RovingSpotlightDialog from "../views/dialogs/spotlight/SpotlightDialog";
import { findDMForUser } from "../../utils/dm/findDMForUser";
@ -227,7 +226,6 @@ export default class MatrixChat extends React.PureComponent<IProps, IState> {
private focusNext: FocusNextType;
private subTitleStatus: string;
private prevWindowWidth: number;
private voiceBroadcastResumer?: VoiceBroadcastResumer;
private readonly loggedInView = createRef<LoggedInViewType>();
private dispatcherRef?: string;
@ -501,7 +499,6 @@ export default class MatrixChat extends React.PureComponent<IProps, IState> {
window.removeEventListener("resize", this.onWindowResized);
this.stores.accountPasswordStore.clearPassword();
this.voiceBroadcastResumer?.destroy();
}
private onWindowResized = (): void => {
@ -651,10 +648,9 @@ export default class MatrixChat extends React.PureComponent<IProps, IState> {
break;
case "logout":
LegacyCallHandler.instance.hangupAllCalls();
Promise.all([
...[...CallStore.instance.connectedCalls].map((call) => call.disconnect()),
cleanUpBroadcasts(this.stores),
]).finally(() => Lifecycle.logout(this.stores.oidcClientStore));
Promise.all([...[...CallStore.instance.connectedCalls].map((call) => call.disconnect())]).finally(() =>
Lifecycle.logout(this.stores.oidcClientStore),
);
break;
case "require_registration":
startAnyRegistrationFlow(payload as any);
@ -1679,8 +1675,6 @@ export default class MatrixChat extends React.PureComponent<IProps, IState> {
});
}
});
this.voiceBroadcastResumer = new VoiceBroadcastResumer(cli);
}
/**

View File

@ -6,7 +6,7 @@ SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import React, { MutableRefObject, ReactNode, useContext, useRef } from "react";
import React, { MutableRefObject, ReactNode, useRef } from "react";
import { CallEvent, CallState, MatrixCall } from "matrix-js-sdk/src/webrtc/call";
import { logger } from "matrix-js-sdk/src/logger";
import { Optional } from "matrix-events-sdk";
@ -21,19 +21,7 @@ import { WidgetLayoutStore } from "../../stores/widgets/WidgetLayoutStore";
import ActiveWidgetStore, { ActiveWidgetStoreEvent } from "../../stores/ActiveWidgetStore";
import { ViewRoomPayload } from "../../dispatcher/payloads/ViewRoomPayload";
import { UPDATE_EVENT } from "../../stores/AsyncStore";
import { SDKContext, SdkContextClass } from "../../contexts/SDKContext";
import {
useCurrentVoiceBroadcastPreRecording,
useCurrentVoiceBroadcastRecording,
VoiceBroadcastPlayback,
VoiceBroadcastPlaybackBody,
VoiceBroadcastPreRecording,
VoiceBroadcastPreRecordingPip,
VoiceBroadcastRecording,
VoiceBroadcastRecordingPip,
VoiceBroadcastSmallPlaybackBody,
} from "../../voice-broadcast";
import { useCurrentVoiceBroadcastPlayback } from "../../voice-broadcast/hooks/useCurrentVoiceBroadcastPlayback";
import { SdkContextClass } from "../../contexts/SDKContext";
import { WidgetPip } from "../views/pips/WidgetPip";
const SHOW_CALL_IN_STATES = [
@ -46,9 +34,6 @@ const SHOW_CALL_IN_STATES = [
];
interface IProps {
voiceBroadcastRecording: Optional<VoiceBroadcastRecording>;
voiceBroadcastPreRecording: Optional<VoiceBroadcastPreRecording>;
voiceBroadcastPlayback: Optional<VoiceBroadcastPlayback>;
movePersistedElement: MutableRefObject<(() => void) | undefined>;
}
@ -245,52 +230,9 @@ class PipContainerInner extends React.Component<IProps, IState> {
this.setState({ showWidgetInPip, persistentWidgetId, persistentRoomId });
}
private createVoiceBroadcastPlaybackPipContent(voiceBroadcastPlayback: VoiceBroadcastPlayback): CreatePipChildren {
const content =
this.state.viewedRoomId === voiceBroadcastPlayback.infoEvent.getRoomId() ? (
<VoiceBroadcastPlaybackBody playback={voiceBroadcastPlayback} pip={true} />
) : (
<VoiceBroadcastSmallPlaybackBody playback={voiceBroadcastPlayback} />
);
return ({ onStartMoving }) => (
<div key={`vb-playback-${voiceBroadcastPlayback.infoEvent.getId()}`} onMouseDown={onStartMoving}>
{content}
</div>
);
}
private createVoiceBroadcastPreRecordingPipContent(
voiceBroadcastPreRecording: VoiceBroadcastPreRecording,
): CreatePipChildren {
return ({ onStartMoving }) => (
<div key="vb-pre-recording" onMouseDown={onStartMoving}>
<VoiceBroadcastPreRecordingPip voiceBroadcastPreRecording={voiceBroadcastPreRecording} />
</div>
);
}
private createVoiceBroadcastRecordingPipContent(
voiceBroadcastRecording: VoiceBroadcastRecording,
): CreatePipChildren {
return ({ onStartMoving }) => (
<div key={`vb-recording-${voiceBroadcastRecording.infoEvent.getId()}`} onMouseDown={onStartMoving}>
<VoiceBroadcastRecordingPip recording={voiceBroadcastRecording} />
</div>
);
}
public render(): ReactNode {
const pipMode = true;
let pipContent: Array<CreatePipChildren> = [];
if (this.props.voiceBroadcastRecording) {
pipContent = [this.createVoiceBroadcastRecordingPipContent(this.props.voiceBroadcastRecording)];
} else if (this.props.voiceBroadcastPreRecording) {
pipContent = [this.createVoiceBroadcastPreRecordingPipContent(this.props.voiceBroadcastPreRecording)];
} else if (this.props.voiceBroadcastPlayback) {
pipContent = [this.createVoiceBroadcastPlaybackPipContent(this.props.voiceBroadcastPlayback)];
}
const pipContent: Array<CreatePipChildren> = [];
if (this.state.primaryCall) {
// get a ref to call inside the current scope
@ -338,24 +280,7 @@ class PipContainerInner extends React.Component<IProps, IState> {
}
export const PipContainer: React.FC = () => {
const sdkContext = useContext(SDKContext);
const voiceBroadcastPreRecordingStore = sdkContext.voiceBroadcastPreRecordingStore;
const { currentVoiceBroadcastPreRecording } = useCurrentVoiceBroadcastPreRecording(voiceBroadcastPreRecordingStore);
const voiceBroadcastRecordingsStore = sdkContext.voiceBroadcastRecordingsStore;
const { currentVoiceBroadcastRecording } = useCurrentVoiceBroadcastRecording(voiceBroadcastRecordingsStore);
const voiceBroadcastPlaybacksStore = sdkContext.voiceBroadcastPlaybacksStore;
const { currentVoiceBroadcastPlayback } = useCurrentVoiceBroadcastPlayback(voiceBroadcastPlaybacksStore);
const movePersistedElement = useRef<() => void>();
return (
<PipContainerInner
voiceBroadcastPlayback={currentVoiceBroadcastPlayback}
voiceBroadcastPreRecording={currentVoiceBroadcastPreRecording}
voiceBroadcastRecording={currentVoiceBroadcastRecording}
movePersistedElement={movePersistedElement}
/>
);
return <PipContainerInner movePersistedElement={movePersistedElement} />;
};

View File

@ -40,8 +40,6 @@ import { UPDATE_SELECTED_SPACE } from "../../stores/spaces";
import UserIdentifierCustomisations from "../../customisations/UserIdentifier";
import PosthogTrackers from "../../PosthogTrackers";
import { ViewHomePagePayload } from "../../dispatcher/payloads/ViewHomePagePayload";
import { Icon as LiveIcon } from "../../../res/img/compound/live-8px.svg";
import { VoiceBroadcastRecording, VoiceBroadcastRecordingsStoreEvent } from "../../voice-broadcast";
import { SDKContext } from "../../contexts/SDKContext";
import { shouldShowFeedback } from "../../utils/Feedback";
import DarkLightModeSvg from "../../../res/img/element-icons/roomlist/dark-light-mode.svg";
@ -58,7 +56,6 @@ interface IState {
isDarkTheme: boolean;
isHighContrast: boolean;
selectedSpace?: Room | null;
showLiveAvatarAddon: boolean;
}
const toRightOf = (rect: PartialDOMRect): MenuProps => {
@ -94,7 +91,6 @@ export default class UserMenu extends React.Component<IProps, IState> {
isDarkTheme: this.isUserOnDarkTheme(),
isHighContrast: this.isUserOnHighContrastTheme(),
selectedSpace: SpaceStore.instance.activeSpaceRoom,
showLiveAvatarAddon: this.context.voiceBroadcastRecordingsStore.hasCurrent(),
};
}
@ -102,19 +98,9 @@ export default class UserMenu extends React.Component<IProps, IState> {
return !!getHomePageUrl(SdkConfig.get(), this.context.client!);
}
private onCurrentVoiceBroadcastRecordingChanged = (recording: VoiceBroadcastRecording | null): void => {
this.setState({
showLiveAvatarAddon: recording !== null,
});
};
public componentDidMount(): void {
OwnProfileStore.instance.on(UPDATE_EVENT, this.onProfileUpdate);
SpaceStore.instance.on(UPDATE_SELECTED_SPACE, this.onSelectedSpaceUpdate);
this.context.voiceBroadcastRecordingsStore.on(
VoiceBroadcastRecordingsStoreEvent.CurrentChanged,
this.onCurrentVoiceBroadcastRecordingChanged,
);
this.dispatcherRef = defaultDispatcher.register(this.onAction);
this.themeWatcherRef = SettingsStore.watchSetting("theme", null, this.onThemeChanged);
}
@ -125,10 +111,6 @@ export default class UserMenu extends React.Component<IProps, IState> {
defaultDispatcher.unregister(this.dispatcherRef);
OwnProfileStore.instance.off(UPDATE_EVENT, this.onProfileUpdate);
SpaceStore.instance.off(UPDATE_SELECTED_SPACE, this.onSelectedSpaceUpdate);
this.context.voiceBroadcastRecordingsStore.off(
VoiceBroadcastRecordingsStoreEvent.CurrentChanged,
this.onCurrentVoiceBroadcastRecordingChanged,
);
}
private isUserOnDarkTheme(): boolean {
@ -435,12 +417,6 @@ export default class UserMenu extends React.Component<IProps, IState> {
name = <div className="mx_UserMenu_name">{displayName}</div>;
}
const liveAvatarAddon = this.state.showLiveAvatarAddon ? (
<div className="mx_UserMenu_userAvatarLive" data-testid="user-menu-live-vb">
<LiveIcon className="mx_Icon_8" />
</div>
) : null;
return (
<div className="mx_UserMenu">
<ContextMenuButton
@ -459,7 +435,6 @@ export default class UserMenu extends React.Component<IProps, IState> {
size={avatarSize + "px"}
className="mx_UserMenu_userAvatar_BaseAvatar"
/>
{liveAvatarAddon}
</div>
{name}
{this.renderContextMenu()}

View File

@ -12,7 +12,6 @@ import { KnownMembership } from "matrix-js-sdk/src/types";
import { BaseGrouper } from "./BaseGrouper";
import MessagePanel, { WrappedEvent } from "../MessagePanel";
import { VoiceBroadcastInfoEventType } from "../../../voice-broadcast";
import DMRoomMap from "../../../utils/DMRoomMap";
import { _t } from "../../../languageHandler";
import DateSeparator from "../../views/messages/DateSeparator";
@ -53,11 +52,6 @@ export class CreationGrouper extends BaseGrouper {
return false;
}
if (VoiceBroadcastInfoEventType === eventType) {
// always show voice broadcast info events in timeline
return false;
}
if (event.isState() && event.getSender() === createEvent.getSender()) {
return true;
}

View File

@ -1,45 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022, 2023 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import React, { MutableRefObject } from "react";
import { toLeftOrRightOf } from "../../structures/ContextMenu";
import IconizedContextMenu, {
IconizedContextMenuOptionList,
IconizedContextMenuRadio,
} from "../context_menus/IconizedContextMenu";
interface Props {
containerRef: MutableRefObject<HTMLElement | null>;
currentDevice: MediaDeviceInfo | null;
devices: MediaDeviceInfo[];
onDeviceSelect: (device: MediaDeviceInfo) => void;
}
export const DevicesContextMenu: React.FC<Props> = ({ containerRef, currentDevice, devices, onDeviceSelect }) => {
const deviceOptions = devices.map((d: MediaDeviceInfo) => {
return (
<IconizedContextMenuRadio
key={d.deviceId}
active={d.deviceId === currentDevice?.deviceId}
onClick={() => onDeviceSelect(d)}
label={d.label}
/>
);
});
return (
<IconizedContextMenu
mountAsChild={false}
onFinished={() => {}}
{...(containerRef.current ? toLeftOrRightOf(containerRef.current.getBoundingClientRect(), 0) : {})}
>
<IconizedContextMenuOptionList>{deviceOptions}</IconizedContextMenuOptionList>
</IconizedContextMenu>
);
};

View File

@ -1,21 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import React from "react";
import { _t } from "../../../languageHandler";
import Modal from "../../../Modal";
import InfoDialog from "./InfoDialog";
export const createCantStartVoiceMessageBroadcastDialog = (): void => {
Modal.createDialog(InfoDialog, {
title: _t("voice_message|cant_start_broadcast_title"),
description: <p>{_t("voice_message|cant_start_broadcast_description")}</p>,
hasCloseButton: true,
});
};

View File

@ -6,14 +6,12 @@ SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import { Feature, ServerSupport } from "matrix-js-sdk/src/feature";
import { IRedactOpts, MatrixEvent, RelationType } from "matrix-js-sdk/src/matrix";
import { IRedactOpts, MatrixEvent } from "matrix-js-sdk/src/matrix";
import React from "react";
import { _t } from "../../../languageHandler";
import { MatrixClientPeg } from "../../../MatrixClientPeg";
import Modal from "../../../Modal";
import { isVoiceBroadcastStartedEvent } from "../../../voice-broadcast/utils/isVoiceBroadcastStartedEvent";
import ErrorDialog from "./ErrorDialog";
import TextInputDialog from "./TextInputDialog";
@ -70,18 +68,6 @@ export function createRedactEventDialog({
const cli = MatrixClientPeg.safeGet();
const withRelTypes: Pick<IRedactOpts, "with_rel_types"> = {};
// redact related events if this is a voice broadcast started event and
// server has support for relation based redactions
if (isVoiceBroadcastStartedEvent(mxEvent)) {
const relationBasedRedactionsSupport = cli.canSupport.get(Feature.RelationBasedRedactions);
if (
relationBasedRedactionsSupport &&
relationBasedRedactionsSupport !== ServerSupport.Unsupported
) {
withRelTypes.with_rel_types = [RelationType.Reference];
}
}
try {
onCloseDialog?.();
await cli.redactEvent(roomId, eventId, undefined, {

View File

@ -22,7 +22,6 @@ import { AccountDataExplorer, RoomAccountDataExplorer } from "./devtools/Account
import SettingsFlag from "../elements/SettingsFlag";
import { SettingLevel } from "../../../settings/SettingLevel";
import ServerInfo from "./devtools/ServerInfo";
import { Features } from "../../../settings/Settings";
import CopyableText from "../elements/CopyableText";
import RoomNotifications from "./devtools/RoomNotifications";
@ -100,7 +99,6 @@ const DevtoolsDialog: React.FC<IProps> = ({ roomId, threadRootId, onFinished })
<SettingsFlag name="developerMode" level={SettingLevel.ACCOUNT} />
<SettingsFlag name="showHiddenEventsInTimeline" level={SettingLevel.DEVICE} />
<SettingsFlag name="enableWidgetScreenshots" level={SettingLevel.ACCOUNT} />
<SettingsFlag name={Features.VoiceBroadcastForceSmallChunks} level={SettingLevel.DEVICE} />
</div>
</BaseTool>
);

View File

@ -58,7 +58,6 @@ import { ALTERNATE_KEY_NAME } from "../../../accessibility/KeyboardShortcuts";
import { Action } from "../../../dispatcher/actions";
import { ShowThreadPayload } from "../../../dispatcher/payloads/ShowThreadPayload";
import { GetRelationsForEvent, IEventTileType } from "../rooms/EventTile";
import { VoiceBroadcastInfoEventType } from "../../../voice-broadcast/types";
import { ButtonEvent } from "../elements/AccessibleButton";
import PinningUtils from "../../../utils/PinningUtils";
import PosthogTrackers from "../../../PosthogTrackers.ts";
@ -354,8 +353,7 @@ export default class MessageActionBar extends React.PureComponent<IMessageAction
* until cross-platform support
* (PSF-1041)
*/
!M_BEACON_INFO.matches(this.props.mxEvent.getType()) &&
!(this.props.mxEvent.getType() === VoiceBroadcastInfoEventType);
!M_BEACON_INFO.matches(this.props.mxEvent.getType());
return inNotThreadTimeline && isAllowedMessageType;
}

View File

@ -41,7 +41,6 @@ import MjolnirBody from "./MjolnirBody";
import MBeaconBody from "./MBeaconBody";
import { DecryptionFailureBody } from "./DecryptionFailureBody";
import { GetRelationsForEvent, IEventTileOps } from "../rooms/EventTile";
import { VoiceBroadcastBody, VoiceBroadcastInfoEventType, VoiceBroadcastInfoState } from "../../../voice-broadcast";
// onMessageAllowed is handled internally
interface IProps extends Omit<IBodyProps, "onMessageAllowed" | "mediaEventHelper"> {
@ -276,10 +275,6 @@ export default class MessageEvent extends React.Component<IProps> implements IMe
if (M_LOCATION.matches(type) || (type === EventType.RoomMessage && msgtype === MsgType.Location)) {
BodyType = MLocationBody;
}
if (type === VoiceBroadcastInfoEventType && content?.state === VoiceBroadcastInfoState.Started) {
BodyType = VoiceBroadcastBody;
}
}
if (SettingsStore.getValue("feature_mjolnir")) {

View File

@ -48,14 +48,9 @@ import MessageComposerButtons from "./MessageComposerButtons";
import AccessibleButton, { ButtonEvent } from "../elements/AccessibleButton";
import { ViewRoomPayload } from "../../../dispatcher/payloads/ViewRoomPayload";
import { isLocalRoom } from "../../../utils/localRoom/isLocalRoom";
import { Features } from "../../../settings/Settings";
import { VoiceMessageRecording } from "../../../audio/VoiceMessageRecording";
import { SendWysiwygComposer, sendMessage, getConversionFunctions } from "./wysiwyg_composer/";
import { MatrixClientProps, withMatrixClientHOC } from "../../../contexts/MatrixClientContext";
import { setUpVoiceBroadcastPreRecording } from "../../../voice-broadcast/utils/setUpVoiceBroadcastPreRecording";
import { SdkContextClass } from "../../../contexts/SDKContext";
import { VoiceBroadcastInfoState } from "../../../voice-broadcast";
import { createCantStartVoiceMessageBroadcastDialog } from "../dialogs/CantStartVoiceMessageBroadcastDialog";
import { UIFeature } from "../../../settings/UIFeature";
import { formatTimeLeft } from "../../../DateUtils";
import RoomReplacedSvg from "../../../../res/img/room_replaced.svg";
@ -101,7 +96,6 @@ interface IState {
isStickerPickerOpen: boolean;
showStickersButton: boolean;
showPollsButton: boolean;
showVoiceBroadcastButton: boolean;
isWysiwygLabEnabled: boolean;
isRichTextEnabled: boolean;
initialComposerContent: string;
@ -127,7 +121,6 @@ export class MessageComposer extends React.Component<IProps, IState> {
public static defaultProps = {
compact: false,
showVoiceBroadcastButton: false,
isRichTextEnabled: true,
};
@ -155,7 +148,6 @@ export class MessageComposer extends React.Component<IProps, IState> {
isStickerPickerOpen: false,
showStickersButton: SettingsStore.getValue("MessageComposerInput.showStickersButton"),
showPollsButton: SettingsStore.getValue("MessageComposerInput.showPollsButton"),
showVoiceBroadcastButton: SettingsStore.getValue(Features.VoiceBroadcast),
isWysiwygLabEnabled: isWysiwygLabEnabled,
isRichTextEnabled: isRichTextEnabled,
initialComposerContent: initialComposerContent,
@ -250,7 +242,6 @@ export class MessageComposer extends React.Component<IProps, IState> {
SettingsStore.monitorSetting("MessageComposerInput.showStickersButton", null);
SettingsStore.monitorSetting("MessageComposerInput.showPollsButton", null);
SettingsStore.monitorSetting(Features.VoiceBroadcast, null);
SettingsStore.monitorSetting("feature_wysiwyg_composer", null);
this.dispatcherRef = dis.register(this.onAction);
@ -301,12 +292,6 @@ export class MessageComposer extends React.Component<IProps, IState> {
}
break;
}
case Features.VoiceBroadcast: {
if (this.state.showVoiceBroadcastButton !== settingUpdatedPayload.newValue) {
this.setState({ showVoiceBroadcastButton: !!settingUpdatedPayload.newValue });
}
break;
}
case "feature_wysiwyg_composer": {
if (this.state.isWysiwygLabEnabled !== settingUpdatedPayload.newValue) {
this.setState({ isWysiwygLabEnabled: Boolean(settingUpdatedPayload.newValue) });
@ -533,13 +518,7 @@ export class MessageComposer extends React.Component<IProps, IState> {
}
private onRecordStartEndClick = (): void => {
const currentBroadcastRecording = SdkContextClass.instance.voiceBroadcastRecordingsStore.getCurrent();
if (currentBroadcastRecording && currentBroadcastRecording.getState() !== VoiceBroadcastInfoState.Stopped) {
createCantStartVoiceMessageBroadcastDialog();
} else {
this.voiceRecordingButton.current?.onRecordStartEndClick();
}
this.voiceRecordingButton.current?.onRecordStartEndClick();
if (this.context.narrow) {
this.toggleButtonMenu();
@ -698,17 +677,6 @@ export class MessageComposer extends React.Component<IProps, IState> {
isRichTextEnabled={this.state.isRichTextEnabled}
onComposerModeClick={this.onRichTextToggle}
toggleButtonMenu={this.toggleButtonMenu}
showVoiceBroadcastButton={this.state.showVoiceBroadcastButton}
onStartVoiceBroadcastClick={() => {
setUpVoiceBroadcastPreRecording(
this.props.room,
MatrixClientPeg.safeGet(),
SdkContextClass.instance.voiceBroadcastPlaybacksStore,
SdkContextClass.instance.voiceBroadcastRecordingsStore,
SdkContextClass.instance.voiceBroadcastPreRecordingStore,
);
this.toggleButtonMenu();
}}
/>
)}
{showSendButton && (

View File

@ -43,8 +43,6 @@ interface IProps {
showPollsButton: boolean;
showStickersButton: boolean;
toggleButtonMenu: () => void;
showVoiceBroadcastButton: boolean;
onStartVoiceBroadcastClick: () => void;
isRichTextEnabled: boolean;
onComposerModeClick: () => void;
}
@ -80,7 +78,6 @@ const MessageComposerButtons: React.FC<IProps> = (props: IProps) => {
uploadButton(), // props passed via UploadButtonContext
showStickersButton(props),
voiceRecordingButton(props, narrow),
startVoiceBroadcastButton(props),
props.showPollsButton ? pollButton(room, props.relation) : null,
showLocationButton(props, room, matrixClient),
];
@ -100,7 +97,6 @@ const MessageComposerButtons: React.FC<IProps> = (props: IProps) => {
moreButtons = [
showStickersButton(props),
voiceRecordingButton(props, narrow),
startVoiceBroadcastButton(props),
props.showPollsButton ? pollButton(room, props.relation) : null,
showLocationButton(props, room, matrixClient),
];
@ -254,18 +250,6 @@ function showStickersButton(props: IProps): ReactElement | null {
) : null;
}
const startVoiceBroadcastButton: React.FC<IProps> = (props: IProps): ReactElement | null => {
return props.showVoiceBroadcastButton ? (
<CollapsibleButton
key="start_voice_broadcast"
className="mx_MessageComposer_button"
iconClassName="mx_MessageComposer_voiceBroadcast"
onClick={props.onStartVoiceBroadcastClick}
title={_t("voice_broadcast|action")}
/>
) : null;
};
function voiceRecordingButton(props: IProps, narrow: boolean): ReactElement | null {
// XXX: recording UI does not work well in narrow mode, so hide for now
return narrow ? null : (

View File

@ -39,7 +39,6 @@ import { getKeyBindingsManager } from "../../../KeyBindingsManager";
import { RoomGeneralContextMenu } from "../context_menus/RoomGeneralContextMenu";
import { CallStore, CallStoreEvent } from "../../../stores/CallStore";
import { SdkContextClass } from "../../../contexts/SDKContext";
import { useHasRoomLiveVoiceBroadcast } from "../../../voice-broadcast";
import { RoomTileSubtitle } from "./RoomTileSubtitle";
import { shouldShowComponent } from "../../../customisations/helpers/UIComponents";
import { UIComponent } from "../../../settings/UIFeature";
@ -53,10 +52,6 @@ interface Props {
tag: TagID;
}
interface ClassProps extends Props {
hasLiveVoiceBroadcast: boolean;
}
type PartialDOMRect = Pick<DOMRect, "left" | "bottom">;
interface State {
@ -77,13 +72,13 @@ export const contextMenuBelow = (elementRect: PartialDOMRect): MenuProps => {
return { left, top, chevronFace };
};
export class RoomTile extends React.PureComponent<ClassProps, State> {
class RoomTile extends React.PureComponent<Props, State> {
private dispatcherRef?: string;
private roomTileRef = createRef<HTMLDivElement>();
private notificationState: NotificationState;
private roomProps: RoomEchoChamber;
public constructor(props: ClassProps) {
public constructor(props: Props) {
super(props);
this.state = {
@ -370,15 +365,10 @@ export class RoomTile extends React.PureComponent<ClassProps, State> {
/**
* RoomTile has a subtile if one of the following applies:
* - there is a call
* - there is a live voice broadcast
* - message previews are enabled and there is a previewable message
*/
private get shouldRenderSubtitle(): boolean {
return (
!!this.state.call ||
this.props.hasLiveVoiceBroadcast ||
(this.props.showMessagePreview && !!this.state.messagePreview)
);
return !!this.state.call || (this.props.showMessagePreview && !!this.state.messagePreview);
}
public render(): React.ReactElement {
@ -409,7 +399,6 @@ export class RoomTile extends React.PureComponent<ClassProps, State> {
const subtitle = this.shouldRenderSubtitle ? (
<RoomTileSubtitle
call={this.state.call}
hasLiveVoiceBroadcast={this.props.hasLiveVoiceBroadcast}
messagePreview={this.state.messagePreview}
roomId={this.props.room.roomId}
showMessagePreview={this.props.showMessagePreview}
@ -491,9 +480,4 @@ export class RoomTile extends React.PureComponent<ClassProps, State> {
}
}
const RoomTileHOC: React.FC<Props> = (props: Props) => {
const hasLiveVoiceBroadcast = useHasRoomLiveVoiceBroadcast(props.room);
return <RoomTile {...props} hasLiveVoiceBroadcast={hasLiveVoiceBroadcast} />;
};
export default RoomTileHOC;
export default RoomTile;

View File

@ -13,11 +13,9 @@ import { ThreadsIcon } from "@vector-im/compound-design-tokens/assets/web/icons"
import { MessagePreview } from "../../../stores/room-list/MessagePreviewStore";
import { Call } from "../../../models/Call";
import { RoomTileCallSummary } from "./RoomTileCallSummary";
import { VoiceBroadcastRoomSubtitle } from "../../../voice-broadcast";
interface Props {
call: Call | null;
hasLiveVoiceBroadcast: boolean;
messagePreview: MessagePreview | null;
roomId: string;
showMessagePreview: boolean;
@ -25,13 +23,7 @@ interface Props {
const messagePreviewId = (roomId: string): string => `mx_RoomTile_messagePreview_${roomId}`;
export const RoomTileSubtitle: React.FC<Props> = ({
call,
hasLiveVoiceBroadcast,
messagePreview,
roomId,
showMessagePreview,
}) => {
export const RoomTileSubtitle: React.FC<Props> = ({ call, messagePreview, roomId, showMessagePreview }) => {
if (call) {
return (
<div className="mx_RoomTile_subtitle">
@ -40,10 +32,6 @@ export const RoomTileSubtitle: React.FC<Props> = ({
);
}
if (hasLiveVoiceBroadcast) {
return <VoiceBroadcastRoomSubtitle />;
}
if (showMessagePreview && messagePreview) {
const className = classNames("mx_RoomTile_subtitle", {
"mx_RoomTile_subtitle--thread-reply": messagePreview.isThreadReply,

View File

@ -19,7 +19,6 @@ import ErrorDialog from "../../../dialogs/ErrorDialog";
import PowerSelector from "../../../elements/PowerSelector";
import SettingsFieldset from "../../SettingsFieldset";
import SettingsStore from "../../../../../settings/SettingsStore";
import { VoiceBroadcastInfoEventType } from "../../../../../voice-broadcast";
import { ElementCall } from "../../../../../models/Call";
import SdkConfig, { DEFAULTS } from "../../../../../SdkConfig";
import { AddPrivilegedUsers } from "../../AddPrivilegedUsers";
@ -62,7 +61,6 @@ const plEventsToShow: Record<string, IEventShowOpts> = {
// TODO: Enable support for m.widget event type (https://github.com/vector-im/element-web/issues/13111)
"im.vector.modular.widgets": { isState: true, hideForSpace: true },
[VoiceBroadcastInfoEventType]: { isState: true, hideForSpace: true },
};
// parse a string as an integer; if the input is undefined, or cannot be parsed
@ -289,7 +287,6 @@ export default class RolesRoomSettingsTab extends React.Component<IProps, RolesR
// TODO: Enable support for m.widget event type (https://github.com/vector-im/element-web/issues/13111)
"im.vector.modular.widgets": isSpaceRoom ? null : _td("room_settings|permissions|m.widget"),
[VoiceBroadcastInfoEventType]: _td("room_settings|permissions|io.element.voice_broadcast_info"),
};
// MSC3401: Native Group VoIP signaling

View File

@ -25,11 +25,6 @@ import { WidgetLayoutStore } from "../stores/widgets/WidgetLayoutStore";
import { WidgetPermissionStore } from "../stores/widgets/WidgetPermissionStore";
import { OidcClientStore } from "../stores/oidc/OidcClientStore";
import WidgetStore from "../stores/WidgetStore";
import {
VoiceBroadcastPlaybacksStore,
VoiceBroadcastPreRecordingStore,
VoiceBroadcastRecordingsStore,
} from "../voice-broadcast";
// This context is available to components under MatrixChat,
// the context must not be used by components outside a SdkContextClass tree.
@ -68,9 +63,6 @@ export class SdkContextClass {
protected _SpaceStore?: SpaceStoreClass;
protected _LegacyCallHandler?: LegacyCallHandler;
protected _TypingStore?: TypingStore;
protected _VoiceBroadcastRecordingsStore?: VoiceBroadcastRecordingsStore;
protected _VoiceBroadcastPreRecordingStore?: VoiceBroadcastPreRecordingStore;
protected _VoiceBroadcastPlaybacksStore?: VoiceBroadcastPlaybacksStore;
protected _AccountPasswordStore?: AccountPasswordStore;
protected _UserProfilesStore?: UserProfilesStore;
protected _OidcClientStore?: OidcClientStore;
@ -157,27 +149,6 @@ export class SdkContextClass {
return this._TypingStore;
}
public get voiceBroadcastRecordingsStore(): VoiceBroadcastRecordingsStore {
if (!this._VoiceBroadcastRecordingsStore) {
this._VoiceBroadcastRecordingsStore = new VoiceBroadcastRecordingsStore();
}
return this._VoiceBroadcastRecordingsStore;
}
public get voiceBroadcastPreRecordingStore(): VoiceBroadcastPreRecordingStore {
if (!this._VoiceBroadcastPreRecordingStore) {
this._VoiceBroadcastPreRecordingStore = new VoiceBroadcastPreRecordingStore();
}
return this._VoiceBroadcastPreRecordingStore;
}
public get voiceBroadcastPlaybacksStore(): VoiceBroadcastPlaybacksStore {
if (!this._VoiceBroadcastPlaybacksStore) {
this._VoiceBroadcastPlaybacksStore = new VoiceBroadcastPlaybacksStore(this.voiceBroadcastRecordingsStore);
}
return this._VoiceBroadcastPlaybacksStore;
}
public get accountPasswordStore(): AccountPasswordStore {
if (!this._AccountPasswordStore) {
this._AccountPasswordStore = new AccountPasswordStore();

View File

@ -41,13 +41,7 @@ import { getMessageModerationState, MessageModerationState } from "../utils/Even
import HiddenBody from "../components/views/messages/HiddenBody";
import ViewSourceEvent from "../components/views/messages/ViewSourceEvent";
import { shouldDisplayAsBeaconTile } from "../utils/beacon/timeline";
import { shouldDisplayAsVoiceBroadcastTile } from "../voice-broadcast/utils/shouldDisplayAsVoiceBroadcastTile";
import { ElementCall } from "../models/Call";
import {
isRelatedToVoiceBroadcast,
shouldDisplayAsVoiceBroadcastStoppedText,
VoiceBroadcastChunkEventType,
} from "../voice-broadcast";
// Subset of EventTile's IProps plus some mixins
export interface EventTileTypeProps
@ -223,12 +217,6 @@ export function pickFactory(
return MessageEventFactory;
}
if (shouldDisplayAsVoiceBroadcastTile(mxEvent)) {
return MessageEventFactory;
} else if (shouldDisplayAsVoiceBroadcastStoppedText(mxEvent)) {
return TextualEventFactory;
}
if (SINGULAR_STATE_EVENTS.has(evType) && mxEvent.getStateKey() !== "") {
return noEventFactoryFactory(); // improper event type to render
}
@ -249,16 +237,6 @@ export function pickFactory(
return noEventFactoryFactory();
}
if (mxEvent.getContent()[VoiceBroadcastChunkEventType]) {
// hide voice broadcast chunks
return noEventFactoryFactory();
}
if (!showHiddenEvents && mxEvent.isDecryptionFailure() && isRelatedToVoiceBroadcast(mxEvent, cli)) {
// hide utd events related to a broadcast
return noEventFactoryFactory();
}
return EVENT_TILE_TYPES.get(evType) ?? noEventFactoryFactory();
}

View File

@ -9,7 +9,6 @@ Please see LICENSE files in the repository root for full details.
import { M_POLL_END, M_POLL_START, M_BEACON_INFO, MatrixEvent, MatrixClient } from "matrix-js-sdk/src/matrix";
import { getShareableLocationEventForBeacon } from "../../utils/beacon/getShareableLocation";
import { VoiceBroadcastInfoEventType } from "../../voice-broadcast/types";
/**
* Get forwardable event for a given event
@ -20,8 +19,6 @@ export const getForwardableEvent = (event: MatrixEvent, cli: MatrixClient): Matr
return null;
}
if (event.getType() === VoiceBroadcastInfoEventType) return null;
// Live location beacons should forward their latest location as a static pin location
// If the beacon is not live, or doesn't have a location forwarding is not allowed
if (M_BEACON_INFO.matches(event.getType())) {

View File

@ -1,76 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import { useRef, useState } from "react";
import { _t } from "../languageHandler";
import MediaDeviceHandler, { MediaDeviceKindEnum } from "../MediaDeviceHandler";
import { requestMediaPermissions } from "../utils/media/requestMediaPermissions";
interface State {
devices: MediaDeviceInfo[];
device: MediaDeviceInfo | null;
}
export const useAudioDeviceSelection = (
onDeviceChanged?: (device: MediaDeviceInfo) => void,
): {
currentDevice: MediaDeviceInfo | null;
currentDeviceLabel: string;
devices: MediaDeviceInfo[];
setDevice(device: MediaDeviceInfo): void;
} => {
const shouldRequestPermissionsRef = useRef<boolean>(true);
const [state, setState] = useState<State>({
devices: [],
device: null,
});
if (shouldRequestPermissionsRef.current) {
shouldRequestPermissionsRef.current = false;
requestMediaPermissions(false).then((stream: MediaStream | undefined) => {
MediaDeviceHandler.getDevices().then((devices) => {
if (!devices) return;
const { audioinput } = devices;
MediaDeviceHandler.getDefaultDevice(audioinput);
const deviceFromSettings = MediaDeviceHandler.getAudioInput();
const device =
audioinput.find((d) => {
return d.deviceId === deviceFromSettings;
}) || audioinput[0];
setState({
...state,
devices: audioinput,
device,
});
stream?.getTracks().forEach((t) => t.stop());
});
});
}
const setDevice = (device: MediaDeviceInfo): void => {
const shouldNotify = device.deviceId !== state.device?.deviceId;
MediaDeviceHandler.instance.setDevice(device.deviceId, MediaDeviceKindEnum.AudioInput);
setState({
...state,
device,
});
if (shouldNotify) {
onDeviceChanged?.(device);
}
};
return {
currentDevice: state.device,
currentDeviceLabel: state.device?.label || _t("voip|default_device"),
devices: state.devices,
setDevice,
};
};

View File

@ -1087,10 +1087,6 @@
},
"error_user_not_logged_in": "User is not logged in",
"event_preview": {
"io.element.voice_broadcast_info": {
"user": "%(senderName)s ended a voice broadcast",
"you": "You ended a voice broadcast"
},
"m.call.answer": {
"dm": "Call in progress",
"user": "%(senderName)s joined the call",
@ -1491,8 +1487,6 @@
"video_rooms_faq2_answer": "Yes, the chat timeline is displayed alongside the video.",
"video_rooms_faq2_question": "Can I use text chat alongside the video call?",
"video_rooms_feedbackSubheading": "Thank you for trying the beta, please go into as much detail as you can so we can improve it.",
"voice_broadcast": "Voice broadcast",
"voice_broadcast_force_small_chunks": "Force 15s voice broadcast chunk length",
"wysiwyg_composer": "Rich text editor"
},
"labs_mjolnir": {
@ -1638,7 +1632,6 @@
"mute_description": "You won't get any notifications"
},
"notifier": {
"io.element.voice_broadcast_chunk": "%(senderName)s started a voice broadcast",
"m.key.verification.request": "%(name)s is requesting verification"
},
"onboarding": {
@ -2253,7 +2246,6 @@
"error_unbanning": "Failed to unban",
"events_default": "Send messages",
"invite": "Invite users",
"io.element.voice_broadcast_info": "Voice broadcasts",
"kick": "Remove users",
"m.call": "Start %(brand)s calls",
"m.call.member": "Join %(brand)s calls",
@ -3287,10 +3279,6 @@
"error_rendering_message": "Can't load this message",
"historical_messages_unavailable": "You can't see earlier messages",
"in_room_name": " in <strong>%(room)s</strong>",
"io.element.voice_broadcast_info": {
"user": "%(senderName)s ended a <a>voice broadcast</a>",
"you": "You ended a <a>voice broadcast</a>"
},
"io.element.widgets.layout": "%(senderName)s has updated the room layout",
"late_event_separator": "Originally sent %(dateTime)s",
"load_error": {
@ -3840,38 +3828,6 @@
"switch_theme_dark": "Switch to dark mode",
"switch_theme_light": "Switch to light mode"
},
"voice_broadcast": {
"30s_backward": "30s backward",
"30s_forward": "30s forward",
"action": "Voice broadcast",
"buffering": "Buffering…",
"confirm_listen_affirm": "Yes, end my recording",
"confirm_listen_description": "If you start listening to this live broadcast, your current live broadcast recording will be ended.",
"confirm_listen_title": "Listen to live broadcast?",
"confirm_stop_affirm": "Yes, stop broadcast",
"confirm_stop_description": "Are you sure you want to stop your live broadcast? This will end the broadcast and the full recording will be available in the room.",
"confirm_stop_title": "Stop live broadcasting?",
"connection_error": "Connection error - Recording paused",
"failed_already_recording_description": "You are already recording a voice broadcast. Please end your current voice broadcast to start a new one.",
"failed_already_recording_title": "Can't start a new voice broadcast",
"failed_decrypt": "Unable to decrypt voice broadcast",
"failed_generic": "Unable to play this voice broadcast",
"failed_insufficient_permission_description": "You don't have the required permissions to start a voice broadcast in this room. Contact a room administrator to upgrade your permissions.",
"failed_insufficient_permission_title": "Can't start a new voice broadcast",
"failed_no_connection_description": "Unfortunately we're unable to start a recording right now. Please try again later.",
"failed_no_connection_title": "Connection error",
"failed_others_already_recording_description": "Someone else is already recording a voice broadcast. Wait for their voice broadcast to end to start a new one.",
"failed_others_already_recording_title": "Can't start a new voice broadcast",
"go_live": "Go live",
"live": "Live",
"pause": "pause voice broadcast",
"play": "play voice broadcast",
"resume": "resume voice broadcast"
},
"voice_message": {
"cant_start_broadcast_description": "You can't start a voice message as you are currently recording a live broadcast. Please end your live broadcast in order to start recording a voice message.",
"cant_start_broadcast_title": "Can't start voice message"
},
"voip": {
"already_in_call": "Already in call",
"already_in_call_person": "You're already in a call with this person.",
@ -3891,7 +3847,6 @@
"camera_disabled": "Your camera is turned off",
"camera_enabled": "Your camera is still enabled",
"cannot_call_yourself_description": "You cannot place a call with yourself.",
"change_input_device": "Change input device",
"close_lobby": "Close lobby",
"connecting": "Connecting",
"connection_lost": "Connectivity to the server has been lost",
@ -3910,8 +3865,6 @@
"enable_camera": "Turn on camera",
"enable_microphone": "Unmute microphone",
"expand": "Return to call",
"failed_call_live_broadcast_description": "You can’t start a call as you are currently recording a live broadcast. Please end your live broadcast in order to start a call.",
"failed_call_live_broadcast_title": "Can’t start a call",
"get_call_link": "Share call link",
"hangup": "Hangup",
"hide_sidebar_button": "Hide sidebar",

View File

@ -85,8 +85,6 @@ export enum LabGroup {
}
export enum Features {
VoiceBroadcast = "feature_voice_broadcast",
VoiceBroadcastForceSmallChunks = "feature_voice_broadcast_force_small_chunks",
NotificationSettings2 = "feature_notification_settings2",
OidcNativeFlow = "feature_oidc_native_flow",
ReleaseAnnouncement = "feature_release_announcement",
@ -440,19 +438,6 @@ export const SETTINGS: { [setting: string]: ISetting } = {
shouldWarn: true,
default: false,
},
[Features.VoiceBroadcast]: {
isFeature: true,
labsGroup: LabGroup.Messaging,
supportedLevels: LEVELS_DEVICE_ONLY_SETTINGS_WITH_CONFIG_PRIORITISED,
supportedLevelsAreOrdered: true,
displayName: _td("labs|voice_broadcast"),
default: false,
},
[Features.VoiceBroadcastForceSmallChunks]: {
supportedLevels: LEVELS_DEVICE_ONLY_SETTINGS,
displayName: _td("labs|voice_broadcast_force_small_chunks"),
default: false,
},
[Features.OidcNativeFlow]: {
isFeature: true,
labsGroup: LabGroup.Developer,

View File

@ -42,15 +42,6 @@ import { UPDATE_EVENT } from "./AsyncStore";
import { SdkContextClass } from "../contexts/SDKContext";
import { CallStore } from "./CallStore";
import { ThreadPayload } from "../dispatcher/payloads/ThreadPayload";
import {
doClearCurrentVoiceBroadcastPlaybackIfStopped,
doMaybeSetCurrentVoiceBroadcastPlayback,
VoiceBroadcastRecording,
VoiceBroadcastRecordingsStoreEvent,
} from "../voice-broadcast";
import { IRoomStateEventsActionPayload } from "../actions/MatrixActionCreators";
import { showCantStartACallDialog } from "../voice-broadcast/utils/showCantStartACallDialog";
import { pauseNonLiveBroadcastFromOtherRoom } from "../voice-broadcast/utils/pauseNonLiveBroadcastFromOtherRoom";
import { ActionPayload } from "../dispatcher/payloads";
import { CancelAskToJoinPayload } from "../dispatcher/payloads/CancelAskToJoinPayload";
import { SubmitAskToJoinPayload } from "../dispatcher/payloads/SubmitAskToJoinPayload";
@ -164,10 +155,6 @@ export class RoomViewStore extends EventEmitter {
) {
super();
this.resetDispatcher(dis);
this.stores.voiceBroadcastRecordingsStore.addListener(
VoiceBroadcastRecordingsStoreEvent.CurrentChanged,
this.onCurrentBroadcastRecordingChanged,
);
}
public addRoomListener(roomId: string, fn: Listener): void {
@ -182,16 +169,6 @@ export class RoomViewStore extends EventEmitter {
this.emit(roomId, isActive);
}
private onCurrentBroadcastRecordingChanged = (recording: VoiceBroadcastRecording | null): void => {
if (recording === null) {
const room = this.stores.client?.getRoom(this.state.roomId || undefined);
if (room) {
this.doMaybeSetCurrentVoiceBroadcastPlayback(room);
}
}
};
private setState(newState: Partial<State>): void {
// If values haven't changed, there's nothing to do.
// This only tries a shallow comparison, so unchanged objects will slip
@ -207,16 +184,6 @@ export class RoomViewStore extends EventEmitter {
return;
}
if (newState.viewingCall) {
// Pause current broadcast, if any
this.stores.voiceBroadcastPlaybacksStore.getCurrent()?.pause();
if (this.stores.voiceBroadcastRecordingsStore.getCurrent()) {
showCantStartACallDialog();
newState.viewingCall = false;
}
}
const lastRoomId = this.state.roomId;
this.state = Object.assign(this.state, newState);
if (lastRoomId !== this.state.roomId) {
@ -235,29 +202,6 @@ export class RoomViewStore extends EventEmitter {
this.emit(UPDATE_EVENT);
}
private doMaybeSetCurrentVoiceBroadcastPlayback(room: Room): void {
if (!this.stores.client) return;
doMaybeSetCurrentVoiceBroadcastPlayback(
room,
this.stores.client,
this.stores.voiceBroadcastPlaybacksStore,
this.stores.voiceBroadcastRecordingsStore,
);
}
private onRoomStateEvents(event: MatrixEvent): void {
const roomId = event.getRoomId?.();
// no room or not current room
if (!roomId || roomId !== this.state.roomId) return;
const room = this.stores.client?.getRoom(roomId);
if (room) {
this.doMaybeSetCurrentVoiceBroadcastPlayback(room);
}
}
private onDispatch(payload: ActionPayload): void {
// eslint-disable-line @typescript-eslint/naming-convention
switch (payload.action) {
@ -283,10 +227,6 @@ export class RoomViewStore extends EventEmitter {
wasContextSwitch: false,
viewingCall: false,
});
doClearCurrentVoiceBroadcastPlaybackIfStopped(this.stores.voiceBroadcastPlaybacksStore);
break;
case "MatrixActions.RoomState.events":
this.onRoomStateEvents((payload as IRoomStateEventsActionPayload).event);
break;
case Action.ViewRoomError:
this.viewRoomError(payload as ViewRoomErrorPayload);
@ -489,9 +429,6 @@ export class RoomViewStore extends EventEmitter {
}
if (room) {
pauseNonLiveBroadcastFromOtherRoom(room, this.stores.voiceBroadcastPlaybacksStore);
this.doMaybeSetCurrentVoiceBroadcastPlayback(room);
await setMarkedUnreadState(room, MatrixClientPeg.safeGet(), false);
}
} else if (payload.room_alias) {

View File

@ -22,8 +22,6 @@ import { StickerEventPreview } from "./previews/StickerEventPreview";
import { ReactionEventPreview } from "./previews/ReactionEventPreview";
import { UPDATE_EVENT } from "../AsyncStore";
import { IPreview } from "./previews/IPreview";
import { VoiceBroadcastInfoEventType } from "../../voice-broadcast";
import { VoiceBroadcastPreview } from "./previews/VoiceBroadcastPreview";
import shouldHideEvent from "../../shouldHideEvent";
// Emitted event for when a room's preview has changed. First argument will the room for which
@ -69,10 +67,6 @@ const PREVIEWS: Record<
isState: false,
previewer: new PollStartEventPreview(),
},
[VoiceBroadcastInfoEventType]: {
isState: true,
previewer: new VoiceBroadcastPreview(),
},
};
// The maximum number of events we're willing to look back on to get a preview.

View File

@ -14,15 +14,11 @@ import { _t, sanitizeForTranslation } from "../../../languageHandler";
import { getSenderName, isSelf, shouldPrefixMessagesIn } from "./utils";
import { getHtmlText } from "../../../HtmlUtils";
import { stripHTMLReply, stripPlainReply } from "../../../utils/Reply";
import { VoiceBroadcastChunkEventType } from "../../../voice-broadcast/types";
export class MessageEventPreview implements IPreview {
public getTextFor(event: MatrixEvent, tagId?: TagID, isThread?: boolean): string | null {
let eventContent = event.getContent();
// no preview for broadcast chunks
if (eventContent[VoiceBroadcastChunkEventType]) return null;
if (event.isRelation(RelationType.Replace)) {
// It's an edit, generate the preview on the new text
eventContent = event.getContent()["m.new_content"];

View File

@ -1,23 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import { MatrixEvent } from "matrix-js-sdk/src/matrix";
import { VoiceBroadcastInfoState } from "../../../voice-broadcast/types";
import { textForVoiceBroadcastStoppedEventWithoutLink } from "../../../voice-broadcast/utils/textForVoiceBroadcastStoppedEventWithoutLink";
import { IPreview } from "./IPreview";
/**
 * Room-list preview renderer for voice broadcast info events.
 * Yields the "ended a voice broadcast" text for stopped, non-redacted
 * broadcasts and suppresses the preview otherwise.
 */
export class VoiceBroadcastPreview implements IPreview {
    public getTextFor(event: MatrixEvent, tagId?: string, isThread?: boolean): string | null {
        // Redacted events never get a preview.
        if (event.isRedacted()) return null;
        // Only a broadcast that has reached the "stopped" state is previewed.
        if (event.getContent()?.state !== VoiceBroadcastInfoState.Stopped) return null;
        return textForVoiceBroadcastStoppedEventWithoutLink(event);
    }
}

View File

@ -284,10 +284,6 @@ export class StopGapWidget extends EventEmitter {
});
this.messaging.on("capabilitiesNotified", () => this.emit("capabilitiesNotified"));
this.messaging.on(`action:${WidgetApiFromWidgetAction.OpenModalWidget}`, this.onOpenModal);
this.messaging.on(`action:${ElementWidgetActions.JoinCall}`, () => {
// pause voice broadcast recording when any widget sends a "join"
SdkContextClass.instance.voiceBroadcastRecordingsStore.getCurrent()?.pause();
});
// Always attach a handler for ViewRoom, but permission check it internally
this.messaging.on(`action:${ElementWidgetActions.ViewRoom}`, (ev: CustomEvent<IViewRoomApiRequest>) => {

View File

@ -21,7 +21,6 @@ import SettingsStore from "../settings/SettingsStore";
import { haveRendererForEvent, JitsiEventFactory, JSONEventFactory, pickFactory } from "../events/EventTileFactory";
import { getMessageModerationState, isLocationEvent, MessageModerationState } from "./EventUtils";
import { ElementCall } from "../models/Call";
import { VoiceBroadcastInfoEventType, VoiceBroadcastInfoState } from "../voice-broadcast";
const calcIsInfoMessage = (
eventType: EventType | string,
@ -38,8 +37,7 @@ const calcIsInfoMessage = (
eventType !== EventType.RoomCreate &&
!M_POLL_START.matches(eventType) &&
!M_POLL_END.matches(eventType) &&
!M_BEACON_INFO.matches(eventType) &&
!(eventType === VoiceBroadcastInfoEventType && content?.state === VoiceBroadcastInfoState.Started)
!M_BEACON_INFO.matches(eventType)
);
};
@ -91,8 +89,7 @@ export function getEventDisplayInfo(
(eventType === EventType.RoomMessage && msgtype === MsgType.Emote) ||
M_POLL_START.matches(eventType) ||
M_BEACON_INFO.matches(eventType) ||
isLocationEvent(mxEvent) ||
eventType === VoiceBroadcastInfoEventType;
isLocationEvent(mxEvent);
// If we're showing hidden events in the timeline, we should use the
// source tile when there's no regular tile for an event and also for

View File

@ -30,7 +30,6 @@ import { TimelineRenderingType } from "../contexts/RoomContext";
import { launchPollEditor } from "../components/views/messages/MPollBody";
import { Action } from "../dispatcher/actions";
import { ViewRoomPayload } from "../dispatcher/payloads/ViewRoomPayload";
import { VoiceBroadcastInfoEventType, VoiceBroadcastInfoState } from "../voice-broadcast/types";
/**
* Returns whether an event should allow actions like reply, reactions, edit, etc.
@ -56,9 +55,7 @@ export function isContentActionable(mxEvent: MatrixEvent): boolean {
mxEvent.getType() === "m.sticker" ||
M_POLL_START.matches(mxEvent.getType()) ||
M_POLL_END.matches(mxEvent.getType()) ||
M_BEACON_INFO.matches(mxEvent.getType()) ||
(mxEvent.getType() === VoiceBroadcastInfoEventType &&
mxEvent.getContent()?.state === VoiceBroadcastInfoState.Started)
M_BEACON_INFO.matches(mxEvent.getType())
) {
return true;
}

View File

@ -1,181 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import { isEqual } from "lodash";
import { Optional } from "matrix-events-sdk";
import { logger } from "matrix-js-sdk/src/logger";
import { TypedEventEmitter } from "matrix-js-sdk/src/matrix";
import { getChunkLength } from "..";
import { IRecordingUpdate, VoiceRecording } from "../../audio/VoiceRecording";
import { concat } from "../../utils/arrays";
import { IDestroyable } from "../../utils/IDestroyable";
import { Singleflight } from "../../utils/Singleflight";
/** Events emitted by {@link VoiceBroadcastRecorder}. */
export enum VoiceBroadcastRecorderEvent {
    /** A chunk reached the target length (or recording stopped) and is ready for upload. */
    ChunkRecorded = "chunk_recorded",
    /** The length (in seconds) of the chunk currently being recorded changed. */
    CurrentChunkLengthUpdated = "current_chunk_length_updated",
}
interface EventMap {
    [VoiceBroadcastRecorderEvent.ChunkRecorded]: (chunk: ChunkRecordedPayload) => void;
    [VoiceBroadcastRecorderEvent.CurrentChunkLengthUpdated]: (length: number) => void;
}
/** A finished chunk: complete Opus/Ogg bytes (headers + audio) and its length in seconds. */
export interface ChunkRecordedPayload {
    buffer: Uint8Array;
    length: number;
}
// char sequence of "OpusHead"
const OpusHead = [79, 112, 117, 115, 72, 101, 97, 100];
// char sequence of "OpusTags"
const OpusTags = [79, 112, 117, 115, 84, 97, 103, 115];
/**
 * This class provides the function to seamlessly record fixed length chunks.
 * Subscribe with on(VoiceBroadcastRecordingEvents.ChunkRecorded, (payload: ChunkRecordedPayload) => {})
 * to retrieve chunks while recording.
 *
 * The recorder intercepts the raw data blobs from {@link VoiceRecording}:
 * the "OpusHead" and "OpusTags" header packets are remembered separately and
 * prepended to every emitted chunk so that each chunk is an independently
 * playable Opus stream.
 */
export class VoiceBroadcastRecorder
    extends TypedEventEmitter<VoiceBroadcastRecorderEvent, EventMap>
    implements IDestroyable
{
    // Captured "OpusHead" header packet; prepended to every chunk.
    private opusHead?: Uint8Array;
    // Captured "OpusTags" header packet; prepended to every chunk.
    private opusTags?: Uint8Array;
    // Audio bytes accumulated for the chunk currently being recorded.
    private chunkBuffer = new Uint8Array(0);
    // position of the previous chunk in seconds
    private previousChunkEndTimePosition = 0;
    // current chunk length in seconds
    private currentChunkLength = 0;
    public constructor(
        private voiceRecording: VoiceRecording,
        public readonly targetChunkLength: number,
    ) {
        super();
        this.voiceRecording.onDataAvailable = this.onDataAvailable;
    }
    /** Starts the underlying recording and keeps the current chunk length up to date from live data. */
    public async start(): Promise<void> {
        await this.voiceRecording.start();
        this.voiceRecording.liveData.onUpdate((data: IRecordingUpdate) => {
            this.setCurrentChunkLength(data.timeSeconds - this.previousChunkEndTimePosition);
        });
    }
    /**
     * Stops the recording and returns the remaining chunk (if any).
     */
    public async stop(): Promise<Optional<ChunkRecordedPayload>> {
        try {
            await this.voiceRecording.stop();
        } catch {
            // Ignore if the recording raises any error.
        }
        // forget about that call, so that we can stop it again later
        Singleflight.forgetAllFor(this.voiceRecording);
        const chunk = this.extractChunk();
        this.currentChunkLength = 0;
        this.previousChunkEndTimePosition = 0;
        return chunk;
    }
    /** MIME type of the recorded data, as reported by the underlying recording. */
    public get contentType(): string {
        return this.voiceRecording.contentType;
    }
    // Updates the tracked chunk length and notifies listeners, but only on actual change.
    private setCurrentChunkLength(currentChunkLength: number): void {
        if (this.currentChunkLength === currentChunkLength) return;
        this.currentChunkLength = currentChunkLength;
        this.emit(VoiceBroadcastRecorderEvent.CurrentChunkLengthUpdated, currentChunkLength);
    }
    /** Length in seconds of the chunk currently being recorded. */
    public getCurrentChunkLength(): number {
        return this.currentChunkLength;
    }
    // Routes each raw data blob: header packets are stored, audio data is buffered.
    // NOTE(review): bytes 28..36 are assumed to hold the Ogg packet magic
    // ("OpusHead"/"OpusTags") — this presumes each blob starts with a single Ogg
    // page whose header is 28 bytes; confirm against the recorder's output format.
    private onDataAvailable = (data: ArrayBuffer): void => {
        const dataArray = new Uint8Array(data);
        // extract the part, that contains the header type info
        const headerType = Array.from(dataArray.slice(28, 36));
        if (isEqual(OpusHead, headerType)) {
            // data seems to be an "OpusHead" header
            this.opusHead = dataArray;
            return;
        }
        if (isEqual(OpusTags, headerType)) {
            // data seems to be an "OpusTags" header
            this.opusTags = dataArray;
            return;
        }
        this.setCurrentChunkLength(this.voiceRecording.recorderSeconds! - this.previousChunkEndTimePosition);
        this.handleData(dataArray);
    };
    // Appends audio data to the chunk buffer and emits the chunk once it is long enough.
    private handleData(data: Uint8Array): void {
        this.chunkBuffer = concat(this.chunkBuffer, data);
        this.emitChunkIfTargetLengthReached();
    }
    private emitChunkIfTargetLengthReached(): void {
        if (this.getCurrentChunkLength() >= this.targetChunkLength) {
            this.emitAndResetChunk();
        }
    }
    /**
     * Extracts the current chunk and resets the buffer.
     * Returns null when there is no buffered audio or the Opus headers were never seen.
     */
    private extractChunk(): Optional<ChunkRecordedPayload> {
        if (this.chunkBuffer.length === 0) {
            return null;
        }
        if (!this.opusHead || !this.opusTags) {
            logger.warn("Broadcast chunk cannot be extracted. OpusHead or OpusTags is missing.");
            return null;
        }
        const currentRecorderTime = this.voiceRecording.recorderSeconds!;
        const payload: ChunkRecordedPayload = {
            buffer: concat(this.opusHead!, this.opusTags!, this.chunkBuffer),
            length: this.getCurrentChunkLength(),
        };
        this.chunkBuffer = new Uint8Array(0);
        this.setCurrentChunkLength(0);
        this.previousChunkEndTimePosition = currentRecorderTime;
        return payload;
    }
    private emitAndResetChunk(): void {
        if (this.chunkBuffer.length === 0) {
            return;
        }
        this.emit(VoiceBroadcastRecorderEvent.ChunkRecorded, this.extractChunk()!);
    }
    /** Tears down listeners and the underlying recording. */
    public destroy(): void {
        this.removeAllListeners();
        this.voiceRecording.destroy();
    }
}
export const createVoiceBroadcastRecorder = (): VoiceBroadcastRecorder => {
const voiceRecording = new VoiceRecording();
voiceRecording.disableMaxLength();
return new VoiceBroadcastRecorder(voiceRecording, getChunkLength());
};

View File

@ -1,58 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import React, { useContext, useEffect, useState } from "react";
import { MatrixEvent, RelationType } from "matrix-js-sdk/src/matrix";
import {
VoiceBroadcastRecordingBody,
shouldDisplayAsVoiceBroadcastRecordingTile,
VoiceBroadcastInfoEventType,
VoiceBroadcastPlaybackBody,
VoiceBroadcastInfoState,
} from "..";
import { IBodyProps } from "../../components/views/messages/IBodyProps";
import { RelationsHelper, RelationsHelperEvent } from "../../events/RelationsHelper";
import { SDKContext } from "../../contexts/SDKContext";
import { useMatrixClientContext } from "../../contexts/MatrixClientContext";
/**
 * Timeline tile for a voice broadcast info event.
 *
 * Tracks the broadcast state via reference relations on the info event and
 * renders either the recording tile (for the broadcaster's own live broadcast)
 * or the playback tile.
 */
export const VoiceBroadcastBody: React.FC<IBodyProps> = ({ mxEvent }) => {
    const sdkContext = useContext(SDKContext);
    const client = useMatrixClientContext();
    const [infoState, setInfoState] = useState(mxEvent.getContent()?.state || VoiceBroadcastInfoState.Stopped);
    useEffect(() => {
        const onInfoEvent = (event: MatrixEvent): void => {
            if (event.getContent()?.state === VoiceBroadcastInfoState.Stopped) {
                // only a stopped event can change the tile state
                setInfoState(VoiceBroadcastInfoState.Stopped);
            }
        };
        const relationsHelper = new RelationsHelper(
            mxEvent,
            RelationType.Reference,
            VoiceBroadcastInfoEventType,
            client,
        );
        relationsHelper.on(RelationsHelperEvent.Add, onInfoEvent);
        relationsHelper.emitCurrent();
        return () => {
            relationsHelper.destroy();
        };
        // Fix: this effect previously had no dependency array, so the
        // RelationsHelper was destroyed and recreated on *every* render.
        // Subscribe once per (mxEvent, client) pair instead.
    }, [mxEvent, client]);
    if (shouldDisplayAsVoiceBroadcastRecordingTile(infoState, client, mxEvent)) {
        const recording = sdkContext.voiceBroadcastRecordingsStore.getByInfoEvent(mxEvent, client);
        return <VoiceBroadcastRecordingBody recording={recording} />;
    }
    const playback = sdkContext.voiceBroadcastPlaybacksStore.getByInfoEvent(mxEvent, client);
    return <VoiceBroadcastPlaybackBody playback={playback} />;
};

View File

@ -1,30 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import classNames from "classnames";
import React from "react";
import { Icon as LiveIcon } from "../../../../res/img/compound/live-16px.svg";
import { _t } from "../../../languageHandler";
interface Props {
    grey?: boolean;
}
/**
 * Small "Live" indicator badge with a live icon.
 * Renders the greyed-out variant when `grey` is set (e.g. for paused broadcasts).
 */
export const LiveBadge: React.FC<Props> = ({ grey = false }) => {
    const badgeClasses = classNames("mx_LiveBadge", { "mx_LiveBadge--grey": grey });
    return (
        <div className={badgeClasses}>
            <LiveIcon className="mx_Icon mx_Icon_16" />
            {_t("voice_broadcast|live")}
        </div>
    );
};

View File

@ -1,25 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import React from "react";
import AccessibleButton from "../../../components/views/elements/AccessibleButton";
interface Props {
icon: React.FC<React.SVGProps<SVGSVGElement>>;
label: string;
onClick: () => void;
}
export const SeekButton: React.FC<Props> = ({ onClick, icon: Icon, label }) => {
return (
<AccessibleButton kind="secondary_content" onClick={onClick} aria-label={label}>
<Icon className="mx_Icon mx_Icon_24" />
</AccessibleButton>
);
};

View File

@ -1,31 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import classNames from "classnames";
import React, { ReactElement } from "react";
import AccessibleButton from "../../../components/views/elements/AccessibleButton";
interface Props {
    className?: string;
    icon: ReactElement;
    label: string;
    onClick: () => void;
}
/**
 * Generic round control button for voice broadcast UIs (play/pause/record/stop).
 * Merges the base class with any caller-supplied classes.
 */
export const VoiceBroadcastControl: React.FC<Props> = ({ className = "", icon, label, onClick }) => {
    const buttonClasses = classNames("mx_VoiceBroadcastControl", className);
    return (
        <AccessibleButton className={buttonClasses} onClick={onClick} aria-label={label}>
            {icon}
        </AccessibleButton>
    );
};

View File

@ -1,23 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2023 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import React from "react";
import { WarningIcon } from "@vector-im/compound-design-tokens/assets/web/icons";
interface Props {
message: string;
}
export const VoiceBroadcastError: React.FC<Props> = ({ message }) => {
return (
<div className="mx_VoiceBroadcastRecordingConnectionError">
<WarningIcon className="mx_Icon mx_Icon_16" />
{message}
</div>
);
};

View File

@ -1,139 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import React from "react";
import { Room } from "matrix-js-sdk/src/matrix";
import classNames from "classnames";
import CloseIcon from "@vector-im/compound-design-tokens/assets/web/icons/close";
import MicrophoneIcon from "@vector-im/compound-design-tokens/assets/web/icons/mic-on-solid";
import { LiveBadge, VoiceBroadcastLiveness } from "../..";
import { Icon as LiveIcon } from "../../../../res/img/compound/live-16px.svg";
import { Icon as TimerIcon } from "../../../../res/img/compound/timer-16px.svg";
import { _t } from "../../../languageHandler";
import RoomAvatar from "../../../components/views/avatars/RoomAvatar";
import AccessibleButton, { ButtonEvent } from "../../../components/views/elements/AccessibleButton";
import Clock from "../../../components/views/audio_messages/Clock";
import { formatTimeLeft } from "../../../DateUtils";
import Spinner from "../../../components/views/elements/Spinner";
import { ViewRoomPayload } from "../../../dispatcher/payloads/ViewRoomPayload";
import { Action } from "../../../dispatcher/actions";
import dis from "../../../dispatcher/dispatcher";
interface VoiceBroadcastHeaderProps {
    linkToRoom?: boolean;
    live?: VoiceBroadcastLiveness;
    liveBadgePosition?: "middle" | "right";
    onCloseClick?: () => void;
    onMicrophoneLineClick?: ((e: ButtonEvent) => void | Promise<void>) | null;
    room: Room;
    microphoneLabel?: string;
    showBroadcast?: boolean;
    showBuffering?: boolean;
    bufferingPosition?: "line" | "title";
    timeLeft?: number;
    showClose?: boolean;
}
/**
 * Shared header for voice broadcast tiles and pips.
 *
 * Shows the room avatar and name (optionally linking to the room), and a set of
 * optional info lines: microphone/device, time left, "broadcast" label,
 * buffering spinner, live badge and a close button.
 */
export const VoiceBroadcastHeader: React.FC<VoiceBroadcastHeaderProps> = ({
    linkToRoom = false,
    live = "not-live",
    liveBadgePosition = "right",
    onCloseClick = (): void => {},
    onMicrophoneLineClick = null,
    room,
    microphoneLabel,
    showBroadcast = false,
    showBuffering = false,
    bufferingPosition = "line",
    showClose = false,
    timeLeft,
}) => {
    const broadcast = showBroadcast && (
        <div className="mx_VoiceBroadcastHeader_line">
            <LiveIcon className="mx_Icon mx_Icon_16" />
            {_t("voice_broadcast|action")}
        </div>
    );
    const liveBadge = live !== "not-live" && <LiveBadge grey={live === "grey"} />;
    const closeButton = showClose && (
        <AccessibleButton onClick={onCloseClick}>
            <CloseIcon className="mx_Icon mx_Icon_16" />
        </AccessibleButton>
    );
    // Fix: `timeLeft && <div/>` rendered a literal "0" when timeLeft === 0,
    // because React renders the number 0 (unlike false/null). A ternary keeps
    // the line hidden for 0/undefined without rendering anything.
    const timeLeftLine = timeLeft ? (
        <div className="mx_VoiceBroadcastHeader_line">
            <TimerIcon className="mx_Icon mx_Icon_16" />
            <Clock formatFn={formatTimeLeft} seconds={timeLeft} />
        </div>
    ) : null;
    const bufferingLine = showBuffering && bufferingPosition === "line" && (
        <div className="mx_VoiceBroadcastHeader_line">
            <Spinner w={14} h={14} />
            {_t("voice_broadcast|buffering")}
        </div>
    );
    const microphoneLineClasses = classNames({
        mx_VoiceBroadcastHeader_line: true,
        ["mx_VoiceBroadcastHeader_mic--clickable"]: onMicrophoneLineClick,
    });
    const microphoneLine = microphoneLabel && (
        <AccessibleButton
            className={microphoneLineClasses}
            onClick={onMicrophoneLineClick}
            title={_t("voip|change_input_device")}
        >
            <MicrophoneIcon className="mx_Icon mx_Icon_16" />
            <span>{microphoneLabel}</span>
        </AccessibleButton>
    );
    const onRoomAvatarOrNameClick = (): void => {
        dis.dispatch<ViewRoomPayload>({
            action: Action.ViewRoom,
            room_id: room.roomId,
            metricsTrigger: undefined, // other
        });
    };
    let roomAvatar = <RoomAvatar room={room} size="32px" />;
    let roomName = (
        <div className="mx_VoiceBroadcastHeader_room_wrapper">
            <div className="mx_VoiceBroadcastHeader_room">{room.name}</div>
            {showBuffering && bufferingPosition === "title" && <Spinner w={12} h={12} />}
        </div>
    );
    if (linkToRoom) {
        roomAvatar = <AccessibleButton onClick={onRoomAvatarOrNameClick}>{roomAvatar}</AccessibleButton>;
        roomName = <AccessibleButton onClick={onRoomAvatarOrNameClick}>{roomName}</AccessibleButton>;
    }
    return (
        <div className="mx_VoiceBroadcastHeader">
            {roomAvatar}
            <div className="mx_VoiceBroadcastHeader_content">
                {roomName}
                {microphoneLine}
                {timeLeftLine}
                {broadcast}
                {bufferingLine}
                {liveBadgePosition === "middle" && liveBadge}
            </div>
            {liveBadgePosition === "right" && liveBadge}
            {closeButton}
        </div>
    );
};

View File

@ -1,51 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022, 2023 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import React, { ReactElement } from "react";
import PauseIcon from "@vector-im/compound-design-tokens/assets/web/icons/pause-solid";
import PlayIcon from "@vector-im/compound-design-tokens/assets/web/icons/play-solid";
import { _t } from "../../../languageHandler";
import { VoiceBroadcastControl, VoiceBroadcastPlaybackState } from "../..";
interface Props {
    onClick: () => void;
    state: VoiceBroadcastPlaybackState;
}
/**
 * Play/resume/pause control for a voice broadcast playback.
 * Renders nothing for states without a control (e.g. the error state).
 */
export const VoiceBroadcastPlaybackControl: React.FC<Props> = ({ onClick, state }) => {
    let icon: ReactElement | null = null;
    let label: string | null = null;
    let controlClass = "";
    if (state === VoiceBroadcastPlaybackState.Stopped) {
        icon = <PlayIcon className="mx_Icon mx_Icon_16" />;
        controlClass = "mx_VoiceBroadcastControl-play";
        label = _t("voice_broadcast|play");
    } else if (state === VoiceBroadcastPlaybackState.Paused) {
        icon = <PlayIcon className="mx_Icon mx_Icon_16" />;
        controlClass = "mx_VoiceBroadcastControl-play";
        label = _t("voice_broadcast|resume");
    } else if (state === VoiceBroadcastPlaybackState.Buffering || state === VoiceBroadcastPlaybackState.Playing) {
        icon = <PauseIcon className="mx_Icon mx_Icon_12" />;
        label = _t("voice_broadcast|pause");
    }
    if (!icon || !label) return null;
    return <VoiceBroadcastControl className={controlClass} label={label} icon={icon} onClick={onClick} />;
};

View File

@ -1,21 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2023 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import React from "react";
import { WarningIcon } from "@vector-im/compound-design-tokens/assets/web/icons";
import { _t } from "../../../languageHandler";
export const VoiceBroadcastRecordingConnectionError: React.FC = () => {
return (
<div className="mx_VoiceBroadcastRecordingConnectionError">
<WarningIcon className="mx_Icon mx_Icon_16" />
{_t("voice_broadcast|connection_error")}
</div>
);
};

View File

@ -1,21 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import React from "react";
import { Icon as LiveIcon } from "../../../../res/img/compound/live-16px.svg";
import { _t } from "../../../languageHandler";
export const VoiceBroadcastRoomSubtitle: React.FC = () => {
return (
<div className="mx_RoomTile_subtitle mx_RoomTile_subtitle--voice-broadcast">
<LiveIcon className="mx_Icon mx_Icon_16" />
{_t("voice_broadcast|live")}
</div>
);
};

View File

@ -1,38 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import React from "react";
import BaseDialog from "../../../components/views/dialogs/BaseDialog";
import DialogButtons from "../../../components/views/elements/DialogButtons";
import { _t } from "../../../languageHandler";
import Modal from "../../../Modal";
interface Props {
onFinished: (confirmed?: boolean) => void;
}
export const ConfirmListenBroadcastStopCurrentDialog: React.FC<Props> = ({ onFinished }) => {
return (
<BaseDialog title={_t("voice_broadcast|confirm_listen_title")} hasCancel={true} onFinished={onFinished}>
<p>{_t("voice_broadcast|confirm_listen_description")}</p>
<DialogButtons
onPrimaryButtonClick={() => onFinished(true)}
primaryButton={_t("voice_broadcast|confirm_listen_affirm")}
cancelButton={_t("action|no")}
onCancel={() => onFinished(false)}
/>
</BaseDialog>
);
};
/**
 * Opens {@link ConfirmListenBroadcastStopCurrentDialog} and resolves with
 * whether the user confirmed.
 */
export const showConfirmListenBroadcastStopCurrentDialog = async (): Promise<boolean> => {
    const { finished } = Modal.createDialog(ConfirmListenBroadcastStopCurrentDialog);
    const [confirmed] = await finished;
    return Boolean(confirmed);
};

View File

@ -1,102 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import React, { ReactElement } from "react";
import classNames from "classnames";
import {
VoiceBroadcastError,
VoiceBroadcastHeader,
VoiceBroadcastPlayback,
VoiceBroadcastPlaybackControl,
VoiceBroadcastPlaybackState,
} from "../..";
import { useVoiceBroadcastPlayback } from "../../hooks/useVoiceBroadcastPlayback";
import { Icon as Back30sIcon } from "../../../../res/img/compound/back-30s-24px.svg";
import { Icon as Forward30sIcon } from "../../../../res/img/compound/forward-30s-24px.svg";
import { _t } from "../../../languageHandler";
import Clock from "../../../components/views/audio_messages/Clock";
import SeekBar from "../../../components/views/audio_messages/SeekBar";
import { SeekButton } from "../atoms/SeekButton";
// Number of seconds each seek button jumps backward/forward.
const SEEK_TIME = 30;
interface VoiceBroadcastPlaybackBodyProps {
    pip?: boolean;
    playback: VoiceBroadcastPlayback;
}
/**
 * Full playback tile for a voice broadcast: header, play/pause control,
 * 30s seek buttons, seek bar and position/time-left clocks.
 * Renders an error row instead of the controls when playback is in the error state.
 *
 * @param pip - when true, applies the picture-in-picture styling and links the header to the room
 */
export const VoiceBroadcastPlaybackBody: React.FC<VoiceBroadcastPlaybackBodyProps> = ({ pip = false, playback }) => {
    const { times, liveness, playbackState, room, sender, toggle } = useVoiceBroadcastPlayback(playback);
    // Seek buttons are only shown while playback has started (not in the Stopped state).
    let seekBackwardButton: ReactElement | null = null;
    let seekForwardButton: ReactElement | null = null;
    if (playbackState !== VoiceBroadcastPlaybackState.Stopped) {
        const onSeekBackwardButtonClick = (): void => {
            // Clamp at 0 so we never seek before the start.
            playback.skipTo(Math.max(0, times.position - SEEK_TIME));
        };
        seekBackwardButton = (
            <SeekButton
                icon={Back30sIcon}
                label={_t("voice_broadcast|30s_backward")}
                onClick={onSeekBackwardButtonClick}
            />
        );
        const onSeekForwardButtonClick = (): void => {
            // Clamp at the duration so we never seek past the end.
            playback.skipTo(Math.min(times.duration, times.position + SEEK_TIME));
        };
        seekForwardButton = (
            <SeekButton
                icon={Forward30sIcon}
                label={_t("voice_broadcast|30s_forward")}
                onClick={onSeekForwardButtonClick}
            />
        );
    }
    const classes = classNames({
        mx_VoiceBroadcastBody: true,
        ["mx_VoiceBroadcastBody--pip"]: pip,
    });
    const content =
        playbackState === VoiceBroadcastPlaybackState.Error ? (
            <VoiceBroadcastError message={playback.errorMessage} />
        ) : (
            <>
                <div className="mx_VoiceBroadcastBody_controls">
                    {seekBackwardButton}
                    <VoiceBroadcastPlaybackControl state={playbackState} onClick={toggle} />
                    {seekForwardButton}
                </div>
                <SeekBar playback={playback} />
                <div className="mx_VoiceBroadcastBody_timerow">
                    <Clock seconds={times.position} />
                    {/* Negative seconds renders the remaining time as a countdown. */}
                    <Clock seconds={-times.timeLeft} />
                </div>
            </>
        );
    return (
        <div className={classes}>
            <VoiceBroadcastHeader
                linkToRoom={pip}
                live={liveness}
                microphoneLabel={sender?.name}
                room={room}
                showBroadcast={playbackState !== VoiceBroadcastPlaybackState.Buffering}
                showBuffering={playbackState === VoiceBroadcastPlaybackState.Buffering}
            />
            {content}
        </div>
    );
};

View File

@ -1,82 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import React, { useRef, useState } from "react";
import { VoiceBroadcastHeader } from "../..";
import AccessibleButton from "../../../components/views/elements/AccessibleButton";
import { VoiceBroadcastPreRecording } from "../../models/VoiceBroadcastPreRecording";
import { Icon as LiveIcon } from "../../../../res/img/compound/live-16px.svg";
import { _t } from "../../../languageHandler";
import { useAudioDeviceSelection } from "../../../hooks/useAudioDeviceSelection";
import { DevicesContextMenu } from "../../../components/views/audio_messages/DevicesContextMenu";
interface Props {
    voiceBroadcastPreRecording: VoiceBroadcastPreRecording;
}
interface State {
    showDeviceSelect: boolean;
    disableStartButton: boolean;
}
/**
 * Picture-in-picture widget shown while a voice broadcast is being set up:
 * lets the user pick an input device and press "Go live" (which disables the
 * button to prevent double starts) or close to cancel.
 */
export const VoiceBroadcastPreRecordingPip: React.FC<Props> = ({ voiceBroadcastPreRecording }) => {
    const pipRef = useRef<HTMLDivElement | null>(null);
    const { currentDevice, currentDeviceLabel, devices, setDevice } = useAudioDeviceSelection();
    const [state, setState] = useState<State>({
        showDeviceSelect: false,
        disableStartButton: false,
    });
    const onDeviceSelect = (device: MediaDeviceInfo): void => {
        setState((state) => ({
            ...state,
            showDeviceSelect: false,
        }));
        setDevice(device);
    };
    const onStartBroadcastClick = (): void => {
        setState((state) => ({
            ...state,
            disableStartButton: true,
        }));
        voiceBroadcastPreRecording.start();
    };
    // Fix: this handler previously spread the render-time `state` value
    // (`setState({ ...state, showDeviceSelect: true })`), which can clobber
    // concurrent updates with stale data. Use a functional update, consistent
    // with the other setState calls in this component.
    const onMicrophoneLineClick = (): void => {
        setState((state) => ({
            ...state,
            showDeviceSelect: true,
        }));
    };
    return (
        <div className="mx_VoiceBroadcastBody mx_VoiceBroadcastBody--pip" ref={pipRef}>
            <VoiceBroadcastHeader
                linkToRoom={true}
                onCloseClick={voiceBroadcastPreRecording.cancel}
                onMicrophoneLineClick={onMicrophoneLineClick}
                room={voiceBroadcastPreRecording.room}
                microphoneLabel={currentDeviceLabel}
                showClose={true}
            />
            <AccessibleButton
                className="mx_VoiceBroadcastBody_blockButton"
                kind="danger"
                onClick={onStartBroadcastClick}
                disabled={state.disableStartButton}
            >
                <LiveIcon className="mx_Icon mx_Icon_16" />
                {_t("voice_broadcast|go_live")}
            </AccessibleButton>
            {state.showDeviceSelect && (
                <DevicesContextMenu
                    containerRef={pipRef}
                    currentDevice={currentDevice}
                    devices={devices}
                    onDeviceSelect={onDeviceSelect}
                />
            )}
        </div>
    );
};

View File

@ -1,31 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import React from "react";
import {
useVoiceBroadcastRecording,
VoiceBroadcastHeader,
VoiceBroadcastRecording,
VoiceBroadcastRecordingConnectionError,
} from "../..";
interface VoiceBroadcastRecordingBodyProps {
recording: VoiceBroadcastRecording;
}
export const VoiceBroadcastRecordingBody: React.FC<VoiceBroadcastRecordingBodyProps> = ({ recording }) => {
const { live, room, sender, recordingState } = useVoiceBroadcastRecording(recording);
return (
<div className="mx_VoiceBroadcastBody">
<VoiceBroadcastHeader live={live ? "live" : "grey"} microphoneLabel={sender?.name} room={room} />
{recordingState === "connection_error" && <VoiceBroadcastRecordingConnectionError />}
</div>
);
};

View File

@ -1,116 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import React, { useRef, useState } from "react";
import PauseIcon from "@vector-im/compound-design-tokens/assets/web/icons/pause-solid";
import MicrophoneIcon from "@vector-im/compound-design-tokens/assets/web/icons/mic-on-solid";
import {
VoiceBroadcastControl,
VoiceBroadcastInfoState,
VoiceBroadcastRecording,
VoiceBroadcastRecordingConnectionError,
VoiceBroadcastRecordingState,
} from "../..";
import { useVoiceBroadcastRecording } from "../../hooks/useVoiceBroadcastRecording";
import { VoiceBroadcastHeader } from "../atoms/VoiceBroadcastHeader";
import { Icon as StopIcon } from "../../../../res/img/compound/stop-16.svg";
import { Icon as RecordIcon } from "../../../../res/img/compound/record-10px.svg";
import { _t } from "../../../languageHandler";
import { useAudioDeviceSelection } from "../../../hooks/useAudioDeviceSelection";
import { DevicesContextMenu } from "../../../components/views/audio_messages/DevicesContextMenu";
import AccessibleButton from "../../../components/views/elements/AccessibleButton";
interface VoiceBroadcastRecordingPipProps {
    recording: VoiceBroadcastRecording;
}
/**
 * Picture-in-picture widget for an ongoing voice broadcast recording.
 * Shows the header (liveness, room, time left), a pause/resume toggle, an input
 * device picker and a stop button; shows a connection error row instead of the
 * controls when the recording state is "connection_error".
 */
export const VoiceBroadcastRecordingPip: React.FC<VoiceBroadcastRecordingPipProps> = ({ recording }) => {
    const pipRef = useRef<HTMLDivElement | null>(null);
    const { live, timeLeft, recordingState, room, stopRecording, toggleRecording } =
        useVoiceBroadcastRecording(recording);
    const { currentDevice, devices, setDevice } = useAudioDeviceSelection();
    // Switches the input device. A live recording is paused and resumed, because
    // the new device is only picked up when the recorder (re-)starts.
    const onDeviceSelect = async (device: MediaDeviceInfo): Promise<void> => {
        setShowDeviceSelect(false);
        if (currentDevice?.deviceId === device.deviceId) {
            // device unchanged
            return;
        }
        setDevice(device);
        if (
            (
                [VoiceBroadcastInfoState.Paused, VoiceBroadcastInfoState.Stopped] as VoiceBroadcastRecordingState[]
            ).includes(recordingState)
        ) {
            // Nothing to do in these cases. Resume will use the selected device.
            return;
        }
        // pause and resume to switch the input device
        await recording.pause();
        await recording.resume();
    };
    const [showDeviceSelect, setShowDeviceSelect] = useState<boolean>(false);
    // Record icon + "resume" while paused; pause icon + "pause" while live.
    const toggleControl =
        recordingState === VoiceBroadcastInfoState.Paused ? (
            <VoiceBroadcastControl
                className="mx_VoiceBroadcastControl-recording"
                onClick={toggleRecording}
                icon={<RecordIcon className="mx_Icon mx_Icon_12" />}
                label={_t("voice_broadcast|resume")}
            />
        ) : (
            <VoiceBroadcastControl
                onClick={toggleRecording}
                icon={<PauseIcon className="mx_Icon mx_Icon_12" />}
                label={_t("voice_broadcast|pause")}
            />
        );
    const controls =
        recordingState === "connection_error" ? (
            <VoiceBroadcastRecordingConnectionError />
        ) : (
            <div className="mx_VoiceBroadcastBody_controls">
                {toggleControl}
                <AccessibleButton
                    onClick={(): void => setShowDeviceSelect(true)}
                    title={_t("voip|change_input_device")}
                >
                    <MicrophoneIcon className="mx_Icon mx_Icon_16 mx_Icon_alert" />
                </AccessibleButton>
                {/* NOTE(review): "Stop Recording" is an untranslated literal while the
                    sibling labels use _t() — presumably a missing i18n key; verify. */}
                <VoiceBroadcastControl
                    icon={<StopIcon className="mx_Icon mx_Icon_16" />}
                    label="Stop Recording"
                    onClick={stopRecording}
                />
            </div>
        );
    return (
        <div className="mx_VoiceBroadcastBody mx_VoiceBroadcastBody--pip" ref={pipRef}>
            <VoiceBroadcastHeader linkToRoom={true} live={live ? "live" : "grey"} room={room} timeLeft={timeLeft} />
            <hr className="mx_VoiceBroadcastBody_divider" />
            {controls}
            {showDeviceSelect && (
                <DevicesContextMenu
                    containerRef={pipRef}
                    currentDevice={currentDevice}
                    devices={devices}
                    onDeviceSelect={onDeviceSelect}
                />
            )}
        </div>
    );
};

View File

@ -1,44 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import React from "react";
import CloseIcon from "@vector-im/compound-design-tokens/assets/web/icons/close";
import {
VoiceBroadcastHeader,
VoiceBroadcastPlayback,
VoiceBroadcastPlaybackControl,
VoiceBroadcastPlaybackState,
} from "../..";
import AccessibleButton from "../../../components/views/elements/AccessibleButton";
import { useVoiceBroadcastPlayback } from "../../hooks/useVoiceBroadcastPlayback";
interface VoiceBroadcastSmallPlaybackBodyProps {
    playback: VoiceBroadcastPlayback;
}
/**
 * Compact picture-in-picture playback tile: header with a middle live badge,
 * the play/pause control and a small close button that stops playback.
 */
export const VoiceBroadcastSmallPlaybackBody: React.FC<VoiceBroadcastSmallPlaybackBodyProps> = ({ playback }) => {
    const { liveness, playbackState, room, sender, toggle } = useVoiceBroadcastPlayback(playback);
    const buffering = playbackState === VoiceBroadcastPlaybackState.Buffering;
    const onCloseClick = (): void => {
        playback.stop();
    };
    return (
        <div className="mx_VoiceBroadcastBody mx_VoiceBroadcastBody--pip mx_VoiceBroadcastBody--small">
            <VoiceBroadcastHeader
                linkToRoom={true}
                live={liveness}
                liveBadgePosition="middle"
                microphoneLabel={sender?.name}
                room={room}
                showBuffering={buffering}
                bufferingPosition="title"
            />
            <VoiceBroadcastPlaybackControl state={playbackState} onClick={toggle} />
            <AccessibleButton onClick={onCloseClick}>
                <CloseIcon className="mx_Icon mx_Icon_8 mx_VoiceBroadcastBody__small-close" />
            </AccessibleButton>
        </div>
    );
};

View File

@ -1,32 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import { useTypedEventEmitterState } from "../../hooks/useEventEmitter";
import { VoiceBroadcastPlayback } from "../models/VoiceBroadcastPlayback";
import {
VoiceBroadcastPlaybacksStore,
VoiceBroadcastPlaybacksStoreEvent,
} from "../stores/VoiceBroadcastPlaybacksStore";
/**
 * React hook exposing the store's current voice broadcast playback.
 * Re-renders on the store's CurrentChanged event; falls back to the store's
 * current value on the initial render.
 */
export const useCurrentVoiceBroadcastPlayback = (
    voiceBroadcastPlaybackStore: VoiceBroadcastPlaybacksStore,
): {
    currentVoiceBroadcastPlayback: VoiceBroadcastPlayback | null;
} => {
    const currentVoiceBroadcastPlayback = useTypedEventEmitterState(
        voiceBroadcastPlaybackStore,
        VoiceBroadcastPlaybacksStoreEvent.CurrentChanged,
        (playback?: VoiceBroadcastPlayback) => playback ?? voiceBroadcastPlaybackStore.getCurrent(),
    );
    return { currentVoiceBroadcastPlayback };
};

View File

@ -1,29 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import { useTypedEventEmitterState } from "../../hooks/useEventEmitter";
import { VoiceBroadcastPreRecordingStore } from "../stores/VoiceBroadcastPreRecordingStore";
import { VoiceBroadcastPreRecording } from "../models/VoiceBroadcastPreRecording";
/**
 * React hook exposing the store's current voice broadcast pre-recording.
 * Re-renders on the store's "changed" event; falls back to the store's current
 * value on the initial render.
 */
export const useCurrentVoiceBroadcastPreRecording = (
    voiceBroadcastPreRecordingStore: VoiceBroadcastPreRecordingStore,
): {
    currentVoiceBroadcastPreRecording: VoiceBroadcastPreRecording | null;
} => {
    const currentVoiceBroadcastPreRecording = useTypedEventEmitterState(
        voiceBroadcastPreRecordingStore,
        "changed",
        (preRecording?: VoiceBroadcastPreRecording) => preRecording ?? voiceBroadcastPreRecordingStore.getCurrent(),
    );
    return { currentVoiceBroadcastPreRecording };
};

View File

@ -1,28 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import { VoiceBroadcastRecording, VoiceBroadcastRecordingsStore, VoiceBroadcastRecordingsStoreEvent } from "..";
import { useTypedEventEmitterState } from "../../hooks/useEventEmitter";
/**
 * React hook exposing the store's current voice broadcast recording.
 * Re-renders on the store's CurrentChanged event; falls back to the store's
 * current value on the initial render.
 */
export const useCurrentVoiceBroadcastRecording = (
    voiceBroadcastRecordingsStore: VoiceBroadcastRecordingsStore,
): {
    currentVoiceBroadcastRecording: VoiceBroadcastRecording | null;
} => {
    const currentVoiceBroadcastRecording = useTypedEventEmitterState(
        voiceBroadcastRecordingsStore,
        VoiceBroadcastRecordingsStoreEvent.CurrentChanged,
        (recording?: VoiceBroadcastRecording) => recording ?? voiceBroadcastRecordingsStore.getCurrent(),
    );
    return { currentVoiceBroadcastRecording };
};

View File

@ -1,39 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import { useContext, useEffect, useMemo, useState } from "react";
import { Room, RoomStateEvent } from "matrix-js-sdk/src/matrix";
import { hasRoomLiveVoiceBroadcast } from "../utils/hasRoomLiveVoiceBroadcast";
import { useTypedEventEmitter } from "../../hooks/useEventEmitter";
import { SDKContext } from "../../contexts/SDKContext";
/**
 * React hook that reports whether `room` currently has a live voice broadcast.
 * Re-checks asynchronously on every room state update; errors from the check
 * are ignored (the previous value is kept). Returns false until a check has
 * succeeded or when no client is available.
 */
export const useHasRoomLiveVoiceBroadcast = (room: Room): boolean => {
    const sdkContext = useContext(SDKContext);
    const [hasLiveVoiceBroadcast, setHasLiveVoiceBroadcast] = useState(false);
    // Memoise the async checker so the effect below doesn't re-run every render.
    const update = useMemo(() => {
        return sdkContext?.client
            ? () => {
                  hasRoomLiveVoiceBroadcast(sdkContext.client!, room).then(
                      ({ hasBroadcast }) => {
                          setHasLiveVoiceBroadcast(hasBroadcast);
                      },
                      () => {}, // no update on error
                  );
              }
            : () => {}; // noop without client
    }, [room, sdkContext, setHasLiveVoiceBroadcast]);
    // Initial check on mount / when the checker changes.
    useEffect(() => {
        update();
    }, [update]);
    // Re-check whenever the room's state changes (e.g. a broadcast starts/stops).
    useTypedEventEmitter(room.currentState, RoomStateEvent.Update, () => update());
    return hasLiveVoiceBroadcast;
};

View File

@ -1,90 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022, 2023 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import { Room, RoomMember } from "matrix-js-sdk/src/matrix";
import { useTypedEventEmitterState } from "../../hooks/useEventEmitter";
import { MatrixClientPeg } from "../../MatrixClientPeg";
import {
VoiceBroadcastLiveness,
VoiceBroadcastPlayback,
VoiceBroadcastPlaybackEvent,
VoiceBroadcastPlaybackState,
VoiceBroadcastPlaybackTimes,
} from "..";
/**
 * React hook exposing a {@link VoiceBroadcastPlayback} to the UI: play times,
 * sender, liveness, playback state, the room, and a toggle callback.
 *
 * Throws if the room or sender of the playback's info event cannot be resolved.
 */
export const useVoiceBroadcastPlayback = (
    playback: VoiceBroadcastPlayback,
): {
    times: {
        duration: number;
        position: number;
        timeLeft: number;
    };
    sender: RoomMember | null;
    liveness: VoiceBroadcastLiveness;
    playbackState: VoiceBroadcastPlaybackState;
    toggle(): void;
    room: Room;
} => {
    const client = MatrixClientPeg.safeGet();

    const room = client.getRoom(playback.infoEvent.getRoomId());
    if (!room) {
        throw new Error(`Voice Broadcast room not found (event ${playback.infoEvent.getId()})`);
    }

    const sender = playback.infoEvent.sender;
    if (!sender) {
        throw new Error(`Voice Broadcast sender not found (event ${playback.infoEvent.getId()})`);
    }

    // Each piece of state falls back to the playback's current value when the
    // emitter has not supplied a payload yet (i.e. on initial render).
    const playbackState = useTypedEventEmitterState(
        playback,
        VoiceBroadcastPlaybackEvent.StateChanged,
        (state?: VoiceBroadcastPlaybackState) => state ?? playback.getState(),
    );

    const times = useTypedEventEmitterState(
        playback,
        VoiceBroadcastPlaybackEvent.TimesChanged,
        (t?: VoiceBroadcastPlaybackTimes) =>
            t ?? {
                duration: playback.durationSeconds,
                position: playback.timeSeconds,
                timeLeft: playback.timeLeftSeconds,
            },
    );

    const liveness = useTypedEventEmitterState(
        playback,
        VoiceBroadcastPlaybackEvent.LivenessChanged,
        (l?: VoiceBroadcastLiveness) => l ?? playback.getLiveness(),
    );

    return {
        times,
        liveness,
        playbackState,
        room,
        sender,
        toggle: (): void => {
            playback.toggle();
        },
    };
};

View File

@ -1,96 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022, 2023 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import { Room, RoomMember } from "matrix-js-sdk/src/matrix";
import React from "react";
import {
VoiceBroadcastInfoState,
VoiceBroadcastRecording,
VoiceBroadcastRecordingEvent,
VoiceBroadcastRecordingState,
} from "..";
import QuestionDialog from "../../components/views/dialogs/QuestionDialog";
import { useTypedEventEmitterState } from "../../hooks/useEventEmitter";
import { _t } from "../../languageHandler";
import { MatrixClientPeg } from "../../MatrixClientPeg";
import Modal from "../../Modal";
/**
 * Shows a confirmation dialog asking whether the user really wants to stop
 * the current voice broadcast.
 *
 * @returns true if the user confirmed; false when declined or dismissed.
 */
const showStopBroadcastingDialog = async (): Promise<boolean> => {
    const dialog = Modal.createDialog(QuestionDialog, {
        title: _t("voice_broadcast|confirm_stop_title"),
        description: <p>{_t("voice_broadcast|confirm_stop_description")}</p>,
        button: _t("voice_broadcast|confirm_stop_affirm"),
    });
    const [confirmed] = await dialog.finished;
    return Boolean(confirmed);
};
/**
 * React hook exposing a {@link VoiceBroadcastRecording} to the UI: liveness,
 * remaining time, recording state, room/sender, and stop/toggle callbacks.
 *
 * Throws if the room or sender of the recording's info event cannot be
 * resolved.
 */
export const useVoiceBroadcastRecording = (
    recording: VoiceBroadcastRecording,
): {
    live: boolean;
    timeLeft: number;
    recordingState: VoiceBroadcastRecordingState;
    room: Room;
    sender: RoomMember | null;
    stopRecording(): void;
    toggleRecording(): void;
} => {
    const client = MatrixClientPeg.safeGet();
    const roomId = recording.infoEvent.getRoomId();
    const room = client.getRoom(roomId);

    if (!room) {
        throw new Error("Unable to find voice broadcast room with Id: " + roomId);
    }

    const sender = recording.infoEvent.sender;

    if (!sender) {
        throw new Error(`Voice Broadcast sender not found (event ${recording.infoEvent.getId()})`);
    }

    // Asks for confirmation before actually stopping the broadcast.
    const stopRecording = async (): Promise<void> => {
        const confirmed = await showStopBroadcastingDialog();

        if (confirmed) {
            await recording.stop();
        }
    };

    // Fall back to the recording's current values before the first event fires.
    const recordingState = useTypedEventEmitterState(
        recording,
        VoiceBroadcastRecordingEvent.StateChanged,
        (state?: VoiceBroadcastRecordingState) => {
            return state ?? recording.getState();
        },
    );

    const timeLeft = useTypedEventEmitterState(
        recording,
        VoiceBroadcastRecordingEvent.TimeLeftChanged,
        (t?: number) => {
            return t ?? recording.getTimeLeft();
        },
    );

    // "live" while the broadcast is started or resumed (not paused/stopped).
    const live = (
        [VoiceBroadcastInfoState.Started, VoiceBroadcastInfoState.Resumed] as VoiceBroadcastRecordingState[]
    ).includes(recordingState);

    return {
        live,
        timeLeft,
        recordingState,
        room,
        sender,
        stopRecording,
        toggleRecording: recording.toggle,
    };
};

View File

@ -1,57 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
/**
* Voice Broadcast module
* {@link https://github.com/vector-im/element-meta/discussions/632}
*/
export * from "./types";
export * from "./models/VoiceBroadcastPlayback";
export * from "./models/VoiceBroadcastPreRecording";
export * from "./models/VoiceBroadcastRecording";
export * from "./audio/VoiceBroadcastRecorder";
export * from "./components/VoiceBroadcastBody";
export * from "./components/atoms/LiveBadge";
export * from "./components/atoms/VoiceBroadcastControl";
export * from "./components/atoms/VoiceBroadcastError";
export * from "./components/atoms/VoiceBroadcastHeader";
export * from "./components/atoms/VoiceBroadcastPlaybackControl";
export * from "./components/atoms/VoiceBroadcastRecordingConnectionError";
export * from "./components/atoms/VoiceBroadcastRoomSubtitle";
export * from "./components/molecules/ConfirmListenBroadcastStopCurrent";
export * from "./components/molecules/VoiceBroadcastPlaybackBody";
export * from "./components/molecules/VoiceBroadcastSmallPlaybackBody";
export * from "./components/molecules/VoiceBroadcastPreRecordingPip";
export * from "./components/molecules/VoiceBroadcastRecordingBody";
export * from "./components/molecules/VoiceBroadcastRecordingPip";
export * from "./hooks/useCurrentVoiceBroadcastPreRecording";
export * from "./hooks/useCurrentVoiceBroadcastRecording";
export * from "./hooks/useHasRoomLiveVoiceBroadcast";
export * from "./hooks/useVoiceBroadcastRecording";
export * from "./stores/VoiceBroadcastPlaybacksStore";
export * from "./stores/VoiceBroadcastPreRecordingStore";
export * from "./stores/VoiceBroadcastRecordingsStore";
export * from "./utils/checkVoiceBroadcastPreConditions";
export * from "./utils/cleanUpBroadcasts";
export * from "./utils/doClearCurrentVoiceBroadcastPlaybackIfStopped";
export * from "./utils/doMaybeSetCurrentVoiceBroadcastPlayback";
export * from "./utils/getChunkLength";
export * from "./utils/getMaxBroadcastLength";
export * from "./utils/hasRoomLiveVoiceBroadcast";
export * from "./utils/isRelatedToVoiceBroadcast";
export * from "./utils/isVoiceBroadcastStartedEvent";
export * from "./utils/findRoomLiveVoiceBroadcastFromUserAndDevice";
export * from "./utils/retrieveStartedInfoEvent";
export * from "./utils/shouldDisplayAsVoiceBroadcastRecordingTile";
export * from "./utils/shouldDisplayAsVoiceBroadcastTile";
export * from "./utils/shouldDisplayAsVoiceBroadcastStoppedText";
export * from "./utils/startNewVoiceBroadcastRecording";
export * from "./utils/textForVoiceBroadcastStoppedEvent";
export * from "./utils/textForVoiceBroadcastStoppedEventWithoutLink";
export * from "./utils/VoiceBroadcastResumer";

View File

@ -1,651 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import {
EventType,
MatrixClient,
MatrixEvent,
MatrixEventEvent,
MsgType,
RelationType,
TypedEventEmitter,
} from "matrix-js-sdk/src/matrix";
import { SimpleObservable } from "matrix-widget-api";
import { logger } from "matrix-js-sdk/src/logger";
import { defer, IDeferred } from "matrix-js-sdk/src/utils";
import { Playback, PlaybackInterface, PlaybackState } from "../../audio/Playback";
import { PlaybackManager } from "../../audio/PlaybackManager";
import { UPDATE_EVENT } from "../../stores/AsyncStore";
import { MediaEventHelper } from "../../utils/MediaEventHelper";
import { IDestroyable } from "../../utils/IDestroyable";
import {
VoiceBroadcastLiveness,
VoiceBroadcastInfoEventType,
VoiceBroadcastInfoState,
VoiceBroadcastInfoEventContent,
VoiceBroadcastRecordingsStore,
showConfirmListenBroadcastStopCurrentDialog,
} from "..";
import { RelationsHelper, RelationsHelperEvent } from "../../events/RelationsHelper";
import { VoiceBroadcastChunkEvents } from "../utils/VoiceBroadcastChunkEvents";
import { determineVoiceBroadcastLiveness } from "../utils/determineVoiceBroadcastLiveness";
import { _t } from "../../languageHandler";
/** Lifecycle states of a voice broadcast playback. Error is a final state. */
export enum VoiceBroadcastPlaybackState {
    Paused = "pause",
    Playing = "playing",
    Stopped = "stopped",
    // waiting for more chunks while the broadcast is still live
    Buffering = "buffering",
    Error = "error",
}
/** Events emitted by {@link VoiceBroadcastPlayback}. */
export enum VoiceBroadcastPlaybackEvent {
    TimesChanged = "times_changed",
    LivenessChanged = "liveness_changed",
    StateChanged = "state_changed",
    InfoStateChanged = "info_state_changed",
}
/** Payload of {@link VoiceBroadcastPlaybackEvent.TimesChanged}; all values in seconds. */
export type VoiceBroadcastPlaybackTimes = {
    duration: number;
    position: number;
    timeLeft: number;
};

/** Listener signatures for the events emitted by {@link VoiceBroadcastPlayback}. */
interface EventMap {
    [VoiceBroadcastPlaybackEvent.TimesChanged]: (times: VoiceBroadcastPlaybackTimes) => void;
    [VoiceBroadcastPlaybackEvent.LivenessChanged]: (liveness: VoiceBroadcastLiveness) => void;
    [VoiceBroadcastPlaybackEvent.StateChanged]: (
        state: VoiceBroadcastPlaybackState,
        playback: VoiceBroadcastPlayback,
    ) => void;
    [VoiceBroadcastPlaybackEvent.InfoStateChanged]: (state: VoiceBroadcastInfoState) => void;
}
/**
 * Playback model for a voice broadcast.
 *
 * Collects the broadcast's audio chunk events (events related to the info
 * event), sequences them into one seekable playback, and derives state and
 * liveness from the info events. Implements {@link PlaybackInterface} so
 * generic audio UI can drive it.
 */
export class VoiceBroadcastPlayback
    extends TypedEventEmitter<VoiceBroadcastPlaybackEvent, EventMap>
    implements IDestroyable, PlaybackInterface
{
    private state = VoiceBroadcastPlaybackState.Stopped;
    private chunkEvents = new VoiceBroadcastChunkEvents();
    /** @var Map: event Id → undecryptable event */
    private utdChunkEvents: Map<string, MatrixEvent> = new Map();
    /** @var Map: chunk event Id → prepared Playback instance */
    private playbacks = new Map<string, Playback>();
    private currentlyPlaying: MatrixEvent | null = null;
    /** @var total duration of all chunks in milliseconds */
    private duration = 0;
    /** @var current playback position in milliseconds */
    private position = 0;
    public readonly liveData = new SimpleObservable<number[]>();
    private liveness: VoiceBroadcastLiveness = "not-live";

    // set via addInfoEvent() in constructor
    private infoState!: VoiceBroadcastInfoState;
    private lastInfoEvent!: MatrixEvent;

    // set via setUpRelationsHelper() in constructor
    private chunkRelationHelper!: RelationsHelper;
    private infoRelationHelper!: RelationsHelper;

    // pending skip target (seconds) and in-flight skip promise; see skipTo()
    private skipToNext?: number;
    private skipToDeferred?: IDeferred<void>;

    public constructor(
        public readonly infoEvent: MatrixEvent,
        private client: MatrixClient,
        private recordings: VoiceBroadcastRecordingsStore,
    ) {
        super();
        this.addInfoEvent(this.infoEvent);
        this.infoEvent.on(MatrixEventEvent.BeforeRedaction, this.onBeforeRedaction);
        this.setUpRelationsHelper();
    }

    /** Fetches and subscribes to the info-state and audio-chunk relations of the info event. */
    private async setUpRelationsHelper(): Promise<void> {
        this.infoRelationHelper = new RelationsHelper(
            this.infoEvent,
            RelationType.Reference,
            VoiceBroadcastInfoEventType,
            this.client,
        );
        this.infoRelationHelper.getCurrent().forEach(this.addInfoEvent);

        if (this.infoState !== VoiceBroadcastInfoState.Stopped) {
            // Only required if not stopped. Stopped is the final state.
            this.infoRelationHelper.on(RelationsHelperEvent.Add, this.addInfoEvent);

            try {
                await this.infoRelationHelper.emitFetchCurrent();
            } catch (err) {
                logger.warn("error fetching server side relation for voice broadcast info", err);
                // fall back to local events
                this.infoRelationHelper.emitCurrent();
            }
        }

        this.chunkRelationHelper = new RelationsHelper(
            this.infoEvent,
            RelationType.Reference,
            EventType.RoomMessage,
            this.client,
        );
        this.chunkRelationHelper.on(RelationsHelperEvent.Add, this.addChunkEvent);

        try {
            // TODO Michael W: only fetch events if needed, blocked by PSF-1708
            await this.chunkRelationHelper.emitFetchCurrent();
        } catch (err) {
            logger.warn("error fetching server side relation for voice broadcast chunks", err);
            // fall back to local events
            this.chunkRelationHelper.emitCurrent();
        }
    }

    /** Registers an audio chunk event; returns whether the event was accepted. */
    private addChunkEvent = async (event: MatrixEvent): Promise<boolean> => {
        if (!event.getId() && !event.getTxnId()) {
            // skip events without id and txn id
            return false;
        }

        if (event.isDecryptionFailure()) {
            this.onChunkEventDecryptionFailure(event);
            return false;
        }

        if (event.getContent()?.msgtype !== MsgType.Audio) {
            // skip non-audio event
            return false;
        }

        this.chunkEvents.addEvent(event);
        this.setDuration(this.chunkEvents.getLength());

        // a chunk arriving while buffering means we can (re)start playing
        if (this.getState() === VoiceBroadcastPlaybackState.Buffering) {
            await this.startOrPlayNext();
        }

        return true;
    };

    // Remember the undecryptable chunk and enter the error state until it decrypts.
    private onChunkEventDecryptionFailure = (event: MatrixEvent): void => {
        const eventId = event.getId();

        if (!eventId) {
            // This should not happen, as the existence of the Id is checked before the call.
            // Log anyway and return.
            logger.warn("Broadcast chunk decryption failure for event without Id", {
                broadcast: this.infoEvent.getId(),
            });
            return;
        }

        if (!this.utdChunkEvents.has(eventId)) {
            event.once(MatrixEventEvent.Decrypted, this.onChunkEventDecrypted);
        }

        this.utdChunkEvents.set(eventId, event);
        this.setError();
    };

    private onChunkEventDecrypted = async (event: MatrixEvent): Promise<void> => {
        const eventId = event.getId();

        if (!eventId) {
            // This should not happen, as the existence of the Id is checked before the call.
            // Log anyway and return.
            logger.warn("Broadcast chunk decrypted for event without Id", { broadcast: this.infoEvent.getId() });
            return;
        }

        this.utdChunkEvents.delete(eventId);
        await this.addChunkEvent(event);

        if (this.utdChunkEvents.size === 0) {
            // no more UTD events, recover from error to paused
            this.setState(VoiceBroadcastPlaybackState.Paused);
        }
    };

    private startOrPlayNext = async (): Promise<void> => {
        if (this.currentlyPlaying) {
            return this.playNext();
        }

        return await this.start();
    };

    // Apply an info event (ignoring older ones) and derive the broadcast state from it.
    private addInfoEvent = (event: MatrixEvent): void => {
        if (this.lastInfoEvent && this.lastInfoEvent.getTs() >= event.getTs()) {
            // Only handle newer events
            return;
        }

        const state = event.getContent()?.state;

        if (!Object.values(VoiceBroadcastInfoState).includes(state)) {
            // Do not handle unknown voice broadcast states
            return;
        }

        this.lastInfoEvent = event;
        this.setInfoState(state);
    };

    // Redacting the info event removes the broadcast: stop and tear down.
    private onBeforeRedaction = (): void => {
        if (this.getState() !== VoiceBroadcastPlaybackState.Stopped) {
            this.stop();
            // destroy cleans up everything
            this.destroy();
        }
    };

    private async tryLoadPlayback(chunkEvent: MatrixEvent): Promise<void> {
        try {
            return await this.loadPlayback(chunkEvent);
        } catch (err: any) {
            logger.warn("Unable to load broadcast playback", {
                message: err.message,
                broadcastId: this.infoEvent.getId(),
                chunkId: chunkEvent.getId(),
            });
            this.setError();
        }
    }

    /** Downloads the chunk's audio, prepares a Playback for it and wires up its callbacks. */
    private async loadPlayback(chunkEvent: MatrixEvent): Promise<void> {
        const eventId = chunkEvent.getId();

        if (!eventId) {
            throw new Error("Broadcast chunk event without Id occurred");
        }

        const helper = new MediaEventHelper(chunkEvent);
        const blob = await helper.sourceBlob.value;
        const buffer = await blob.arrayBuffer();
        const playback = PlaybackManager.instance.createPlaybackInstance(buffer);
        await playback.prepare();
        playback.clockInfo.populatePlaceholdersFrom(chunkEvent);
        this.playbacks.set(eventId, playback);
        playback.on(UPDATE_EVENT, (state) => this.onPlaybackStateChange(chunkEvent, state));
        playback.clockInfo.liveData.onUpdate(([position]) => {
            this.onPlaybackPositionUpdate(chunkEvent, position);
        });
    }

    private unloadPlayback(event: MatrixEvent): void {
        const playback = this.playbacks.get(event.getId()!);
        if (!playback) return;

        playback.destroy();
        this.playbacks.delete(event.getId()!);
    }

    private onPlaybackPositionUpdate = (event: MatrixEvent, position: number): void => {
        if (event !== this.currentlyPlaying) return;

        const newPosition = this.chunkEvents.getLengthTo(event) + position * 1000; // observable sends seconds

        // do not jump backwards - this can happen when transiting from one to another chunk
        if (newPosition < this.position) return;

        this.setPosition(newPosition);
    };

    private setDuration(duration: number): void {
        if (this.duration === duration) return;

        this.duration = duration;
        this.emitTimesChanged();
        this.liveData.update([this.timeSeconds, this.durationSeconds]);
    }

    private setPosition(position: number): void {
        if (this.position === position) return;

        this.position = position;
        this.emitTimesChanged();
        this.liveData.update([this.timeSeconds, this.durationSeconds]);
    }

    private emitTimesChanged(): void {
        this.emit(VoiceBroadcastPlaybackEvent.TimesChanged, {
            duration: this.durationSeconds,
            position: this.timeSeconds,
            timeLeft: this.timeLeftSeconds,
        });
    }

    // Advance to the next chunk when the currently playing chunk stops.
    private onPlaybackStateChange = async (event: MatrixEvent, newState: PlaybackState): Promise<void> => {
        if (event !== this.currentlyPlaying) return;
        if (newState !== PlaybackState.Stopped) return;

        await this.playNext();
        this.unloadPlayback(event);
    };

    private async playNext(): Promise<void> {
        if (!this.currentlyPlaying) return;

        const next = this.chunkEvents.getNext(this.currentlyPlaying);

        if (next) {
            return this.playEvent(next);
        }

        if (
            this.getInfoState() === VoiceBroadcastInfoState.Stopped &&
            this.chunkEvents.getSequenceForEvent(this.currentlyPlaying) === this.lastChunkSequence
        ) {
            this.stop();
        } else {
            // No more chunks available, although the broadcast is not finished → enter buffering state.
            this.setState(VoiceBroadcastPlaybackState.Buffering);
        }
    }

    /**
     * @returns {number} The last chunk sequence from the latest info event.
     *          Falls back to the length of received chunks if the info event does not provide the number.
     */
    private get lastChunkSequence(): number {
        return (
            this.lastInfoEvent.getContent<VoiceBroadcastInfoEventContent>()?.last_chunk_sequence ||
            this.chunkEvents.getNumberOfEvents()
        );
    }

    private async playEvent(event: MatrixEvent): Promise<void> {
        this.setState(VoiceBroadcastPlaybackState.Playing);
        this.currentlyPlaying = event;
        const playback = await this.tryGetOrLoadPlaybackForEvent(event);
        playback?.play();
    }

    private async tryGetOrLoadPlaybackForEvent(event: MatrixEvent): Promise<Playback | undefined> {
        try {
            return await this.getOrLoadPlaybackForEvent(event);
        } catch (err: any) {
            logger.warn("Unable to load broadcast playback", {
                message: err.message,
                broadcastId: this.infoEvent.getId(),
                chunkId: event.getId(),
            });
            this.setError();
        }
    }

    private async getOrLoadPlaybackForEvent(event: MatrixEvent): Promise<Playback | undefined> {
        const eventId = event.getId();

        if (!eventId) {
            throw new Error("Broadcast chunk event without Id occurred");
        }

        if (!this.playbacks.has(eventId)) {
            // set to buffering while loading the chunk data
            const currentState = this.getState();
            this.setState(VoiceBroadcastPlaybackState.Buffering);
            await this.loadPlayback(event);
            this.setState(currentState);
        }

        const playback = this.playbacks.get(eventId);

        if (!playback) {
            throw new Error(`Unable to find playback for event ${event.getId()}`);
        }

        // try to load the playback for the next event for a smooth(er) playback
        const nextEvent = this.chunkEvents.getNext(event);
        if (nextEvent) this.tryLoadPlayback(nextEvent);

        return playback;
    }

    private getCurrentPlayback(): Playback | undefined {
        if (!this.currentlyPlaying) return;
        return this.playbacks.get(this.currentlyPlaying.getId()!);
    }

    public getLiveness(): VoiceBroadcastLiveness {
        return this.liveness;
    }

    private setLiveness(liveness: VoiceBroadcastLiveness): void {
        if (this.liveness === liveness) return;

        this.liveness = liveness;
        this.emit(VoiceBroadcastPlaybackEvent.LivenessChanged, liveness);
    }

    // NOTE(review): always reports Playing regardless of the internal state —
    // presumably to satisfy PlaybackInterface consumers; confirm before relying on it.
    public get currentState(): PlaybackState {
        return PlaybackState.Playing;
    }

    public get timeSeconds(): number {
        return this.position / 1000;
    }

    public get durationSeconds(): number {
        return this.duration / 1000;
    }

    public get timeLeftSeconds(): number {
        // Sometimes the meta data and the audio files are a little bit out of sync.
        // Be sure it never returns a negative value.
        return Math.max(0, Math.round(this.durationSeconds) - this.timeSeconds);
    }

    /**
     * Seeks to the given position (seconds). Concurrent calls coalesce: the
     * latest requested position wins and all callers await the same promise.
     */
    public async skipTo(timeSeconds: number): Promise<void> {
        this.skipToNext = timeSeconds;

        if (this.skipToDeferred) {
            // Skip to position is already in progress. Return the promise for that.
            return this.skipToDeferred.promise;
        }

        this.skipToDeferred = defer();

        while (this.skipToNext !== undefined) {
            // Skip to position until skipToNext is undefined.
            // skipToNext can be set if skipTo is called while already skipping.
            const skipToNext = this.skipToNext;
            this.skipToNext = undefined;
            await this.doSkipTo(skipToNext);
        }

        this.skipToDeferred.resolve();
        this.skipToDeferred = undefined;
    }

    private async doSkipTo(timeSeconds: number): Promise<void> {
        const time = timeSeconds * 1000;
        const event = this.chunkEvents.findByTime(time);

        if (!event) {
            logger.warn("voice broadcast chunk event to skip to not found");
            return;
        }

        const currentPlayback = this.getCurrentPlayback();
        const skipToPlayback = await this.tryGetOrLoadPlaybackForEvent(event);
        const currentPlaybackEvent = this.currentlyPlaying;

        if (!skipToPlayback) {
            logger.warn("voice broadcast chunk to skip to not found", event);
            return;
        }

        this.currentlyPlaying = event;

        if (currentPlayback && currentPlaybackEvent && currentPlayback !== skipToPlayback) {
            // only stop and unload the playback here without triggering other effects, e.g. play next
            // NOTE(review): loadPlayback() registers an anonymous wrapper around
            // onPlaybackStateChange, so this off()/on() pair may not actually detach
            // that wrapper — verify the intended listener handling.
            currentPlayback.off(UPDATE_EVENT, this.onPlaybackStateChange);
            await currentPlayback.stop();
            currentPlayback.on(UPDATE_EVENT, this.onPlaybackStateChange);
            this.unloadPlayback(currentPlaybackEvent);
        }

        const offsetInChunk = time - this.chunkEvents.getLengthTo(event);
        await skipToPlayback.skipTo(offsetInChunk / 1000);

        if (this.state === VoiceBroadcastPlaybackState.Playing && !skipToPlayback.isPlaying) {
            await skipToPlayback.play();
        }

        this.setPosition(time);
    }

    /**
     * Starts playing. For an ended broadcast playback begins at the first
     * chunk; for an ongoing one it begins at the latest chunk. May first ask
     * the user to stop an own in-progress recording.
     */
    public async start(): Promise<void> {
        if (this.state === VoiceBroadcastPlaybackState.Playing) return;

        const currentRecording = this.recordings.getCurrent();

        if (currentRecording && currentRecording.getState() !== VoiceBroadcastInfoState.Stopped) {
            const shouldStopRecording = await showConfirmListenBroadcastStopCurrentDialog();

            if (!shouldStopRecording) {
                // keep recording
                return;
            }

            await this.recordings.getCurrent()?.stop();
        }

        const chunkEvents = this.chunkEvents.getEvents();

        const toPlay =
            this.getInfoState() === VoiceBroadcastInfoState.Stopped
                ? chunkEvents[0] // start at the beginning for an ended voice broadcast
                : chunkEvents[chunkEvents.length - 1]; // start at the current chunk for an ongoing voice broadcast

        if (toPlay) {
            return this.playEvent(toPlay);
        }

        this.setState(VoiceBroadcastPlaybackState.Buffering);
    }

    public stop(): void {
        // error is a final state
        if (this.getState() === VoiceBroadcastPlaybackState.Error) return;

        this.setState(VoiceBroadcastPlaybackState.Stopped);
        this.getCurrentPlayback()?.stop();
        this.currentlyPlaying = null;
        this.setPosition(0);
    }

    public pause(): void {
        // error is a final state
        if (this.getState() === VoiceBroadcastPlaybackState.Error) return;

        // stopped voice broadcasts cannot be paused
        if (this.getState() === VoiceBroadcastPlaybackState.Stopped) return;

        this.setState(VoiceBroadcastPlaybackState.Paused);
        this.getCurrentPlayback()?.pause();
    }

    public resume(): void {
        // error is a final state
        if (this.getState() === VoiceBroadcastPlaybackState.Error) return;

        if (!this.currentlyPlaying) {
            // no playback to resume, start from the beginning
            this.start();
            return;
        }

        this.setState(VoiceBroadcastPlaybackState.Playing);
        this.getCurrentPlayback()?.play();
    }

    /**
     * Toggles the playback:
     * stopped → playing
     * playing → paused
     * paused → playing
     */
    public async toggle(): Promise<void> {
        // error is a final state
        if (this.getState() === VoiceBroadcastPlaybackState.Error) return;

        if (this.state === VoiceBroadcastPlaybackState.Stopped) {
            await this.start();
            return;
        }

        if (this.state === VoiceBroadcastPlaybackState.Paused) {
            this.resume();
            return;
        }

        this.pause();
    }

    public getState(): VoiceBroadcastPlaybackState {
        return this.state;
    }

    private setState(state: VoiceBroadcastPlaybackState): void {
        if (this.state === state) {
            return;
        }

        this.state = state;
        this.emit(VoiceBroadcastPlaybackEvent.StateChanged, state, this);
    }

    /**
     * Set error state. Stop current playback, if any.
     */
    private setError(): void {
        this.setState(VoiceBroadcastPlaybackState.Error);
        this.getCurrentPlayback()?.stop();
        this.currentlyPlaying = null;
        this.setPosition(0);
    }

    public getInfoState(): VoiceBroadcastInfoState {
        return this.infoState;
    }

    private setInfoState(state: VoiceBroadcastInfoState): void {
        if (this.infoState === state) {
            return;
        }

        this.infoState = state;
        this.emit(VoiceBroadcastPlaybackEvent.InfoStateChanged, state);
        this.setLiveness(determineVoiceBroadcastLiveness(this.infoState));
    }

    /** Human-readable reason for the error state; empty string when not in error. */
    public get errorMessage(): string {
        if (this.getState() !== VoiceBroadcastPlaybackState.Error) return "";
        if (this.utdChunkEvents.size) return _t("voice_broadcast|failed_decrypt");
        return _t("voice_broadcast|failed_generic");
    }

    public destroy(): void {
        for (const [, utdEvent] of this.utdChunkEvents) {
            utdEvent.off(MatrixEventEvent.Decrypted, this.onChunkEventDecrypted);
        }
        this.utdChunkEvents.clear();

        this.chunkRelationHelper.destroy();
        this.infoRelationHelper.destroy();
        this.removeAllListeners();

        this.chunkEvents = new VoiceBroadcastChunkEvents();
        this.playbacks.forEach((p) => p.destroy());
        this.playbacks = new Map<string, Playback>();
    }
}

View File

@ -1,48 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import { MatrixClient, Room, RoomMember, TypedEventEmitter } from "matrix-js-sdk/src/matrix";
import { IDestroyable } from "../../utils/IDestroyable";
import { VoiceBroadcastPlaybacksStore } from "../stores/VoiceBroadcastPlaybacksStore";
import { VoiceBroadcastRecordingsStore } from "../stores/VoiceBroadcastRecordingsStore";
import { startNewVoiceBroadcastRecording } from "../utils/startNewVoiceBroadcastRecording";
// Emitted when the pre-recording should be disposed (recording started or cancelled).
type VoiceBroadcastPreRecordingEvent = "dismiss";

interface EventMap {
    dismiss: (voiceBroadcastPreRecording: VoiceBroadcastPreRecording) => void;
}
/**
 * Represents the phase before a voice broadcast recording has actually
 * started (e.g. while the user is still in the pre-recording UI).
 * Emits "dismiss" when it should be torn down — either because the recording
 * started or because the user cancelled.
 */
export class VoiceBroadcastPreRecording
    extends TypedEventEmitter<VoiceBroadcastPreRecordingEvent, EventMap>
    implements IDestroyable
{
    public constructor(
        public room: Room,
        public sender: RoomMember,
        private client: MatrixClient,
        private playbacksStore: VoiceBroadcastPlaybacksStore,
        private recordingsStore: VoiceBroadcastRecordingsStore,
    ) {
        super();
    }

    /** Starts the actual broadcast recording, then dismisses this pre-recording. */
    public start = async (): Promise<void> => {
        await startNewVoiceBroadcastRecording(this.room, this.client, this.playbacksStore, this.recordingsStore);
        this.emit("dismiss", this);
    };

    /** Aborts without starting a recording. */
    public cancel = (): void => {
        this.emit("dismiss", this);
    };

    public destroy(): void {
        this.removeAllListeners();
    }
}

View File

@ -1,441 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import { logger } from "matrix-js-sdk/src/logger";
import {
ClientEvent,
ClientEventHandlerMap,
EventType,
MatrixClient,
MatrixEvent,
MatrixEventEvent,
MsgType,
RelationType,
TypedEventEmitter,
} from "matrix-js-sdk/src/matrix";
import { AudioContent, EncryptedFile } from "matrix-js-sdk/src/types";
import {
ChunkRecordedPayload,
createVoiceBroadcastRecorder,
getMaxBroadcastLength,
VoiceBroadcastInfoEventContent,
VoiceBroadcastInfoEventType,
VoiceBroadcastInfoState,
VoiceBroadcastRecorder,
VoiceBroadcastRecorderEvent,
} from "..";
import { uploadFile } from "../../ContentMessages";
import { createVoiceMessageContent } from "../../utils/createVoiceMessageContent";
import { IDestroyable } from "../../utils/IDestroyable";
import dis from "../../dispatcher/dispatcher";
import { ActionPayload } from "../../dispatcher/payloads";
import { VoiceBroadcastChunkEvents } from "../utils/VoiceBroadcastChunkEvents";
import { RelationsHelper, RelationsHelperEvent } from "../../events/RelationsHelper";
import { createReconnectedListener } from "../../utils/connection";
import { localNotificationsAreSilenced } from "../../utils/notifications";
import { BackgroundAudio } from "../../audio/BackgroundAudio";
/** Events emitted by {@link VoiceBroadcastRecording}. */
export enum VoiceBroadcastRecordingEvent {
    // NOTE(review): the raw value "liveness_changed" looks like a copy/paste from
    // VoiceBroadcastPlaybackEvent; consumers reference the enum member, so the
    // string is presumably never matched directly — confirm before renaming.
    StateChanged = "liveness_changed",
    TimeLeftChanged = "time_left_changed",
}
/** Recording state: any broadcast info state, or "connection_error" while sends are failing. */
export type VoiceBroadcastRecordingState = VoiceBroadcastInfoState | "connection_error";

/** Listener signatures for the events emitted by {@link VoiceBroadcastRecording}. */
interface EventMap {
    [VoiceBroadcastRecordingEvent.StateChanged]: (state: VoiceBroadcastRecordingState) => void;
    [VoiceBroadcastRecordingEvent.TimeLeftChanged]: (timeLeft: number) => void;
}
/**
 * Models an ongoing voice broadcast recording: tracks its state and remaining
 * time, collects the already-sent chunk events, and queues failed actions for
 * retry after a reconnect.
 */
export class VoiceBroadcastRecording
    extends TypedEventEmitter<VoiceBroadcastRecordingEvent, EventMap>
    implements IDestroyable
{
    private state: VoiceBroadcastRecordingState;
    private recorder: VoiceBroadcastRecorder | null = null;
    private dispatcherRef: string;
    private chunkEvents = new VoiceBroadcastChunkEvents();
    private chunkRelationHelper: RelationsHelper;
    // maximum broadcast length and remaining time, from getMaxBroadcastLength()
    private maxLength: number;
    private timeLeft: number;
    // actions that failed (e.g. while offline), retried in order on reconnect
    private toRetry: Array<() => Promise<void>> = [];
    private reconnectedListener: ClientEventHandlerMap[ClientEvent.Sync];
    private roomId: string;
    private infoEventId: string;
    private backgroundAudio = new BackgroundAudio();

    /**
     * Broadcast chunks have a sequence number to bring them in the correct order and to know if a message is missing.
     * This variable holds the last sequence number.
     * Starts with 0 because there is no chunk at the beginning of a broadcast.
     * Will be incremented when a chunk message is created.
     */
    private sequence = 0;
/**
 * @param infoEvent the broadcast's info event (must have an Id and room Id)
 * @param client client used for sending/receiving events
 * @param initialState optional known state; otherwise derived from related info events
 */
public constructor(
    public readonly infoEvent: MatrixEvent,
    private client: MatrixClient,
    initialState?: VoiceBroadcastInfoState,
) {
    super();
    this.maxLength = getMaxBroadcastLength();
    this.timeLeft = this.maxLength;
    this.infoEventId = this.determineEventIdFromInfoEvent();
    this.roomId = this.determineRoomIdFromInfoEvent();

    if (initialState) {
        this.state = initialState;
    } else {
        this.state = this.determineInitialStateFromInfoEvent();
    }

    // TODO Michael W: listen for state updates

    this.infoEvent.on(MatrixEventEvent.BeforeRedaction, this.onBeforeRedaction);
    this.dispatcherRef = dis.register(this.onAction);
    this.chunkRelationHelper = this.initialiseChunkEventRelation();
    this.reconnectedListener = createReconnectedListener(this.onReconnect);
    this.client.on(ClientEvent.Sync, this.reconnectedListener);
}
/** Subscribes to audio chunk events related to the info event and fetches existing ones. */
private initialiseChunkEventRelation(): RelationsHelper {
    const relationsHelper = new RelationsHelper(
        this.infoEvent,
        RelationType.Reference,
        EventType.RoomMessage,
        this.client,
    );
    relationsHelper.on(RelationsHelperEvent.Add, this.onChunkEvent);

    relationsHelper.emitFetchCurrent().catch((err) => {
        logger.warn("error fetching server side relation for voice broadcast chunks", err);
        // fall back to local events
        relationsHelper.emitCurrent();
    });

    return relationsHelper;
}
// Collect audio chunk events; ignore events without id/txn id and non-audio events.
private onChunkEvent = (event: MatrixEvent): void => {
    if (
        (!event.getId() && !event.getTxnId()) ||
        event.getContent()?.msgtype !== MsgType.Audio // don't add non-audio event
    ) {
        return;
    }

    this.chunkEvents.addEvent(event);
};
/** @returns the info event's Id. @throws if the event has no Id. */
private determineEventIdFromInfoEvent(): string {
    const infoEventId = this.infoEvent.getId();

    if (!infoEventId) {
        throw new Error("Cannot create broadcast for info event without Id.");
    }

    return infoEventId;
}
/** @returns the info event's room Id. @throws if the event has no room Id. */
private determineRoomIdFromInfoEvent(): string {
    const roomId = this.infoEvent.getRoomId();

    if (!roomId) {
        throw new Error(`Cannot create broadcast for unknown room (info event ${this.infoEventId})`);
    }

    return roomId;
}
/**
* Determines the initial broadcast state.
* Checks all related events. If one has the "stopped" state stopped, else started.
*/
private determineInitialStateFromInfoEvent(): VoiceBroadcastRecordingState {
const room = this.client.getRoom(this.roomId);
const relations = room
?.getUnfilteredTimelineSet()
?.relations?.getChildEventsForEvent(this.infoEventId, RelationType.Reference, VoiceBroadcastInfoEventType);
const relatedEvents = relations?.getRelations();
return !relatedEvents?.find((event: MatrixEvent) => {
return event.getContent()?.state === VoiceBroadcastInfoState.Stopped;
})
? VoiceBroadcastInfoState.Started
: VoiceBroadcastInfoState.Stopped;
}
    /** Returns the remaining broadcast time in seconds. */
    public getTimeLeft(): number {
        return this.timeLeft;
    }
    /**
     * Retries failed actions on reconnect.
     * Runs the queued retry callbacks in order and stops at the first one that
     * fails again. If everything succeeded the broadcast recovers from the
     * connection_error state into paused.
     */
    private onReconnect = async (): Promise<void> => {
        // Do nothing if not in connection_error state.
        if (this.state !== "connection_error") return;

        // Copy the array, so that it is possible to remove elements from it while iterating over the original.
        const toRetryCopy = [...this.toRetry];

        for (const retryFn of this.toRetry) {
            try {
                await retryFn();
                // Successfully retried. Remove from array copy.
                toRetryCopy.splice(toRetryCopy.indexOf(retryFn), 1);
            } catch {
                // The current retry callback failed. Stop the loop.
                break;
            }
        }

        this.toRetry = toRetryCopy;

        if (this.toRetry.length === 0) {
            // Everything has been successfully retried. Recover from error state to paused.
            await this.pause();
        }
    };
private async setTimeLeft(timeLeft: number): Promise<void> {
if (timeLeft <= 0) {
// time is up - stop the recording
return await this.stop();
}
// do never increase time left; no action if equals
if (timeLeft >= this.timeLeft) return;
this.timeLeft = timeLeft;
this.emit(VoiceBroadcastRecordingEvent.TimeLeftChanged, timeLeft);
}
    /** Starts the broadcast by starting the (lazily created) recorder. */
    public async start(): Promise<void> {
        return this.getRecorder().start();
    }
    /**
     * Stops the broadcast: sets the state to stopped, stops the recorder
     * (sending the last chunk) and sends a "stopped" info state event.
     * No-op if already stopped.
     */
    public async stop(): Promise<void> {
        if (this.state === VoiceBroadcastInfoState.Stopped) return;

        this.setState(VoiceBroadcastInfoState.Stopped);
        await this.stopRecorder();
        await this.sendInfoStateEvent(VoiceBroadcastInfoState.Stopped);
    }
public async pause(): Promise<void> {
// stopped or already paused recordings cannot be paused
if (
(
[VoiceBroadcastInfoState.Stopped, VoiceBroadcastInfoState.Paused] as VoiceBroadcastRecordingState[]
).includes(this.state)
)
return;
this.setState(VoiceBroadcastInfoState.Paused);
await this.stopRecorder();
await this.sendInfoStateEvent(VoiceBroadcastInfoState.Paused);
}
    /** Resumes a paused broadcast; no-op for any other state. */
    public async resume(): Promise<void> {
        if (this.state !== VoiceBroadcastInfoState.Paused) return;

        this.setState(VoiceBroadcastInfoState.Resumed);
        await this.getRecorder().start();
        await this.sendInfoStateEvent(VoiceBroadcastInfoState.Resumed);
    }
public toggle = async (): Promise<void> => {
if (this.getState() === VoiceBroadcastInfoState.Paused) return this.resume();
if (
(
[VoiceBroadcastInfoState.Started, VoiceBroadcastInfoState.Resumed] as VoiceBroadcastRecordingState[]
).includes(this.getState())
) {
return this.pause();
}
};
    /** Returns the current recording state. */
    public getState(): VoiceBroadcastRecordingState {
        return this.state;
    }
    /** Lazily creates the recorder and subscribes to its chunk and length events. */
    private getRecorder(): VoiceBroadcastRecorder {
        if (!this.recorder) {
            this.recorder = createVoiceBroadcastRecorder();
            this.recorder.on(VoiceBroadcastRecorderEvent.ChunkRecorded, this.onChunkRecorded);
            this.recorder.on(VoiceBroadcastRecorderEvent.CurrentChunkLengthUpdated, this.onCurrentChunkLengthUpdated);
        }

        return this.recorder;
    }
    /** Stops the recorder and removes all listeners, handlers and chunk references. */
    public async destroy(): Promise<void> {
        if (this.recorder) {
            // NOTE(review): the stop() promise (which resolves with the last chunk) is not awaited here
            this.recorder.stop();
            this.recorder.destroy();
        }

        this.infoEvent.off(MatrixEventEvent.BeforeRedaction, this.onBeforeRedaction);
        this.removeAllListeners();
        dis.unregister(this.dispatcherRef);
        this.chunkEvents = new VoiceBroadcastChunkEvents();
        this.chunkRelationHelper.destroy();
        this.client.off(ClientEvent.Sync, this.reconnectedListener);
    }
    /** Handles a redaction of the info event: stops the broadcast and cleans up. */
    private onBeforeRedaction = (): void => {
        if (this.getState() !== VoiceBroadcastInfoState.Stopped) {
            this.setState(VoiceBroadcastInfoState.Stopped);
            // destroy cleans up everything
            this.destroy();
        }
    };
    /** Dispatcher handler: pauses the broadcast when a call action occurs. */
    private onAction = (payload: ActionPayload): void => {
        if (payload.action !== "call_state") return;

        // pause on any call action; NOTE(review): the returned promise is not awaited
        this.pause();
    };
    /** Updates the state and emits a StateChanged event. */
    private setState(state: VoiceBroadcastRecordingState): void {
        this.state = state;
        this.emit(VoiceBroadcastRecordingEvent.StateChanged, this.state);
    }
    /** Recomputes the remaining time from already recorded chunks plus the current chunk length. */
    private onCurrentChunkLengthUpdated = (currentChunkLength: number): void => {
        this.setTimeLeft(this.maxLength - this.chunkEvents.getLengthSeconds() - currentChunkLength);
    };
private onChunkRecorded = async (chunk: ChunkRecordedPayload): Promise<void> => {
const uploadAndSendFn = async (): Promise<void> => {
const { url, file } = await this.uploadFile(chunk);
await this.sendVoiceMessage(chunk, url, file);
};
await this.callWithRetry(uploadAndSendFn);
};
    /**
     * This function is called on connection errors.
     * It sets the connection error state and stops the recorder.
     * Also plays an audio notification (best effort, errors are ignored).
     */
    private async onConnectionError(): Promise<void> {
        this.playConnectionErrorAudioNotification().catch(() => {
            // Error logged in playConnectionErrorAudioNotification().
        });
        await this.stopRecorder(false);
        this.setState("connection_error");
    }
private async playConnectionErrorAudioNotification(): Promise<void> {
if (localNotificationsAreSilenced(this.client)) {
return;
}
await this.backgroundAudio.pickFormatAndPlay("./media/error", ["mp3", "ogg"]);
}
private async uploadFile(chunk: ChunkRecordedPayload): ReturnType<typeof uploadFile> {
return uploadFile(
this.client,
this.roomId,
new Blob([chunk.buffer], {
type: this.getRecorder().contentType,
}),
);
}
    /**
     * Sends a chunk as a voice message that references the broadcast info event
     * and carries the next chunk sequence number. Retried on connection errors.
     *
     * @param chunk The recorded chunk (buffer and length)
     * @param url mxc url of the uploaded file (unencrypted rooms)
     * @param file Encrypted file info (encrypted rooms)
     */
    private async sendVoiceMessage(chunk: ChunkRecordedPayload, url?: string, file?: EncryptedFile): Promise<void> {
        /**
         * Increment the last sequence number and use it for this message.
         * Done outside of the sendMessageFn to get a scoped value.
         * Also see {@link VoiceBroadcastRecording.sequence}.
         */
        const sequence = ++this.sequence;
        const sendMessageFn = async (): Promise<void> => {
            const content = createVoiceMessageContent(
                url,
                this.getRecorder().contentType,
                Math.round(chunk.length * 1000),
                chunk.buffer.length,
                file,
            );
            // link the chunk to the broadcast info event
            content["m.relates_to"] = {
                rel_type: RelationType.Reference,
                event_id: this.infoEventId,
            };
            (<AudioContent>content)["io.element.voice_broadcast_chunk"] = {
                sequence,
            };

            await this.client.sendMessage(this.roomId, content);
        };

        await this.callWithRetry(sendMessageFn);
    }
    /**
     * Sends an info state event with given state.
     * The event references the initial info event and includes the sequence
     * number of the last sent chunk.
     * On error stores a resend function and setState(state) in {@link toRetry} and
     * sets the broadcast state to connection_error.
     */
    private async sendInfoStateEvent(state: VoiceBroadcastInfoState): Promise<void> {
        const sendEventFn = async (): Promise<void> => {
            await this.client.sendStateEvent(
                this.roomId,
                VoiceBroadcastInfoEventType,
                {
                    device_id: this.client.getDeviceId(),
                    state,
                    last_chunk_sequence: this.sequence,
                    ["m.relates_to"]: {
                        rel_type: RelationType.Reference,
                        event_id: this.infoEventId,
                    },
                } as VoiceBroadcastInfoEventContent,
                // state key = user id of the broadcasting user
                this.client.getSafeUserId(),
            );
        };

        await this.callWithRetry(sendEventFn);
    }
    /**
     * Calls the function.
     * On failure adds it to the retry list and triggers connection error.
     * {@link toRetry}
     * {@link onConnectionError}
     */
    private async callWithRetry(retryAbleFn: () => Promise<void>): Promise<void> {
        try {
            await retryAbleFn();
        } catch {
            this.toRetry.push(retryAbleFn);
            // NOTE(review): the onConnectionError() promise is not awaited here
            this.onConnectionError();
        }
    }
private async stopRecorder(emit = true): Promise<void> {
if (!this.recorder) {
return;
}
try {
const lastChunk = await this.recorder.stop();
if (lastChunk && emit) {
await this.onChunkRecorded(lastChunk);
}
} catch (err) {
logger.warn("error stopping voice broadcast recorder", err);
}
}
}

View File

@ -1,113 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import { MatrixClient, MatrixEvent, TypedEventEmitter } from "matrix-js-sdk/src/matrix";
import {
VoiceBroadcastPlayback,
VoiceBroadcastPlaybackEvent,
VoiceBroadcastPlaybackState,
VoiceBroadcastRecordingsStore,
} from "..";
import { IDestroyable } from "../../utils/IDestroyable";
export enum VoiceBroadcastPlaybacksStoreEvent {
    CurrentChanged = "current_changed",
}

interface EventMap {
    [VoiceBroadcastPlaybacksStoreEvent.CurrentChanged]: (recording: VoiceBroadcastPlayback | null) => void;
}

/**
 * This store manages VoiceBroadcastPlaybacks:
 * - access the currently playing voice broadcast
 * - ensures that only one broadcast is playing at a time
 */
export class VoiceBroadcastPlaybacksStore
    extends TypedEventEmitter<VoiceBroadcastPlaybacksStoreEvent, EventMap>
    implements IDestroyable
{
    private current: VoiceBroadcastPlayback | null = null;

    /** Playbacks indexed by their info event id. */
    private playbacks = new Map<string, VoiceBroadcastPlayback>();

    public constructor(private recordings: VoiceBroadcastRecordingsStore) {
        super();
    }

    /** Sets (and registers) the current playback and emits CurrentChanged. No-op if unchanged. */
    public setCurrent(current: VoiceBroadcastPlayback): void {
        if (this.current === current) return;

        this.current = current;
        this.addPlayback(current);
        this.emit(VoiceBroadcastPlaybacksStoreEvent.CurrentChanged, current);
    }

    /** Clears the current playback and emits CurrentChanged(null). No-op if already cleared. */
    public clearCurrent(): void {
        if (this.current === null) return;

        this.current = null;
        this.emit(VoiceBroadcastPlaybacksStoreEvent.CurrentChanged, null);
    }

    public getCurrent(): VoiceBroadcastPlayback | null {
        return this.current;
    }

    /** Returns the cached playback for an info event, creating and registering it on first access. */
    public getByInfoEvent(infoEvent: MatrixEvent, client: MatrixClient): VoiceBroadcastPlayback {
        const infoEventId = infoEvent.getId()!;

        if (!this.playbacks.has(infoEventId)) {
            this.addPlayback(new VoiceBroadcastPlayback(infoEvent, client, this.recordings));
        }

        return this.playbacks.get(infoEventId)!;
    }

    /** Registers a playback and subscribes to its state changes. No-op if already registered. */
    private addPlayback(playback: VoiceBroadcastPlayback): void {
        const infoEventId = playback.infoEvent.getId()!;

        if (this.playbacks.has(infoEventId)) return;

        this.playbacks.set(infoEventId, playback);
        playback.on(VoiceBroadcastPlaybackEvent.StateChanged, this.onPlaybackStateChanged);
    }

    private onPlaybackStateChanged = (state: VoiceBroadcastPlaybackState, playback: VoiceBroadcastPlayback): void => {
        switch (state) {
            case VoiceBroadcastPlaybackState.Buffering:
            case VoiceBroadcastPlaybackState.Playing:
                // a playback becoming active pauses all others and becomes the current one
                this.pauseExcept(playback);
                this.setCurrent(playback);
                break;
            case VoiceBroadcastPlaybackState.Stopped:
                this.clearCurrent();
                break;
        }
    };

    /** Pauses all registered playbacks except the given one. */
    private pauseExcept(playbackNotToPause: VoiceBroadcastPlayback): void {
        for (const playback of this.playbacks.values()) {
            if (playback !== playbackNotToPause) {
                playback.pause();
            }
        }
    }

    public destroy(): void {
        this.removeAllListeners();

        for (const playback of this.playbacks.values()) {
            playback.off(VoiceBroadcastPlaybackEvent.StateChanged, this.onPlaybackStateChanged);
        }

        this.playbacks = new Map();
    }
}

View File

@ -1,63 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import { TypedEventEmitter } from "matrix-js-sdk/src/matrix";
import { VoiceBroadcastPreRecording } from "..";
import { IDestroyable } from "../../utils/IDestroyable";
export type VoiceBroadcastPreRecordingEvent = "changed";

interface EventMap {
    changed: (preRecording: VoiceBroadcastPreRecording | null) => void;
}

/**
 * Store for the current {@link VoiceBroadcastPreRecording}.
 * Emits a "changed" event whenever the current pre-recording is set or cleared.
 */
export class VoiceBroadcastPreRecordingStore
    extends TypedEventEmitter<VoiceBroadcastPreRecordingEvent, EventMap>
    implements IDestroyable
{
    private current: VoiceBroadcastPreRecording | null = null;

    /** Replaces the current pre-recording and emits "changed". No-op if unchanged. */
    public setCurrent(current: VoiceBroadcastPreRecording): void {
        if (this.current === current) return;

        // stop listening to the previous pre-recording before replacing it
        this.current?.off("dismiss", this.onCancel);

        this.current = current;
        current.on("dismiss", this.onCancel);
        this.emit("changed", current);
    }

    /** Clears the current pre-recording and emits "changed" with null. No-op if already cleared. */
    public clearCurrent(): void {
        if (this.current === null) return;

        this.current.off("dismiss", this.onCancel);
        this.current = null;
        this.emit("changed", null);
    }

    public getCurrent(): VoiceBroadcastPreRecording | null {
        return this.current;
    }

    public destroy(): void {
        this.removeAllListeners();
        this.current?.off("dismiss", this.onCancel);
    }

    private onCancel = (voiceBroadcastPreRecording: VoiceBroadcastPreRecording): void => {
        // only clear if the dismissed pre-recording is still the current one
        if (this.current === voiceBroadcastPreRecording) {
            this.clearCurrent();
        }
    };
}

View File

@ -1,89 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022, 2023 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import { MatrixClient, MatrixEvent, TypedEventEmitter } from "matrix-js-sdk/src/matrix";
import {
VoiceBroadcastInfoState,
VoiceBroadcastRecording,
VoiceBroadcastRecordingEvent,
VoiceBroadcastRecordingState,
} from "..";
export enum VoiceBroadcastRecordingsStoreEvent {
    CurrentChanged = "current_changed",
}

interface EventMap {
    [VoiceBroadcastRecordingsStoreEvent.CurrentChanged]: (recording: VoiceBroadcastRecording | null) => void;
}

/**
 * This store provides access to the current and specific Voice Broadcast recordings.
 * The current recording is cleared automatically once it reaches the stopped state.
 */
export class VoiceBroadcastRecordingsStore extends TypedEventEmitter<VoiceBroadcastRecordingsStoreEvent, EventMap> {
    private current: VoiceBroadcastRecording | null = null;

    /** Recordings indexed by their info event id. */
    private recordings = new Map<string, VoiceBroadcastRecording>();

    public constructor() {
        super();
    }

    /** Sets (and caches) the current recording and emits CurrentChanged. No-op if unchanged. */
    public setCurrent(current: VoiceBroadcastRecording): void {
        if (this.current === current) return;

        const infoEventId = current.infoEvent.getId();

        if (!infoEventId) {
            throw new Error("Got broadcast info event without Id");
        }

        if (this.current) {
            // stop listening to the previous recording before replacing it
            this.current.off(VoiceBroadcastRecordingEvent.StateChanged, this.onCurrentStateChanged);
        }

        this.current = current;
        this.current.on(VoiceBroadcastRecordingEvent.StateChanged, this.onCurrentStateChanged);
        this.recordings.set(infoEventId, current);
        this.emit(VoiceBroadcastRecordingsStoreEvent.CurrentChanged, current);
    }

    public getCurrent(): VoiceBroadcastRecording | null {
        return this.current;
    }

    public hasCurrent(): boolean {
        return this.current !== null;
    }

    /** Clears the current recording and emits CurrentChanged(null). No-op if already cleared. */
    public clearCurrent(): void {
        if (!this.current) return;

        this.current.off(VoiceBroadcastRecordingEvent.StateChanged, this.onCurrentStateChanged);
        this.current = null;
        this.emit(VoiceBroadcastRecordingsStoreEvent.CurrentChanged, null);
    }

    /** Returns the cached recording for an info event, creating it on first access. */
    public getByInfoEvent(infoEvent: MatrixEvent, client: MatrixClient): VoiceBroadcastRecording {
        const infoEventId = infoEvent.getId();

        if (!infoEventId) {
            throw new Error("Got broadcast info event without Id");
        }

        const recording = this.recordings.get(infoEventId) || new VoiceBroadcastRecording(infoEvent, client);
        this.recordings.set(infoEventId, recording);
        return recording;
    }

    private onCurrentStateChanged = (state: VoiceBroadcastRecordingState): void => {
        // a stopped recording is no longer the current one
        if (state === VoiceBroadcastInfoState.Stopped) {
            this.clearCurrent();
        }
    };
}

View File

@ -1,32 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import { RelationType } from "matrix-js-sdk/src/matrix";
/** Event type of the voice broadcast info (state) event. */
export const VoiceBroadcastInfoEventType = "io.element.voice_broadcast_info";

/** Key of the chunk metadata inside a broadcast chunk (voice message) event content. */
export const VoiceBroadcastChunkEventType = "io.element.voice_broadcast_chunk";

export type VoiceBroadcastLiveness = "live" | "not-live" | "grey";

/** Lifecycle states carried by a voice broadcast info event. */
export enum VoiceBroadcastInfoState {
    Started = "started",
    Paused = "paused",
    Resumed = "resumed",
    Stopped = "stopped",
}

/** Content of a voice broadcast info state event. */
export interface VoiceBroadcastInfoEventContent {
    // device the broadcast is recorded on
    device_id: string;
    state: VoiceBroadcastInfoState;
    // target chunk length in seconds
    chunk_length?: number;
    // sequence number of the last sent chunk
    last_chunk_sequence?: number;
    // reference to the initial ("started") info event
    ["m.relates_to"]?: {
        rel_type: RelationType;
        event_id: string;
    };
}

View File

@ -1,147 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import { MatrixEvent } from "matrix-js-sdk/src/matrix";
import { VoiceBroadcastChunkEventType } from "..";
/**
 * Voice broadcast chunk collection.
 * Orders chunks by sequence (if available) or timestamp.
 */
export class VoiceBroadcastChunkEvents {
    private events: MatrixEvent[] = [];

    /** Returns a copy of the ordered chunk events. */
    public getEvents(): MatrixEvent[] {
        return [...this.events];
    }

    /** Returns the chunk after the given one, or undefined for the last chunk. */
    public getNext(event: MatrixEvent): MatrixEvent | undefined {
        return this.events[this.events.indexOf(event) + 1];
    }

    /** Adds (or replaces) a chunk event and re-sorts the collection. */
    public addEvent(event: MatrixEvent): void {
        if (this.addOrReplaceEvent(event)) {
            this.sort();
        }
    }

    /** Adds (or replaces) multiple chunk events; sorts once at the end. */
    public addEvents(events: MatrixEvent[]): void {
        const atLeastOneNew = events.reduce((newSoFar: boolean, event: MatrixEvent): boolean => {
            return this.addOrReplaceEvent(event) || newSoFar;
        }, false);

        if (atLeastOneNew) {
            this.sort();
        }
    }

    /** Whether an event (matched by transaction id or event id) is in the collection. */
    public includes(event: MatrixEvent): boolean {
        return !!this.events.find((e) => this.equalByTxnIdOrId(event, e));
    }

    /**
     * @returns {number} Length in milliseconds
     */
    public getLength(): number {
        return this.events.reduce((length: number, event: MatrixEvent) => {
            return length + this.calculateChunkLength(event);
        }, 0);
    }

    /** Total length of all chunks in seconds. */
    public getLengthSeconds(): number {
        return this.getLength() / 1000;
    }

    /**
     * Returns the accumulated length to (excl.) a chunk event.
     */
    public getLengthTo(event: MatrixEvent): number {
        let length = 0;

        for (let i = 0; i < this.events.indexOf(event); i++) {
            length += this.calculateChunkLength(this.events[i]);
        }

        return length;
    }

    /** Returns the chunk event that contains the given playback time (ms), or null. */
    public findByTime(time: number): MatrixEvent | null {
        let lengthSoFar = 0;

        for (let i = 0; i < this.events.length; i++) {
            lengthSoFar += this.calculateChunkLength(this.events[i]);

            if (lengthSoFar >= time) {
                return this.events[i];
            }
        }

        return null;
    }

    /** Whether the given event is the last chunk (also true for events not in the collection). */
    public isLast(event: MatrixEvent): boolean {
        return this.events.indexOf(event) >= this.events.length - 1;
    }

    /**
     * Returns the sequence number from the chunk metadata, falling back to the
     * 1-based position in the collection; null if the event is unknown.
     */
    public getSequenceForEvent(event: MatrixEvent): number | null {
        const sequence = parseInt(event.getContent()?.[VoiceBroadcastChunkEventType]?.sequence, 10);

        if (!isNaN(sequence)) return sequence;

        if (this.events.includes(event)) return this.events.indexOf(event) + 1;

        return null;
    }

    public getNumberOfEvents(): number {
        return this.events.length;
    }

    /** Chunk duration in ms, from MSC1767 audio content or the legacy info field. */
    private calculateChunkLength(event: MatrixEvent): number {
        return event.getContent()?.["org.matrix.msc1767.audio"]?.duration || event.getContent()?.info?.duration || 0;
    }

    // removes any existing version of the event (matched by txn id or event id), then appends it
    private addOrReplaceEvent = (event: MatrixEvent): boolean => {
        this.events = this.events.filter((e) => !this.equalByTxnIdOrId(event, e));
        this.events.push(event);
        return true;
    };

    // events are equal if their transaction ids or their event ids match
    private equalByTxnIdOrId(eventA: MatrixEvent, eventB: MatrixEvent): boolean {
        return (
            (eventA.getTxnId() && eventB.getTxnId() && eventA.getTxnId() === eventB.getTxnId()) ||
            eventA.getId() === eventB.getId()
        );
    }

    /**
     * Sort by sequence, if available for all events.
     * Else fall back to timestamp.
     */
    private sort(): void {
        const compareFn = this.allHaveSequence() ? this.compareBySequence : this.compareByTimestamp;
        this.events.sort(compareFn);
    }

    private compareBySequence = (a: MatrixEvent, b: MatrixEvent): number => {
        const aSequence = a.getContent()?.[VoiceBroadcastChunkEventType]?.sequence || 0;
        const bSequence = b.getContent()?.[VoiceBroadcastChunkEventType]?.sequence || 0;
        return aSequence - bSequence;
    };

    private compareByTimestamp = (a: MatrixEvent, b: MatrixEvent): number => {
        return a.getTs() - b.getTs();
    };

    private allHaveSequence(): boolean {
        return !this.events.some((event: MatrixEvent) => {
            // parseInt(x, 10) !== x is true for absent or non-numeric sequence values
            const sequence = event.getContent()?.[VoiceBroadcastChunkEventType]?.sequence;
            return parseInt(sequence, 10) !== sequence;
        });
    }
}

View File

@ -1,90 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import { ClientEvent, MatrixClient, MatrixEvent, RelationType, Room, SyncState } from "matrix-js-sdk/src/matrix";
import { VoiceBroadcastInfoEventContent, VoiceBroadcastInfoEventType, VoiceBroadcastInfoState } from "..";
import { IDestroyable } from "../../utils/IDestroyable";
import { findRoomLiveVoiceBroadcastFromUserAndDevice } from "./findRoomLiveVoiceBroadcastFromUserAndDevice";
/**
 * Handles voice broadcasts on app resume (after logging in, reload, crash).
 * Any live broadcast previously recorded on this device is stopped.
 */
export class VoiceBroadcastResumer implements IDestroyable {
    public constructor(private client: MatrixClient) {
        if (client.isInitialSyncComplete()) {
            this.resume();
        } else {
            // wait for initial sync
            client.on(ClientEvent.Sync, this.onClientSync);
        }
    }

    private onClientSync = (): void => {
        if (this.client.getSyncState() === SyncState.Syncing) {
            this.client.off(ClientEvent.Sync, this.onClientSync);
            this.resume();
        }
    };

    /** Scans all rooms for live broadcasts of this user+device and stops them. */
    private resume(): void {
        const userId = this.client.getUserId();
        const deviceId = this.client.getDeviceId();

        if (!userId || !deviceId) {
            // Resuming a voice broadcast only makes sense if there is a user.
            return;
        }

        this.client.getRooms().forEach((room: Room) => {
            const infoEvent = findRoomLiveVoiceBroadcastFromUserAndDevice(room, userId, deviceId);

            if (infoEvent) {
                // Found a live broadcast event from current device; stop it.
                // Stopping it is a temporary solution (see PSF-1669).
                this.sendStopVoiceBroadcastStateEvent(infoEvent);
                // NOTE(review): returning a value from a forEach callback does not
                // stop the iteration; every remaining room is still processed.
                return false;
            }
        });
    }

    /** Sends a "stopped" info state event referencing the broadcast's started event. */
    private sendStopVoiceBroadcastStateEvent(infoEvent: MatrixEvent): void {
        const userId = this.client.getUserId();
        const deviceId = this.client.getDeviceId();
        const roomId = infoEvent.getRoomId();

        if (!userId || !deviceId || !roomId) {
            // We can only send a state event if we know all the IDs.
            return;
        }

        const content: VoiceBroadcastInfoEventContent = {
            device_id: deviceId,
            state: VoiceBroadcastInfoState.Stopped,
        };

        // all events should reference the started event
        const referencedEventId =
            infoEvent.getContent()?.state === VoiceBroadcastInfoState.Started
                ? infoEvent.getId()
                : infoEvent.getContent()?.["m.relates_to"]?.event_id;

        if (referencedEventId) {
            content["m.relates_to"] = {
                rel_type: RelationType.Reference,
                event_id: referencedEventId,
            };
        }

        this.client.sendStateEvent(roomId, VoiceBroadcastInfoEventType, content, userId);
    }

    public destroy(): void {
        this.client.off(ClientEvent.Sync, this.onClientSync);
    }
}

View File

@ -1,86 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import React from "react";
import { MatrixClient, Room, SyncState } from "matrix-js-sdk/src/matrix";
import { hasRoomLiveVoiceBroadcast, VoiceBroadcastInfoEventType, VoiceBroadcastRecordingsStore } from "..";
import InfoDialog from "../../components/views/dialogs/InfoDialog";
import { _t } from "../../languageHandler";
import Modal from "../../Modal";
// Informs the user that they are already recording a broadcast.
const showAlreadyRecordingDialog = (): void => {
    Modal.createDialog(InfoDialog, {
        title: _t("voice_broadcast|failed_already_recording_title"),
        description: <p>{_t("voice_broadcast|failed_already_recording_description")}</p>,
        hasCloseButton: true,
    });
};

// Informs the user that they lack the power level to send broadcast info events.
const showInsufficientPermissionsDialog = (): void => {
    Modal.createDialog(InfoDialog, {
        title: _t("voice_broadcast|failed_insufficient_permission_title"),
        description: <p>{_t("voice_broadcast|failed_insufficient_permission_description")}</p>,
        hasCloseButton: true,
    });
};

// Informs the user that someone else is already broadcasting in this room.
const showOthersAlreadyRecordingDialog = (): void => {
    Modal.createDialog(InfoDialog, {
        title: _t("voice_broadcast|failed_others_already_recording_title"),
        description: <p>{_t("voice_broadcast|failed_others_already_recording_description")}</p>,
        hasCloseButton: true,
    });
};

// Informs the user that starting a broadcast requires a working connection.
const showNoConnectionDialog = (): void => {
    Modal.createDialog(InfoDialog, {
        title: _t("voice_broadcast|failed_no_connection_title"),
        description: <p>{_t("voice_broadcast|failed_no_connection_description")}</p>,
        hasCloseButton: true,
    });
};
/**
 * Checks whether a new voice broadcast can be started in the given room.
 * Shows an explanatory dialog and returns false when a pre-condition fails:
 * own recording in progress, missing permission, sync error, or a live
 * broadcast already present in the room.
 */
export const checkVoiceBroadcastPreConditions = async (
    room: Room,
    client: MatrixClient,
    recordingsStore: VoiceBroadcastRecordingsStore,
): Promise<boolean> => {
    // the user is already recording a broadcast somewhere
    if (recordingsStore.getCurrent()) {
        showAlreadyRecordingDialog();
        return false;
    }

    const currentUserId = client.getUserId();
    if (!currentUserId) return false;

    // the user must be allowed to send broadcast info state events
    if (!room.currentState.maySendStateEvent(VoiceBroadcastInfoEventType, currentUserId)) {
        showInsufficientPermissionsDialog();
        return false;
    }

    // a broadcast cannot be started while the client cannot sync
    if (client.getSyncState() === SyncState.Error) {
        showNoConnectionDialog();
        return false;
    }

    // only one live broadcast per room
    const { hasBroadcast, startedByUser } = await hasRoomLiveVoiceBroadcast(client, room, currentUserId);
    if (hasBroadcast) {
        if (startedByUser) {
            showAlreadyRecordingDialog();
        } else {
            showOthersAlreadyRecordingDialog();
        }
        return false;
    }

    return true;
};

View File

@ -1,20 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2023 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import { SdkContextClass } from "../../contexts/SDKContext";
/**
 * Stops and clears any current voice broadcast playback, recording and
 * pre-recording held by the given stores.
 */
export const cleanUpBroadcasts = async (stores: SdkContextClass): Promise<void> => {
    const playbacks = stores.voiceBroadcastPlaybacksStore;
    playbacks.getCurrent()?.stop();
    playbacks.clearCurrent();

    const recordings = stores.voiceBroadcastRecordingsStore;
    await recordings.getCurrent()?.stop();
    recordings.clearCurrent();

    const preRecordings = stores.voiceBroadcastPreRecordingStore;
    preRecordings.getCurrent()?.cancel();
    preRecordings.clearCurrent();
};

View File

@ -1,20 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import { VoiceBroadcastInfoState, VoiceBroadcastLiveness } from "..";
// maps each broadcast info state to the liveness shown in the UI
const livenessByState = {
    started: "live",
    resumed: "live",
    paused: "grey",
    stopped: "not-live",
} as Record<VoiceBroadcastInfoState, VoiceBroadcastLiveness>;

/** Returns the liveness for a broadcast info state; unknown states are "not-live". */
export const determineVoiceBroadcastLiveness = (infoState: VoiceBroadcastInfoState): VoiceBroadcastLiveness => {
    return livenessByState[infoState] ?? "not-live";
};

View File

@ -1,18 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import { VoiceBroadcastPlaybacksStore, VoiceBroadcastPlaybackState } from "..";
/**
 * Clears the store's current voice broadcast playback if it is in the
 * stopped state; otherwise does nothing.
 *
 * Fix: the previous implementation was a no-op — the branch documented
 * "clear current if stopped" but only contained a bare `return;` and never
 * called clearCurrent().
 */
export const doClearCurrentVoiceBroadcastPlaybackIfStopped = (
    voiceBroadcastPlaybacksStore: VoiceBroadcastPlaybacksStore,
): void => {
    if (voiceBroadcastPlaybacksStore.getCurrent()?.getState() === VoiceBroadcastPlaybackState.Stopped) {
        // clear current if stopped
        voiceBroadcastPlaybacksStore.clearCurrent();
    }
};

View File

@ -1,56 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import { MatrixClient, Room } from "matrix-js-sdk/src/matrix";
import {
hasRoomLiveVoiceBroadcast,
VoiceBroadcastPlaybacksStore,
VoiceBroadcastPlaybackState,
VoiceBroadcastRecordingsStore,
} from "..";
/**
 * When a live voice broadcast is in the room and
 * another voice broadcast is not currently being listened to or recorded
 * the live broadcast in the room is set as the current broadcast to listen to.
 * When there is no live broadcast in the room: clear current broadcast.
 *
 * @param {Room} room The room to check for a live voice broadcast
 * @param {MatrixClient} client
 * @param {VoiceBroadcastPlaybacksStore} voiceBroadcastPlaybacksStore
 * @param {VoiceBroadcastRecordingsStore} voiceBroadcastRecordingsStore
 */
export const doMaybeSetCurrentVoiceBroadcastPlayback = async (
    room: Room,
    client: MatrixClient,
    voiceBroadcastPlaybacksStore: VoiceBroadcastPlaybacksStore,
    voiceBroadcastRecordingsStore: VoiceBroadcastRecordingsStore,
): Promise<void> => {
    // do not disturb the current recording
    if (voiceBroadcastRecordingsStore.hasCurrent()) return;

    const currentPlayback = voiceBroadcastPlaybacksStore.getCurrent();

    if (currentPlayback && currentPlayback.getState() !== VoiceBroadcastPlaybackState.Stopped) {
        // do not disturb the current playback
        return;
    }

    const { infoEvent } = await hasRoomLiveVoiceBroadcast(client, room);

    if (infoEvent) {
        // live broadcast in the room + no recording + not listening yet: set the current broadcast
        const voiceBroadcastPlayback = voiceBroadcastPlaybacksStore.getByInfoEvent(infoEvent, client);
        voiceBroadcastPlaybacksStore.setCurrent(voiceBroadcastPlayback);
        return;
    }

    // no broadcast; not listening: clear current
    voiceBroadcastPlaybacksStore.clearCurrent();
};

View File

@ -1,29 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import { MatrixEvent, Room } from "matrix-js-sdk/src/matrix";
import { VoiceBroadcastInfoEventType, VoiceBroadcastInfoState } from "..";
/**
 * Returns the live (non-stopped) voice broadcast info state event that the
 * given user started on the given device in this room, or null.
 */
export const findRoomLiveVoiceBroadcastFromUserAndDevice = (
    room: Room,
    userId: string,
    deviceId: string,
): MatrixEvent | null => {
    const stateEvent = room.currentState.getStateEvents(VoiceBroadcastInfoEventType, userId);

    // no broadcast from that user
    if (!stateEvent) return null;

    const content = stateEvent.getContent() || {};

    // stopped broadcast
    if (content.state === VoiceBroadcastInfoState.Stopped) return null;

    // only live broadcasts from the given device count
    if (content.device_id !== deviceId) return null;

    return stateEvent;
};

View File

@ -1,23 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import SdkConfig, { DEFAULTS } from "../../SdkConfig";
import { Features } from "../../settings/Settings";
import SettingsStore from "../../settings/SettingsStore";
/**
 * Returns the target chunk length for voice broadcasts in seconds:
 * - If {@see Features.VoiceBroadcastForceSmallChunks} is enabled uses 15s chunk length
 * - Otherwise gets the value from the voice_broadcast.chunk_length config
 * - If that fails from DEFAULTS
 * - If that fails fall back to 120 (two minutes)
 */
export const getChunkLength = (): number => {
    if (SettingsStore.getValue(Features.VoiceBroadcastForceSmallChunks)) return 15;
    return SdkConfig.get("voice_broadcast")?.chunk_length || DEFAULTS.voice_broadcast?.chunk_length || 120;
};

View File

@ -1,19 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import SdkConfig, { DEFAULTS } from "../../SdkConfig";
/**
 * Returns the max length for voice broadcasts in seconds:
 * - Tries to get the value from the voice_broadcast.max_length config
 * - If that fails from DEFAULTS
 * - If that fails fall back to four hours
 */
export const getMaxBroadcastLength = (): number => {
    return SdkConfig.get("voice_broadcast")?.max_length || DEFAULTS.voice_broadcast?.max_length || 4 * 60 * 60;
};

View File

@ -1,57 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import { MatrixClient, MatrixEvent, Room } from "matrix-js-sdk/src/matrix";
import { retrieveStartedInfoEvent, VoiceBroadcastInfoEventType, VoiceBroadcastInfoState } from "..";
import { asyncEvery } from "../../utils/arrays";
interface Result {
    // whether there is a live broadcast in the room
    hasBroadcast: boolean;
    // info event of any live broadcast in the room
    infoEvent: MatrixEvent | null;
    // whether the broadcast was started by the user
    startedByUser: boolean;
}

/**
 * Checks the room state for live (non-stopped) voice broadcasts.
 * Iterates all broadcast info state events; stops early once a broadcast
 * started by the given user is found.
 */
export const hasRoomLiveVoiceBroadcast = async (client: MatrixClient, room: Room, userId?: string): Promise<Result> => {
    let hasBroadcast = false;
    let startedByUser = false;
    let infoEvent: MatrixEvent | null = null;

    const stateEvents = room.currentState.getStateEvents(VoiceBroadcastInfoEventType);
    // asyncEvery stops iterating as soon as the callback returns false
    await asyncEvery(stateEvents, async (event: MatrixEvent) => {
        const state = event.getContent()?.state;

        if (state && state !== VoiceBroadcastInfoState.Stopped) {
            const startEvent = await retrieveStartedInfoEvent(event, client);

            // skip if started voice broadcast event is redacted
            if (startEvent?.isRedacted()) return true;

            hasBroadcast = true;
            infoEvent = startEvent;

            // state key = sender's MXID
            if (event.getStateKey() === userId) {
                startedByUser = true;
                // break here, because more than true / true is not possible
                return false;
            }
        }

        return true;
    });

    return {
        hasBroadcast,
        infoEvent,
        startedByUser,
    };
};

View File

@ -1,21 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import { MatrixClient, MatrixEvent, RelationType } from "matrix-js-sdk/src/matrix";
import { VoiceBroadcastInfoEventType } from "../types";
/**
 * Whether the given event is an m.reference relation pointing at a
 * voice broadcast info event in the same room.
 */
export const isRelatedToVoiceBroadcast = (event: MatrixEvent, client: MatrixClient): boolean => {
    const relation = event.getRelation();
    if (relation?.rel_type !== RelationType.Reference) return false;

    const referencedEventId = relation.event_id;
    if (!referencedEventId) return false;

    const referencedEvent = client.getRoom(event.getRoomId())?.findEventById(referencedEventId);
    return referencedEvent?.getType() === VoiceBroadcastInfoEventType;
};

View File

@ -1,17 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import { MatrixEvent } from "matrix-js-sdk/src/matrix";
import { VoiceBroadcastInfoEventType, VoiceBroadcastInfoState } from "../types";
/**
 * Whether the event is a voice broadcast info event in the "started" state.
 */
export const isVoiceBroadcastStartedEvent = (event: MatrixEvent): boolean => {
    if (event.getType() !== VoiceBroadcastInfoEventType) return false;
    return event.getContent()?.state === VoiceBroadcastInfoState.Started;
};

View File

@ -1,29 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import { Room } from "matrix-js-sdk/src/matrix";
import { VoiceBroadcastPlaybacksStore } from "..";
/**
 * Pauses and clears the currently playing voice broadcast, unless it is
 * live or belongs to the given room.
 *
 * @param room - the room being viewed; a playback from this room is left running
 * @param voiceBroadcastPlaybacksStore - store holding the current playback
 */
export const pauseNonLiveBroadcastFromOtherRoom = (
    room: Room,
    voiceBroadcastPlaybacksStore: VoiceBroadcastPlaybacksStore,
): void => {
    const playingBroadcast = voiceBroadcastPlaybacksStore.getCurrent();

    // Nothing to do when there is no playback, when it is live, or when it
    // is from this very room. (The `!playingBroadcast` guard narrows the
    // type, so no optional chaining is needed on the later accesses.)
    if (
        !playingBroadcast ||
        playingBroadcast.getLiveness() === "live" ||
        playingBroadcast.infoEvent.getRoomId() === room.roomId
    ) {
        return;
    }

    voiceBroadcastPlaybacksStore.clearCurrent();
    playingBroadcast.pause();
};

View File

@ -1,37 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import { MatrixClient, MatrixEvent } from "matrix-js-sdk/src/matrix";
import { VoiceBroadcastInfoState } from "..";
/**
 * Resolves the "started" voice broadcast info event for the given event:
 * the event itself if it already is the started event, otherwise the event
 * it relates to — looked up locally first, then fetched from the homeserver.
 * Returns null if it cannot be resolved.
 */
export const retrieveStartedInfoEvent = async (
    event: MatrixEvent,
    client: MatrixClient,
): Promise<MatrixEvent | null> => {
    // The passed event already is the started info event.
    if (event.getContent()?.state === VoiceBroadcastInfoState.Started) return event;

    const startEventId = event.getRelation()?.event_id;
    // Not related to any other event → nothing to resolve.
    if (!startEventId) return null;

    const roomId = event.getRoomId() || "";

    // Prefer the event from the local room, if already known.
    const localEvent = client.getRoom(roomId)?.findEventById(startEventId);
    if (localEvent) return localEvent;

    // Fall back to fetching from the homeserver; best-effort only.
    try {
        const eventData = await client.fetchRoomEvent(roomId, startEventId);
        return new MatrixEvent(eventData);
    } catch {
        return null;
    }
};

View File

@ -1,43 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import { MatrixClient, Room } from "matrix-js-sdk/src/matrix";
import {
checkVoiceBroadcastPreConditions,
VoiceBroadcastPlaybacksStore,
VoiceBroadcastPreRecording,
VoiceBroadcastPreRecordingStore,
VoiceBroadcastRecordingsStore,
} from "..";
/**
 * Creates a voice broadcast pre-recording for the current user in the given
 * room and registers it with the pre-recording store. Pauses and clears any
 * current playback first. Returns null when the pre-conditions fail or the
 * current user/member cannot be determined.
 */
export const setUpVoiceBroadcastPreRecording = async (
    room: Room,
    client: MatrixClient,
    playbacksStore: VoiceBroadcastPlaybacksStore,
    recordingsStore: VoiceBroadcastRecordingsStore,
    preRecordingStore: VoiceBroadcastPreRecordingStore,
): Promise<VoiceBroadcastPreRecording | null> => {
    const preConditionsOk = await checkVoiceBroadcastPreConditions(room, client, recordingsStore);
    if (!preConditionsOk) return null;

    const ownUserId = client.getUserId();
    const sender = ownUserId ? room.getMember(ownUserId) : null;
    if (!sender) return null;

    // Stop listening to whatever is currently playing before recording.
    playbacksStore.getCurrent()?.pause();
    playbacksStore.clearCurrent();

    const preRecording = new VoiceBroadcastPreRecording(room, sender, client, playbacksStore, recordingsStore);
    preRecordingStore.setCurrent(preRecording);
    return preRecording;
};

View File

@ -1,25 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import { MatrixClient, MatrixEvent } from "matrix-js-sdk/src/matrix";
import { VoiceBroadcastInfoEventContent, VoiceBroadcastInfoState } from "..";
/**
 * Whether the broadcast should render as a recording tile: it is not stopped
 * and was started by this very user on this very device.
 */
export const shouldDisplayAsVoiceBroadcastRecordingTile = (
    state: VoiceBroadcastInfoState,
    client: MatrixClient,
    event: MatrixEvent,
): boolean => {
    const ownUserId = client.getUserId();
    if (!ownUserId || ownUserId !== event.getSender()) return false;
    if (state === VoiceBroadcastInfoState.Stopped) return false;
    return client.getDeviceId() === event.getContent<VoiceBroadcastInfoEventContent>()?.device_id;
};

View File

@ -1,16 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import { MatrixEvent } from "matrix-js-sdk/src/matrix";
import { VoiceBroadcastInfoEventType, VoiceBroadcastInfoState } from "..";
/**
 * Whether the event is an unredacted voice broadcast info event
 * in the "stopped" state.
 */
export const shouldDisplayAsVoiceBroadcastStoppedText = (event: MatrixEvent): boolean => {
    if (event.getType() !== VoiceBroadcastInfoEventType) return false;
    if (event.isRedacted()) return false;
    return event.getContent()?.state === VoiceBroadcastInfoState.Stopped;
};

View File

@ -1,15 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import { MatrixEvent } from "matrix-js-sdk/src/matrix";
import { VoiceBroadcastInfoEventType, VoiceBroadcastInfoState } from "..";
/**
 * Whether the event should render as a voice broadcast tile:
 * a broadcast info event that was started, or has been redacted.
 */
export const shouldDisplayAsVoiceBroadcastTile = (event: MatrixEvent): boolean => {
    // NOTE(review): the `?.()` method guards suggest callers may pass objects
    // without these methods (e.g. partial events) — confirm before removing.
    if (event.getType?.() !== VoiceBroadcastInfoEventType) return false;
    return event.getContent?.()?.state === VoiceBroadcastInfoState.Started || event.isRedacted();
};

View File

@ -1,21 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import React from "react";
import InfoDialog from "../../components/views/dialogs/InfoDialog";
import { _t } from "../../languageHandler";
import Modal from "../../Modal";
/**
 * Shows an info dialog telling the user that a call cannot be started
 * (because of a live broadcast).
 */
export const showCantStartACallDialog = (): void => {
    const title = _t("voip|failed_call_live_broadcast_title");
    const description = <p>{_t("voip|failed_call_live_broadcast_description")}</p>;
    Modal.createDialog(InfoDialog, { title, description, hasCloseButton: true });
};

View File

@ -1,92 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import { ISendEventResponse, MatrixClient, Room, RoomStateEvent } from "matrix-js-sdk/src/matrix";
import { defer } from "matrix-js-sdk/src/utils";
import {
VoiceBroadcastInfoEventContent,
VoiceBroadcastInfoEventType,
VoiceBroadcastInfoState,
VoiceBroadcastRecordingsStore,
VoiceBroadcastRecording,
getChunkLength,
VoiceBroadcastPlaybacksStore,
} from "..";
import { checkVoiceBroadcastPreConditions } from "./checkVoiceBroadcastPreConditions";
/**
 * Sends a voice_broadcast_info "started" state event for the current user
 * and resolves once the event shows up in the room state, at which point a
 * VoiceBroadcastRecording is created, stored as current, and started.
 * Rejects when the current user is unknown.
 */
const startBroadcast = async (
    room: Room,
    client: MatrixClient,
    recordingsStore: VoiceBroadcastRecordingsStore,
): Promise<VoiceBroadcastRecording> => {
    const { promise, resolve, reject } = defer<VoiceBroadcastRecording>();
    const userId = client.getUserId();
    if (!userId) {
        reject("unable to start voice broadcast if current user is unknown");
        return promise;
    }
    // Holds the send response so the listener can match the echoed state event.
    let result: ISendEventResponse | null = null;
    const onRoomStateEvents = (): void => {
        // Ignore state updates until we know the event id we sent.
        // NOTE(review): if the state event arrives before `result` is assigned
        // and no further RoomStateEvent.Events fires, this promise never
        // settles — potential hang; confirm against sync behaviour.
        if (!result) return;
        const voiceBroadcastEvent = room.currentState.getStateEvents(VoiceBroadcastInfoEventType, userId);
        if (voiceBroadcastEvent?.getId() === result.event_id) {
            room.off(RoomStateEvent.Events, onRoomStateEvents);
            const recording = new VoiceBroadcastRecording(voiceBroadcastEvent, client);
            recordingsStore.setCurrent(recording);
            recording.start();
            resolve(recording);
        }
    };
    // Subscribe before sending so the echo cannot be missed after `result` is set.
    room.on(RoomStateEvent.Events, onRoomStateEvents);
    // XXX Michael W: refactor to live event
    result = await client.sendStateEvent(
        room.roomId,
        VoiceBroadcastInfoEventType,
        {
            device_id: client.getDeviceId(),
            state: VoiceBroadcastInfoState.Started,
            chunk_length: getChunkLength(),
        } as VoiceBroadcastInfoEventContent,
        userId, // state key = sender's MXID
    );
    return promise;
};
/**
 * Starts a new Voice Broadcast Recording, if
 * - the user has the permissions to do so in the room
 * - the user is not already recording a voice broadcast
 * - there is no other broadcast being recorded in the room, yet
 * Sends a voice_broadcast_info state event and waits for the event to actually appear in the room state.
 *
 * @returns the new recording, or null when the pre-conditions fail
 */
export const startNewVoiceBroadcastRecording = async (
    room: Room,
    client: MatrixClient,
    playbacksStore: VoiceBroadcastPlaybacksStore,
    recordingsStore: VoiceBroadcastRecordingsStore,
): Promise<VoiceBroadcastRecording | null> => {
    const mayStart = await checkVoiceBroadcastPreConditions(room, client, recordingsStore);
    if (!mayStart) return null;

    // Listening and recording at the same time is not supported:
    // pause and clear any current playback first.
    playbacksStore.getCurrent()?.pause();
    playbacksStore.clearCurrent();

    return startBroadcast(room, client, recordingsStore);
};

View File

@ -1,41 +0,0 @@
/*
Copyright 2024 New Vector Ltd.
Copyright 2022 The Matrix.org Foundation C.I.C.
SPDX-License-Identifier: AGPL-3.0-only OR GPL-3.0-only
Please see LICENSE files in the repository root for full details.
*/
import React, { ReactNode } from "react";
import { MatrixClient, MatrixEvent } from "matrix-js-sdk/src/matrix";
import { MatrixClientPeg } from "../../MatrixClientPeg";
import AccessibleButton from "../../components/views/elements/AccessibleButton";
import { highlightEvent } from "../../utils/EventUtils";
import { _t } from "../../languageHandler";
import { getSenderName } from "../../utils/event/getSenderName";
/**
 * Returns a render function for the timeline text of a stopped voice
 * broadcast, linking to the start of the broadcast when possible and
 * distinguishing the user's own broadcasts from others'.
 */
export const textForVoiceBroadcastStoppedEvent = (event: MatrixEvent, client: MatrixClient): (() => ReactNode) => {
    return (): ReactNode => {
        const startEventId = event.getRelation()?.event_id;
        const roomId = event.getRoomId();

        // Render the <a> translation tag as a link highlighting the start
        // event, when both the start event and the room are known.
        const renderLink = (text: string): ReactNode =>
            startEventId && roomId ? (
                <AccessibleButton kind="link_inline" onClick={(): void => highlightEvent(roomId, startEventId)}>
                    {text}
                </AccessibleButton>
            ) : (
                text
            );
        const templateTags = { a: renderLink };

        const ownUserId = MatrixClientPeg.get()?.getUserId();
        const stoppedByOwnUser = !!ownUserId && ownUserId === event.getSender();
        if (stoppedByOwnUser) {
            return _t("timeline|io.element.voice_broadcast_info|you", {}, templateTags);
        }
        return _t("timeline|io.element.voice_broadcast_info|user", { senderName: getSenderName(event) }, templateTags);
    };
};

Some files were not shown because too many files have changed in this diff Show More