Apply prettier formatting

Michael Weimann 2022-12-12 12:24:14 +01:00
parent 1cac306093
commit 526645c791
No known key found for this signature in database
GPG key ID: 53F535A266BB9584
1576 changed files with 65385 additions and 62478 deletions

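The reformatted output below reflects a handful of Prettier options: a wide print width (long JSX elements and call arguments collapse onto single lines of roughly 120 characters), four-space indentation, always-parenthesized arrow-function arguments, and trailing commas in multi-line lists. The repository's actual Prettier configuration is not part of this diff, so the following TypeScript sketch only spells out the options inferred from it; the file layout, constant name, and exact values are assumptions.

// Sketch of the Prettier options inferred from the diff below; the project's
// real config may differ in file name, format, and exact values.
import type { Options } from "prettier";

const inferredPrettierConfig: Options = {
    printWidth: 120, // multi-line props/arguments collapse onto ~120-character lines
    tabWidth: 4, // four-space indentation is kept throughout
    arrowParens: "always", // `p => p.destroy()` becomes `(p) => p.destroy()`
    trailingComma: "all", // trailing commas appear in multi-line parameter lists
    quoteProps: "as-needed", // `"dismiss":` becomes `dismiss:` in the EventMap interface
};

export default inferredPrettierConfig;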
View file

@@ -45,7 +45,8 @@ export interface ChunkRecordedPayload {
*/
export class VoiceBroadcastRecorder
extends TypedEventEmitter<VoiceBroadcastRecorderEvent, EventMap>
implements IDestroyable {
implements IDestroyable
{
private headers = new Uint8Array(0);
private chunkBuffer = new Uint8Array(0);
// position of the previous chunk in seconds
@@ -54,10 +55,7 @@ export class VoiceBroadcastRecorder
// current chunk length in seconds
private currentChunkLength = 0;
public constructor(
private voiceRecording: VoiceRecording,
public readonly targetChunkLength: number,
) {
public constructor(private voiceRecording: VoiceRecording, public readonly targetChunkLength: number) {
super();
this.voiceRecording.onDataAvailable = this.onDataAvailable;
}
@@ -148,10 +146,7 @@ export class VoiceBroadcastRecorder
return;
}
this.emit(
VoiceBroadcastRecorderEvent.ChunkRecorded,
this.extractChunk(),
);
this.emit(VoiceBroadcastRecorderEvent.ChunkRecorded, this.extractChunk());
}
public destroy(): void {

View file

@@ -58,13 +58,9 @@ export const VoiceBroadcastBody: React.FC<IBodyProps> = ({ mxEvent }) => {
if (shouldDisplayAsVoiceBroadcastRecordingTile(infoState, client, mxEvent)) {
const recording = VoiceBroadcastRecordingsStore.instance().getByInfoEvent(mxEvent, client);
return <VoiceBroadcastRecordingBody
recording={recording}
/>;
return <VoiceBroadcastRecordingBody recording={recording} />;
}
const playback = VoiceBroadcastPlaybacksStore.instance().getByInfoEvent(mxEvent, client);
return <VoiceBroadcastPlaybackBody
playback={playback}
/>;
return <VoiceBroadcastPlaybackBody playback={playback} />;
};

View file

@@ -24,18 +24,15 @@ interface Props {
grey?: boolean;
}
export const LiveBadge: React.FC<Props> = ({
grey = false,
}) => {
const liveBadgeClasses = classNames(
"mx_LiveBadge",
{
"mx_LiveBadge--grey": grey,
},
);
export const LiveBadge: React.FC<Props> = ({ grey = false }) => {
const liveBadgeClasses = classNames("mx_LiveBadge", {
"mx_LiveBadge--grey": grey,
});
return <div className={liveBadgeClasses}>
<LiveIcon className="mx_Icon mx_Icon_16" />
{ _t("Live") }
</div>;
return (
<div className={liveBadgeClasses}>
<LiveIcon className="mx_Icon mx_Icon_16" />
{_t("Live")}
</div>
);
};

View file

@@ -24,16 +24,10 @@ interface Props {
onClick: () => void;
}
export const SeekButton: React.FC<Props> = ({
onClick,
icon: Icon,
label,
}) => {
return <AccessibleButton
kind="secondary_content"
onClick={onClick}
aria-label={label}
>
<Icon className="mx_Icon mx_Icon_24" />
</AccessibleButton>;
export const SeekButton: React.FC<Props> = ({ onClick, icon: Icon, label }) => {
return (
<AccessibleButton kind="secondary_content" onClick={onClick} aria-label={label}>
<Icon className="mx_Icon mx_Icon_24" />
</AccessibleButton>
);
};

View file

@@ -26,17 +26,14 @@ interface Props {
onClick: () => void;
}
export const VoiceBroadcastControl: React.FC<Props> = ({
className = "",
icon: Icon,
label,
onClick,
}) => {
return <AccessibleButton
className={classNames("mx_VoiceBroadcastControl", className)}
onClick={onClick}
aria-label={label}
>
<Icon className="mx_Icon mx_Icon_16" />
</AccessibleButton>;
export const VoiceBroadcastControl: React.FC<Props> = ({ className = "", icon: Icon, label, onClick }) => {
return (
<AccessibleButton
className={classNames("mx_VoiceBroadcastControl", className)}
onClick={onClick}
aria-label={label}
>
<Icon className="mx_Icon mx_Icon_16" />
</AccessibleButton>
);
};

View file

@@ -54,13 +54,11 @@ export const VoiceBroadcastHeader: React.FC<VoiceBroadcastHeaderProps> = ({
const broadcast = showBroadcast && (
<div className="mx_VoiceBroadcastHeader_line">
<LiveIcon className="mx_Icon mx_Icon_16" />
{ _t("Voice broadcast") }
{_t("Voice broadcast")}
</div>
);
const liveBadge = live !== "not-live" && (
<LiveBadge grey={live === "grey"} />
);
const liveBadge = live !== "not-live" && <LiveBadge grey={live === "grey"} />;
const closeButton = showClose && (
<AccessibleButton onClick={onCloseClick}>
@@ -78,7 +76,7 @@ export const VoiceBroadcastHeader: React.FC<VoiceBroadcastHeaderProps> = ({
const buffering = showBuffering && (
<div className="mx_VoiceBroadcastHeader_line">
<Spinner w={14} h={14} />
{ _t("Buffering…") }
{_t("Buffering…")}
</div>
);
@@ -94,22 +92,22 @@ export const VoiceBroadcastHeader: React.FC<VoiceBroadcastHeaderProps> = ({
title={_t("Change input device")}
>
<MicrophoneIcon className="mx_Icon mx_Icon_16" />
<span>{ microphoneLabel }</span>
<span>{microphoneLabel}</span>
</AccessibleTooltipButton>
);
return <div className="mx_VoiceBroadcastHeader">
<RoomAvatar room={room} width={32} height={32} />
<div className="mx_VoiceBroadcastHeader_content">
<div className="mx_VoiceBroadcastHeader_room">
{ room.name }
</div>
{ microphoneLine }
{ timeLeftLine }
{ broadcast }
{ buffering }
</div>
{ liveBadge }
{ closeButton }
</div>;
return (
<div className="mx_VoiceBroadcastHeader">
<RoomAvatar room={room} width={32} height={32} />
<div className="mx_VoiceBroadcastHeader_content">
<div className="mx_VoiceBroadcastHeader_room">{room.name}</div>
{microphoneLine}
{timeLeftLine}
{broadcast}
{buffering}
</div>
{liveBadge}
{closeButton}
</div>
);
};

View file

@@ -20,8 +20,10 @@ import { Icon as LiveIcon } from "../../../../res/img/element-icons/live.svg";
import { _t } from "../../../languageHandler";
export const VoiceBroadcastRoomSubtitle = () => {
return <div className="mx_RoomTile_subtitle mx_RoomTile_subtitle--voice-broadcast">
<LiveIcon className="mx_Icon mx_Icon_16" />
{ _t("Live") }
</div>;
return (
<div className="mx_RoomTile_subtitle mx_RoomTile_subtitle--voice-broadcast">
<LiveIcon className="mx_Icon mx_Icon_16" />
{_t("Live")}
</div>
);
};

View file

@@ -40,18 +40,8 @@ interface VoiceBroadcastPlaybackBodyProps {
playback: VoiceBroadcastPlayback;
}
export const VoiceBroadcastPlaybackBody: React.FC<VoiceBroadcastPlaybackBodyProps> = ({
pip = false,
playback,
}) => {
const {
times,
liveness,
playbackState,
room,
sender,
toggle,
} = useVoiceBroadcastPlayback(playback);
export const VoiceBroadcastPlaybackBody: React.FC<VoiceBroadcastPlaybackBodyProps> = ({ pip = false, playback }) => {
const { times, liveness, playbackState, room, sender, toggle } = useVoiceBroadcastPlayback(playback);
let controlIcon: React.FC<React.SVGProps<SVGSVGElement>>;
let controlLabel: string;
@@ -75,12 +65,9 @@ export const VoiceBroadcastPlaybackBody: React.FC<VoiceBroadcastPlaybackBodyProp
break;
}
const control = <VoiceBroadcastControl
className={className}
label={controlLabel}
icon={controlIcon}
onClick={toggle}
/>;
const control = (
<VoiceBroadcastControl className={className} label={controlLabel} icon={controlIcon} onClick={toggle} />
);
let seekBackwardButton: ReactElement | null = null;
let seekForwardButton: ReactElement | null = null;
@@ -90,21 +77,17 @@ export const VoiceBroadcastPlaybackBody: React.FC<VoiceBroadcastPlaybackBodyProp
playback.skipTo(Math.max(0, times.position - SEEK_TIME));
};
seekBackwardButton = <SeekButton
icon={Back30sIcon}
label={_t("30s backward")}
onClick={onSeekBackwardButtonClick}
/>;
seekBackwardButton = (
<SeekButton icon={Back30sIcon} label={_t("30s backward")} onClick={onSeekBackwardButtonClick} />
);
const onSeekForwardButtonClick = () => {
playback.skipTo(Math.min(times.duration, times.position + SEEK_TIME));
};
seekForwardButton = <SeekButton
icon={Forward30sIcon}
label={_t("30s forward")}
onClick={onSeekForwardButtonClick}
/>;
seekForwardButton = (
<SeekButton icon={Forward30sIcon} label={_t("30s forward")} onClick={onSeekForwardButtonClick} />
);
}
const classes = classNames({
@@ -122,9 +105,9 @@ export const VoiceBroadcastPlaybackBody: React.FC<VoiceBroadcastPlaybackBodyProp
showBuffering={playbackState === VoiceBroadcastPlaybackState.Buffering}
/>
<div className="mx_VoiceBroadcastBody_controls">
{ seekBackwardButton }
{ control }
{ seekForwardButton }
{seekBackwardButton}
{control}
{seekForwardButton}
</div>
<SeekBar playback={playback} />
<div className="mx_VoiceBroadcastBody_timerow">

View file

@@ -28,9 +28,7 @@ interface Props {
voiceBroadcastPreRecording: VoiceBroadcastPreRecording;
}
export const VoiceBroadcastPreRecordingPip: React.FC<Props> = ({
voiceBroadcastPreRecording,
}) => {
export const VoiceBroadcastPreRecordingPip: React.FC<Props> = ({ voiceBroadcastPreRecording }) => {
const pipRef = useRef<HTMLDivElement | null>(null);
const { currentDevice, currentDeviceLabel, devices, setDevice } = useAudioDeviceSelection();
const [showDeviceSelect, setShowDeviceSelect] = useState<boolean>(false);
@@ -40,32 +38,31 @@ export const VoiceBroadcastPreRecordingPip: React.FC<Props> = ({
setDevice(device);
};
return <div
className="mx_VoiceBroadcastBody mx_VoiceBroadcastBody--pip"
ref={pipRef}
>
<VoiceBroadcastHeader
onCloseClick={voiceBroadcastPreRecording.cancel}
onMicrophoneLineClick={() => setShowDeviceSelect(true)}
room={voiceBroadcastPreRecording.room}
microphoneLabel={currentDeviceLabel}
showClose={true}
/>
<AccessibleButton
className="mx_VoiceBroadcastBody_blockButton"
kind="danger"
onClick={voiceBroadcastPreRecording.start}
>
<LiveIcon className="mx_Icon mx_Icon_16" />
{ _t("Go live") }
</AccessibleButton>
{
showDeviceSelect && <DevicesContextMenu
containerRef={pipRef}
currentDevice={currentDevice}
devices={devices}
onDeviceSelect={onDeviceSelect}
/>
}
</div>;
return (
<div className="mx_VoiceBroadcastBody mx_VoiceBroadcastBody--pip" ref={pipRef}>
<VoiceBroadcastHeader
onCloseClick={voiceBroadcastPreRecording.cancel}
onMicrophoneLineClick={() => setShowDeviceSelect(true)}
room={voiceBroadcastPreRecording.room}
microphoneLabel={currentDeviceLabel}
showClose={true}
/>
<AccessibleButton
className="mx_VoiceBroadcastBody_blockButton"
kind="danger"
onClick={voiceBroadcastPreRecording.start}
>
<LiveIcon className="mx_Icon mx_Icon_16" />
{_t("Go live")}
</AccessibleButton>
{showDeviceSelect && (
<DevicesContextMenu
containerRef={pipRef}
currentDevice={currentDevice}
devices={devices}
onDeviceSelect={onDeviceSelect}
/>
)}
</div>
);
};

View file

@@ -20,19 +20,11 @@ interface VoiceBroadcastRecordingBodyProps {
}
export const VoiceBroadcastRecordingBody: React.FC<VoiceBroadcastRecordingBodyProps> = ({ recording }) => {
const {
live,
room,
sender,
} = useVoiceBroadcastRecording(recording);
const { live, room, sender } = useVoiceBroadcastRecording(recording);
return (
<div className="mx_VoiceBroadcastBody">
<VoiceBroadcastHeader
live={live ? "live" : "grey"}
microphoneLabel={sender?.name}
room={room}
/>
<VoiceBroadcastHeader live={live ? "live" : "grey"} microphoneLabel={sender?.name} room={room} />
</div>
);
};

View file

@@ -16,11 +16,7 @@ limitations under the License.
import React, { useRef, useState } from "react";
import {
VoiceBroadcastControl,
VoiceBroadcastInfoState,
VoiceBroadcastRecording,
} from "../..";
import { VoiceBroadcastControl, VoiceBroadcastInfoState, VoiceBroadcastRecording } from "../..";
import { useVoiceBroadcastRecording } from "../../hooks/useVoiceBroadcastRecording";
import { VoiceBroadcastHeader } from "../atoms/VoiceBroadcastHeader";
import { Icon as StopIcon } from "../../../../res/img/element-icons/Stop.svg";
@@ -38,14 +34,8 @@ interface VoiceBroadcastRecordingPipProps {
export const VoiceBroadcastRecordingPip: React.FC<VoiceBroadcastRecordingPipProps> = ({ recording }) => {
const pipRef = useRef<HTMLDivElement | null>(null);
const {
live,
timeLeft,
recordingState,
room,
stopRecording,
toggleRecording,
} = useVoiceBroadcastRecording(recording);
const { live, timeLeft, recordingState, room, stopRecording, toggleRecording } =
useVoiceBroadcastRecording(recording);
const { currentDevice, devices, setDevice } = useAudioDeviceSelection();
const onDeviceSelect = async (device: MediaDeviceInfo) => {
@@ -70,46 +60,37 @@ export const VoiceBroadcastRecordingPip: React.FC<VoiceBroadcastRecordingPipProp
const [showDeviceSelect, setShowDeviceSelect] = useState<boolean>(false);
const toggleControl = recordingState === VoiceBroadcastInfoState.Paused
? <VoiceBroadcastControl
className="mx_VoiceBroadcastControl-recording"
onClick={toggleRecording}
icon={RecordIcon}
label={_t("resume voice broadcast")}
/>
: <VoiceBroadcastControl onClick={toggleRecording} icon={PauseIcon} label={_t("pause voice broadcast")} />;
return <div
className="mx_VoiceBroadcastBody mx_VoiceBroadcastBody--pip"
ref={pipRef}
>
<VoiceBroadcastHeader
live={live ? "live" : "grey"}
room={room}
timeLeft={timeLeft}
/>
<hr className="mx_VoiceBroadcastBody_divider" />
<div className="mx_VoiceBroadcastBody_controls">
{ toggleControl }
<AccessibleTooltipButton
onClick={() => setShowDeviceSelect(true)}
title={_t("Change input device")}
>
<MicrophoneIcon className="mx_Icon mx_Icon_16 mx_Icon_alert" />
</AccessibleTooltipButton>
<VoiceBroadcastControl
icon={StopIcon}
label="Stop Recording"
onClick={stopRecording}
/>
</div>
{
showDeviceSelect && <DevicesContextMenu
containerRef={pipRef}
currentDevice={currentDevice}
devices={devices}
onDeviceSelect={onDeviceSelect}
/>
}
</div>;
const toggleControl =
recordingState === VoiceBroadcastInfoState.Paused ? (
<VoiceBroadcastControl
className="mx_VoiceBroadcastControl-recording"
onClick={toggleRecording}
icon={RecordIcon}
label={_t("resume voice broadcast")}
/>
) : (
<VoiceBroadcastControl onClick={toggleRecording} icon={PauseIcon} label={_t("pause voice broadcast")} />
);
return (
<div className="mx_VoiceBroadcastBody mx_VoiceBroadcastBody--pip" ref={pipRef}>
<VoiceBroadcastHeader live={live ? "live" : "grey"} room={room} timeLeft={timeLeft} />
<hr className="mx_VoiceBroadcastBody_divider" />
<div className="mx_VoiceBroadcastBody_controls">
{toggleControl}
<AccessibleTooltipButton onClick={() => setShowDeviceSelect(true)} title={_t("Change input device")}>
<MicrophoneIcon className="mx_Icon mx_Icon_16 mx_Icon_alert" />
</AccessibleTooltipButton>
<VoiceBroadcastControl icon={StopIcon} label="Stop Recording" onClick={stopRecording} />
</div>
{showDeviceSelect && (
<DevicesContextMenu
containerRef={pipRef}
currentDevice={currentDevice}
devices={devices}
onDeviceSelect={onDeviceSelect}
/>
)}
</div>
);
};

View file

@@ -23,9 +23,7 @@ import {
VoiceBroadcastPlaybacksStoreEvent,
} from "../stores/VoiceBroadcastPlaybacksStore";
export const useCurrentVoiceBroadcastPlayback = (
voiceBroadcastPlaybackStore: VoiceBroadcastPlaybacksStore,
) => {
export const useCurrentVoiceBroadcastPlayback = (voiceBroadcastPlaybackStore: VoiceBroadcastPlaybacksStore) => {
const [currentVoiceBroadcastPlayback, setVoiceBroadcastPlayback] = useState(
voiceBroadcastPlaybackStore.getCurrent(),
);

View file

@@ -26,11 +26,7 @@ export const useCurrentVoiceBroadcastPreRecording = (
voiceBroadcastPreRecordingStore.getCurrent(),
);
useTypedEventEmitter(
voiceBroadcastPreRecordingStore,
"changed",
setCurrentVoiceBroadcastPreRecording,
);
useTypedEventEmitter(voiceBroadcastPreRecordingStore, "changed", setCurrentVoiceBroadcastPreRecording);
return {
currentVoiceBroadcastPreRecording,

View file

@@ -19,9 +19,7 @@ import { useState } from "react";
import { VoiceBroadcastRecordingsStore, VoiceBroadcastRecordingsStoreEvent } from "..";
import { useTypedEventEmitter } from "../../hooks/useEventEmitter";
export const useCurrentVoiceBroadcastRecording = (
voiceBroadcastRecordingsStore: VoiceBroadcastRecordingsStore,
) => {
export const useCurrentVoiceBroadcastRecording = (voiceBroadcastRecordingsStore: VoiceBroadcastRecordingsStore) => {
const [currentVoiceBroadcastRecording, setCurrentVoiceBroadcastRecording] = useState(
voiceBroadcastRecordingsStore.getCurrent(),
);

View file

@@ -23,13 +23,9 @@ import { useTypedEventEmitter } from "../../hooks/useEventEmitter";
export const useHasRoomLiveVoiceBroadcast = (room: Room) => {
const [hasLiveVoiceBroadcast, setHasLiveVoiceBroadcast] = useState(hasRoomLiveVoiceBroadcast(room).hasBroadcast);
useTypedEventEmitter(
room.currentState,
RoomStateEvent.Update,
() => {
setHasLiveVoiceBroadcast(hasRoomLiveVoiceBroadcast(room).hasBroadcast);
},
);
useTypedEventEmitter(room.currentState, RoomStateEvent.Update, () => {
setHasLiveVoiceBroadcast(hasRoomLiveVoiceBroadcast(room).hasBroadcast);
});
return hasLiveVoiceBroadcast;
};

View file

@@ -18,20 +18,14 @@ import { useState } from "react";
import { useTypedEventEmitter } from "../../hooks/useEventEmitter";
import { MatrixClientPeg } from "../../MatrixClientPeg";
import {
VoiceBroadcastPlayback,
VoiceBroadcastPlaybackEvent,
VoiceBroadcastPlaybackState,
} from "..";
import { VoiceBroadcastPlayback, VoiceBroadcastPlaybackEvent, VoiceBroadcastPlaybackState } from "..";
export const useVoiceBroadcastPlayback = (playback: VoiceBroadcastPlayback) => {
const client = MatrixClientPeg.get();
const room = client.getRoom(playback.infoEvent.getRoomId());
if (!room) {
throw new Error(
`Voice Broadcast room not found (event ${playback.infoEvent.getId()})`,
);
throw new Error(`Voice Broadcast room not found (event ${playback.infoEvent.getId()})`);
}
const playbackToggle = () => {
@@ -52,18 +46,10 @@ export const useVoiceBroadcastPlayback = (playback: VoiceBroadcastPlayback) => {
position: playback.timeSeconds,
timeLeft: playback.timeLeftSeconds,
});
useTypedEventEmitter(
playback,
VoiceBroadcastPlaybackEvent.TimesChanged,
t => setTimes(t),
);
useTypedEventEmitter(playback, VoiceBroadcastPlaybackEvent.TimesChanged, (t) => setTimes(t));
const [liveness, setLiveness] = useState(playback.getLiveness());
useTypedEventEmitter(
playback,
VoiceBroadcastPlaybackEvent.LivenessChanged,
l => setLiveness(l),
);
useTypedEventEmitter(playback, VoiceBroadcastPlaybackEvent.LivenessChanged, (l) => setLiveness(l));
return {
times,

View file

@@ -16,11 +16,7 @@ limitations under the License.
import React, { useState } from "react";
import {
VoiceBroadcastInfoState,
VoiceBroadcastRecording,
VoiceBroadcastRecordingEvent,
} from "..";
import { VoiceBroadcastInfoState, VoiceBroadcastRecording, VoiceBroadcastRecordingEvent } from "..";
import QuestionDialog from "../../components/views/dialogs/QuestionDialog";
import { useTypedEventEmitter } from "../../hooks/useEventEmitter";
import { _t } from "../../languageHandler";
@@ -28,19 +24,18 @@ import { MatrixClientPeg } from "../../MatrixClientPeg";
import Modal from "../../Modal";
const showStopBroadcastingDialog = async (): Promise<boolean> => {
const { finished } = Modal.createDialog(
QuestionDialog,
{
title: _t("Stop live broadcasting?"),
description: (
<p>
{ _t("Are you sure you want to stop your live broadcast?"
+ "This will end the broadcast and the full recording will be available in the room.") }
</p>
),
button: _t("Yes, stop broadcast"),
},
);
const { finished } = Modal.createDialog(QuestionDialog, {
title: _t("Stop live broadcasting?"),
description: (
<p>
{_t(
"Are you sure you want to stop your live broadcast?" +
"This will end the broadcast and the full recording will be available in the room.",
)}
</p>
),
button: _t("Yes, stop broadcast"),
});
const [confirmed] = await finished;
return confirmed;
};
@@ -72,16 +67,9 @@ export const useVoiceBroadcastRecording = (recording: VoiceBroadcastRecording) =
);
const [timeLeft, setTimeLeft] = useState(recording.getTimeLeft());
useTypedEventEmitter(
recording,
VoiceBroadcastRecordingEvent.TimeLeftChanged,
setTimeLeft,
);
useTypedEventEmitter(recording, VoiceBroadcastRecordingEvent.TimeLeftChanged, setTimeLeft);
const live = [
VoiceBroadcastInfoState.Started,
VoiceBroadcastInfoState.Resumed,
].includes(recordingState);
const live = [VoiceBroadcastInfoState.Started, VoiceBroadcastInfoState.Resumed].includes(recordingState);
return {
live,

View file

@@ -60,14 +60,15 @@ interface EventMap {
[VoiceBroadcastPlaybackEvent.LivenessChanged]: (liveness: VoiceBroadcastLiveness) => void;
[VoiceBroadcastPlaybackEvent.StateChanged]: (
state: VoiceBroadcastPlaybackState,
playback: VoiceBroadcastPlayback
playback: VoiceBroadcastPlayback,
) => void;
[VoiceBroadcastPlaybackEvent.InfoStateChanged]: (state: VoiceBroadcastInfoState) => void;
}
export class VoiceBroadcastPlayback
extends TypedEventEmitter<VoiceBroadcastPlaybackEvent, EventMap>
implements IDestroyable, PlaybackInterface {
implements IDestroyable, PlaybackInterface
{
private state = VoiceBroadcastPlaybackState.Stopped;
private chunkEvents = new VoiceBroadcastChunkEvents();
private playbacks = new Map<string, Playback>();
@@ -87,10 +88,7 @@ export class VoiceBroadcastPlayback
private chunkRelationHelper!: RelationsHelper;
private infoRelationHelper!: RelationsHelper;
public constructor(
public readonly infoEvent: MatrixEvent,
private client: MatrixClient,
) {
public constructor(public readonly infoEvent: MatrixEvent, private client: MatrixClient) {
super();
this.addInfoEvent(this.infoEvent);
this.infoEvent.on(MatrixEventEvent.BeforeRedaction, this.onBeforeRedaction);
@@ -213,13 +211,10 @@ export class VoiceBroadcastPlayback
this.playbacks.delete(event.getId()!);
}
private onPlaybackPositionUpdate = (
event: MatrixEvent,
position: number,
): void => {
private onPlaybackPositionUpdate = (event: MatrixEvent, position: number): void => {
if (event !== this.currentlyPlaying) return;
const newPosition = this.chunkEvents.getLengthTo(event) + (position * 1000); // observable sends seconds
const newPosition = this.chunkEvents.getLengthTo(event) + position * 1000; // observable sends seconds
// do not jump backwards - this can happen when transiting from one to another chunk
if (newPosition < this.position) return;
@@ -244,14 +239,11 @@ export class VoiceBroadcastPlayback
}
private emitTimesChanged(): void {
this.emit(
VoiceBroadcastPlaybackEvent.TimesChanged,
{
duration: this.durationSeconds,
position: this.timeSeconds,
timeLeft: this.timeLeftSeconds,
},
);
this.emit(VoiceBroadcastPlaybackEvent.TimesChanged, {
duration: this.durationSeconds,
position: this.timeSeconds,
timeLeft: this.timeLeftSeconds,
});
}
private onPlaybackStateChange = async (event: MatrixEvent, newState: PlaybackState): Promise<void> => {
@@ -408,9 +400,10 @@ export class VoiceBroadcastPlayback
public async start(): Promise<void> {
const chunkEvents = this.chunkEvents.getEvents();
const toPlay = this.getInfoState() === VoiceBroadcastInfoState.Stopped
? chunkEvents[0] // start at the beginning for an ended voice broadcast
: chunkEvents[chunkEvents.length - 1]; // start at the current chunk for an ongoing voice broadcast
const toPlay =
this.getInfoState() === VoiceBroadcastInfoState.Stopped
? chunkEvents[0] // start at the beginning for an ended voice broadcast
: chunkEvents[chunkEvents.length - 1]; // start at the current chunk for an ongoing voice broadcast
if (toPlay) {
return this.playEvent(toPlay);
@@ -499,7 +492,7 @@ export class VoiceBroadcastPlayback
this.removeAllListeners();
this.chunkEvents = new VoiceBroadcastChunkEvents();
this.playbacks.forEach(p => p.destroy());
this.playbacks.forEach((p) => p.destroy());
this.playbacks = new Map<string, Playback>();
}
}

View file

@@ -25,12 +25,13 @@ import { startNewVoiceBroadcastRecording } from "../utils/startNewVoiceBroadcast
type VoiceBroadcastPreRecordingEvent = "dismiss";
interface EventMap {
"dismiss": (voiceBroadcastPreRecording: VoiceBroadcastPreRecording) => void;
dismiss: (voiceBroadcastPreRecording: VoiceBroadcastPreRecording) => void;
}
export class VoiceBroadcastPreRecording
extends TypedEventEmitter<VoiceBroadcastPreRecordingEvent, EventMap>
implements IDestroyable {
implements IDestroyable
{
public constructor(
public room: Room,
public sender: RoomMember,
@@ -42,12 +43,7 @@ export class VoiceBroadcastPreRecording
}
public start = async (): Promise<void> => {
await startNewVoiceBroadcastRecording(
this.room,
this.client,
this.playbacksStore,
this.recordingsStore,
);
await startNewVoiceBroadcastRecording(this.room, this.client, this.playbacksStore, this.recordingsStore);
this.emit("dismiss", this);
};

View file

@@ -56,7 +56,8 @@ interface EventMap {
export class VoiceBroadcastRecording
extends TypedEventEmitter<VoiceBroadcastRecordingEvent, EventMap>
implements IDestroyable {
implements IDestroyable
{
private state: VoiceBroadcastInfoState;
private recorder: VoiceBroadcastRecorder;
private sequence = 1;
@@ -108,8 +109,8 @@ export class VoiceBroadcastRecording
private onChunkEvent = (event: MatrixEvent): void => {
if (
(!event.getId() && !event.getTxnId())
|| event.getContent()?.msgtype !== MsgType.Audio // don't add non-audio event
(!event.getId() && !event.getTxnId()) ||
event.getContent()?.msgtype !== MsgType.Audio // don't add non-audio event
) {
return;
}
@@ -119,15 +120,19 @@ export class VoiceBroadcastRecording
private setInitialStateFromInfoEvent(): void {
const room = this.client.getRoom(this.infoEvent.getRoomId());
const relations = room?.getUnfilteredTimelineSet()?.relations?.getChildEventsForEvent(
this.infoEvent.getId(),
RelationType.Reference,
VoiceBroadcastInfoEventType,
);
const relations = room
?.getUnfilteredTimelineSet()
?.relations?.getChildEventsForEvent(
this.infoEvent.getId(),
RelationType.Reference,
VoiceBroadcastInfoEventType,
);
const relatedEvents = relations?.getRelations();
this.state = !relatedEvents?.find((event: MatrixEvent) => {
return event.getContent()?.state === VoiceBroadcastInfoState.Stopped;
}) ? VoiceBroadcastInfoState.Started : VoiceBroadcastInfoState.Stopped;
})
? VoiceBroadcastInfoState.Started
: VoiceBroadcastInfoState.Stopped;
}
public getTimeLeft(): number {
@@ -244,12 +249,9 @@ export class VoiceBroadcastRecording
return uploadFile(
this.client,
this.infoEvent.getRoomId(),
new Blob(
[chunk.buffer],
{
type: this.getRecorder().contentType,
},
),
new Blob([chunk.buffer], {
type: this.getRecorder().contentType,
}),
);
}

View file

@@ -35,7 +35,8 @@ interface EventMap {
*/
export class VoiceBroadcastPlaybacksStore
extends TypedEventEmitter<VoiceBroadcastPlaybacksStoreEvent, EventMap>
implements IDestroyable {
implements IDestroyable
{
private current: VoiceBroadcastPlayback | null;
/** Playbacks indexed by their info event id. */
@@ -83,10 +84,7 @@ export class VoiceBroadcastPlaybacksStore
playback.on(VoiceBroadcastPlaybackEvent.StateChanged, this.onPlaybackStateChanged);
}
private onPlaybackStateChanged = (
state: VoiceBroadcastPlaybackState,
playback: VoiceBroadcastPlayback,
): void => {
private onPlaybackStateChanged = (state: VoiceBroadcastPlaybackState, playback: VoiceBroadcastPlayback): void => {
switch (state) {
case VoiceBroadcastPlaybackState.Buffering:
case VoiceBroadcastPlaybackState.Playing:

View file

@@ -27,7 +27,8 @@ interface EventMap {
export class VoiceBroadcastPreRecordingStore
extends TypedEventEmitter<VoiceBroadcastPreRecordingEvent, EventMap>
implements IDestroyable {
implements IDestroyable
{
private current: VoiceBroadcastPreRecording | null = null;
public setCurrent(current: VoiceBroadcastPreRecording): void {

View file

@@ -50,7 +50,7 @@ export class VoiceBroadcastChunkEvents {
}
public includes(event: MatrixEvent): boolean {
return !!this.events.find(e => this.equalByTxnIdOrId(event, e));
return !!this.events.find((e) => this.equalByTxnIdOrId(event, e));
}
/**
@@ -98,20 +98,20 @@ export class VoiceBroadcastChunkEvents {
}
private calculateChunkLength(event: MatrixEvent): number {
return event.getContent()?.["org.matrix.msc1767.audio"]?.duration
|| event.getContent()?.info?.duration
|| 0;
return event.getContent()?.["org.matrix.msc1767.audio"]?.duration || event.getContent()?.info?.duration || 0;
}
private addOrReplaceEvent = (event: MatrixEvent): boolean => {
this.events = this.events.filter(e => !this.equalByTxnIdOrId(event, e));
this.events = this.events.filter((e) => !this.equalByTxnIdOrId(event, e));
this.events.push(event);
return true;
};
private equalByTxnIdOrId(eventA: MatrixEvent, eventB: MatrixEvent): boolean {
return eventA.getTxnId() && eventB.getTxnId() && eventA.getTxnId() === eventB.getTxnId()
|| eventA.getId() === eventB.getId();
return (
(eventA.getTxnId() && eventB.getTxnId() && eventA.getTxnId() === eventB.getTxnId()) ||
eventA.getId() === eventB.getId()
);
}
/**

View file

@@ -25,9 +25,7 @@ import { findRoomLiveVoiceBroadcastFromUserAndDevice } from "./findRoomLiveVoice
* Handles voice broadcasts on app resume (after logging in, reload, crash).
*/
export class VoiceBroadcastResumer implements IDestroyable {
public constructor(
private client: MatrixClient,
) {
public constructor(private client: MatrixClient) {
if (client.isInitialSyncComplete()) {
this.resume();
} else {
@@ -80,9 +78,10 @@ export class VoiceBroadcastResumer implements IDestroyable {
};
// all events should reference the started event
const referencedEventId = infoEvent.getContent()?.state === VoiceBroadcastInfoState.Started
? infoEvent.getId()
: infoEvent.getContent()?.["m.relates_to"]?.event_id;
const referencedEventId =
infoEvent.getContent()?.state === VoiceBroadcastInfoState.Started
? infoEvent.getId()
: infoEvent.getContent()?.["m.relates_to"]?.event_id;
if (referencedEventId) {
content["m.relates_to"] = {

View file

@@ -25,8 +25,14 @@ import Modal from "../../Modal";
const showAlreadyRecordingDialog = () => {
Modal.createDialog(InfoDialog, {
title: _t("Can't start a new voice broadcast"),
description: <p>{ _t("You are already recording a voice broadcast. "
+ "Please end your current voice broadcast to start a new one.") }</p>,
description: (
<p>
{_t(
"You are already recording a voice broadcast. " +
"Please end your current voice broadcast to start a new one.",
)}
</p>
),
hasCloseButton: true,
});
};
@@ -34,8 +40,14 @@ const showAlreadyRecordingDialog = () => {
const showInsufficientPermissionsDialog = () => {
Modal.createDialog(InfoDialog, {
title: _t("Can't start a new voice broadcast"),
description: <p>{ _t("You don't have the required permissions to start a voice broadcast in this room. "
+ "Contact a room administrator to upgrade your permissions.") }</p>,
description: (
<p>
{_t(
"You don't have the required permissions to start a voice broadcast in this room. " +
"Contact a room administrator to upgrade your permissions.",
)}
</p>
),
hasCloseButton: true,
});
};
@@ -43,8 +55,14 @@ const showInsufficientPermissionsDialog = () => {
const showOthersAlreadyRecordingDialog = () => {
Modal.createDialog(InfoDialog, {
title: _t("Can't start a new voice broadcast"),
description: <p>{ _t("Someone else is already recording a voice broadcast. "
+ "Wait for their voice broadcast to end to start a new one.") }</p>,
description: (
<p>
{_t(
"Someone else is already recording a voice broadcast. " +
"Wait for their voice broadcast to end to start a new one.",
)}
</p>
),
hasCloseButton: true,
});
};

View file

@@ -23,7 +23,5 @@ import SdkConfig, { DEFAULTS } from "../../SdkConfig";
* - If that fails fall back to 120 (two minutes)
*/
export const getChunkLength = (): number => {
return SdkConfig.get("voice_broadcast")?.chunk_length
|| DEFAULTS.voice_broadcast?.chunk_length
|| 120;
return SdkConfig.get("voice_broadcast")?.chunk_length || DEFAULTS.voice_broadcast?.chunk_length || 120;
};

View file

@@ -23,7 +23,5 @@ import SdkConfig, { DEFAULTS } from "../../SdkConfig";
* - If that fails fall back to four hours
*/
export const getMaxBroadcastLength = (): number => {
return SdkConfig.get("voice_broadcast")?.max_length
|| DEFAULTS.voice_broadcast?.max_length
|| 4 * 60 * 60;
return SdkConfig.get("voice_broadcast")?.max_length || DEFAULTS.voice_broadcast?.max_length || 4 * 60 * 60;
};

View file

@@ -24,7 +24,5 @@ export const shouldDisplayAsVoiceBroadcastRecordingTile = (
event: MatrixEvent,
): boolean => {
const userId = client.getUserId();
return !!userId
&& userId === event.getSender()
&& state !== VoiceBroadcastInfoState.Stopped;
return !!userId && userId === event.getSender() && state !== VoiceBroadcastInfoState.Stopped;
};

View file

@@ -18,10 +18,6 @@ import { MatrixEvent } from "matrix-js-sdk/src/matrix";
import { VoiceBroadcastInfoEventType, VoiceBroadcastInfoState } from "..";
export const shouldDisplayAsVoiceBroadcastTile = (event: MatrixEvent) => (
event.getType?.() === VoiceBroadcastInfoEventType
&& (
event.getContent?.()?.state === VoiceBroadcastInfoState.Started
|| event.isRedacted()
)
);
export const shouldDisplayAsVoiceBroadcastTile = (event: MatrixEvent) =>
event.getType?.() === VoiceBroadcastInfoEventType &&
(event.getContent?.()?.state === VoiceBroadcastInfoState.Started || event.isRedacted());

View file

@@ -51,10 +51,7 @@ const startBroadcast = async (
if (voiceBroadcastEvent?.getId() === result.event_id) {
room.off(RoomStateEvent.Events, onRoomStateEvents);
const recording = new VoiceBroadcastRecording(
voiceBroadcastEvent,
client,
);
const recording = new VoiceBroadcastRecording(voiceBroadcastEvent, client);
recordingsStore.setCurrent(recording);
recording.start();
resolve(recording);