Handle basic state machine of recordings

Travis Ralston 2021-04-20 21:30:21 -06:00
parent afd53d8b53
commit 32e3ce3dea
9 changed files with 241 additions and 74 deletions
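
The diff below drives the component off a RecordingState enum exported by VoiceRecording; only the Started and EndingSoon phases appear in this file. A minimal sketch of that enum, with the remaining members assumed for illustration rather than taken from this commit:

// Sketch only: Started and EndingSoon are the phases referenced in this diff;
// the other members are assumed.
export enum RecordingState {
    Started = "started",
    EndingSoon = "ending_soon", // the recording is approaching its maximum length
    Ended = "ended",
    Uploading = "uploading",
    Uploaded = "uploaded",
}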


@@ -17,7 +17,7 @@ limitations under the License.
import AccessibleTooltipButton from "../elements/AccessibleTooltipButton";
import {_t} from "../../../languageHandler";
import React from "react";
import {VoiceRecording} from "../../../voice/VoiceRecording";
import {RecordingState, VoiceRecording} from "../../../voice/VoiceRecording";
import {Room} from "matrix-js-sdk/src/models/room";
import {MatrixClientPeg} from "../../../MatrixClientPeg";
import classNames from "classnames";
@@ -25,6 +25,8 @@ import LiveRecordingWaveform from "../voice_messages/LiveRecordingWaveform";
import {replaceableComponent} from "../../../utils/replaceableComponent";
import LiveRecordingClock from "../voice_messages/LiveRecordingClock";
import {VoiceRecordingStore} from "../../../stores/VoiceRecordingStore";
import {UPDATE_EVENT} from "../../../stores/AsyncStore";
import PlaybackWaveform from "../voice_messages/PlaybackWaveform";
interface IProps {
room: Room;
@@ -32,6 +34,7 @@ interface IProps {
interface IState {
recorder?: VoiceRecording;
recordingPhase?: RecordingState;
}
/**
@@ -43,87 +46,126 @@ export default class VoiceRecordComposerTile extends React.PureComponent<IProps,
super(props);
this.state = {
recorder: null, // not recording by default
recorder: null, // no recording started by default
};
}
private onStartStopVoiceMessage = async () => {
// TODO: @@ TravisR: We do not want to auto-send on stop.
public async componentWillUnmount() {
await VoiceRecordingStore.instance.disposeRecording();
}
// called by composer
public async send() {
if (!this.state.recorder) {
throw new Error("No recording started - cannot send anything");
}
await this.state.recorder.stop();
const mxc = await this.state.recorder.upload();
MatrixClientPeg.get().sendMessage(this.props.room.roomId, {
"body": "Voice message",
"msgtype": "org.matrix.msc2516.voice",
//"msgtype": MsgType.Audio,
"url": mxc,
"info": {
duration: Math.round(this.state.recorder.durationSeconds * 1000),
mimetype: this.state.recorder.contentType,
size: this.state.recorder.contentLength,
},
// MSC1767 experiment
"org.matrix.msc1767.text": "Voice message",
"org.matrix.msc1767.file": {
url: mxc,
name: "Voice message.ogg",
mimetype: this.state.recorder.contentType,
size: this.state.recorder.contentLength,
},
"org.matrix.msc1767.audio": {
duration: Math.round(this.state.recorder.durationSeconds * 1000),
// TODO: @@ TravisR: Waveform? (MSC1767 decision)
},
"org.matrix.experimental.msc2516.voice": { // MSC2516+MSC1767 experiment
duration: Math.round(this.state.recorder.durationSeconds * 1000),
// Events can't have floats, so we try to maintain resolution by using 1024
// as a maximum value. The waveform contains values between zero and 1, so this
// should come out largely sane.
//
// We're expecting about one data point per second of audio.
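// For example, a waveform value of 0.5 is encoded as Math.round(0.5 * 1024) = 512,
// and a full-scale value of 1.0 as 1024.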
waveform: this.state.recorder.finalWaveform.map(v => Math.round(v * 1024)),
},
});
await VoiceRecordingStore.instance.disposeRecording();
this.setState({recorder: null});
}
private onRecordStartEndClick = async () => {
if (this.state.recorder) {
await this.state.recorder.stop();
const mxc = await this.state.recorder.upload();
MatrixClientPeg.get().sendMessage(this.props.room.roomId, {
"body": "Voice message",
"msgtype": "org.matrix.msc2516.voice",
//"msgtype": MsgType.Audio,
"url": mxc,
"info": {
duration: Math.round(this.state.recorder.durationSeconds * 1000),
mimetype: this.state.recorder.contentType,
size: this.state.recorder.contentLength,
},
// MSC1767 experiment
"org.matrix.msc1767.text": "Voice message",
"org.matrix.msc1767.file": {
url: mxc,
name: "Voice message.ogg",
mimetype: this.state.recorder.contentType,
size: this.state.recorder.contentLength,
},
"org.matrix.msc1767.audio": {
duration: Math.round(this.state.recorder.durationSeconds * 1000),
// TODO: @@ TravisR: Waveform? (MSC1767 decision)
},
"org.matrix.experimental.msc2516.voice": { // MSC2516+MSC1767 experiment
duration: Math.round(this.state.recorder.durationSeconds * 1000),
// Events can't have floats, so we try to maintain resolution by using 1024
// as a maximum value. The waveform contains values between zero and 1, so this
// should come out largely sane.
//
// We're expecting about one data point per second of audio.
waveform: this.state.recorder.finalWaveform.map(v => Math.round(v * 1024)),
},
});
await VoiceRecordingStore.instance.disposeRecording();
this.setState({recorder: null});
return;
}
const recorder = VoiceRecordingStore.instance.startRecording();
await recorder.start();
this.setState({recorder});
// We don't need to remove the listener: the recorder will clean that up for us.
recorder.on(UPDATE_EVENT, (ev: RecordingState) => {
if (ev === RecordingState.EndingSoon) return; // ignore this state: it has no UI purpose here
this.setState({recordingPhase: ev});
});
this.setState({recorder, recordingPhase: RecordingState.Started});
};
private renderWaveformArea() {
if (!this.state.recorder) return null;
return <div className='mx_VoiceRecordComposerTile_waveformContainer'>
<LiveRecordingClock recorder={this.state.recorder} />
<LiveRecordingWaveform recorder={this.state.recorder} />
const classes = classNames({
'mx_VoiceRecordComposerTile_waveformContainer': true,
'mx_VoiceRecordComposerTile_recording': this.state.recordingPhase === RecordingState.Started,
});
const clock = <LiveRecordingClock recorder={this.state.recorder} />;
let waveform = <LiveRecordingWaveform recorder={this.state.recorder} />;
if (this.state.recordingPhase !== RecordingState.Started) {
waveform = <PlaybackWaveform recorder={this.state.recorder} />;
}
return <div className={classes}>
{clock}
{waveform}
</div>;
}
public render() {
const classes = classNames({
'mx_MessageComposer_button': !this.state.recorder,
'mx_MessageComposer_voiceMessage': !this.state.recorder,
'mx_VoiceRecordComposerTile_stop': !!this.state.recorder,
});
let recordingInfo;
if (!this.state.recordingPhase || this.state.recordingPhase === RecordingState.Started) {
const classes = classNames({
'mx_MessageComposer_button': !this.state.recorder,
'mx_MessageComposer_voiceMessage': !this.state.recorder,
'mx_VoiceRecordComposerTile_stop': this.state.recorder?.isRecording,
});
let tooltip = _t("Record a voice message");
if (!!this.state.recorder) {
// TODO: @@ TravisR: Change to match behaviour
tooltip = _t("Stop & send recording");
let tooltip = _t("Record a voice message");
if (!!this.state.recorder) {
tooltip = _t("Stop the recording");
}
let stopOrRecordBtn = <AccessibleTooltipButton
className={classes}
onClick={this.onRecordStartEndClick}
title={tooltip}
/>;
if (this.state.recorder && !this.state.recorder?.isRecording) {
stopOrRecordBtn = null;
}
recordingInfo = stopOrRecordBtn;
}
return (<>
{this.renderWaveformArea()}
<AccessibleTooltipButton
className={classes}
onClick={this.onStartStopVoiceMessage}
title={tooltip}
/>
{recordingInfo}
</>);
}
}
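
For context, the new public send() is intended to be invoked by the surrounding message composer (per the "called by composer" note above), not by the tile itself. A minimal sketch of that wiring, assuming a hypothetical composer component, button, and ref name that are not part of this commit:

import React, {createRef} from "react";
import {Room} from "matrix-js-sdk/src/models/room";
import VoiceRecordComposerTile from "./VoiceRecordComposerTile"; // assumed path

// Sketch only: illustrates how a composer could hold a ref to the tile and
// delegate sending; the real composer wiring is outside this commit.
class ComposerSketch extends React.PureComponent<{room: Room}> {
    private voiceRecordingButton = createRef<VoiceRecordComposerTile>();

    private onSendClick = async () => {
        // The tile stops the recording, uploads it, and sends the voice message event.
        await this.voiceRecordingButton.current?.send();
    };

    public render() {
        return <>
            <VoiceRecordComposerTile ref={this.voiceRecordingButton} room={this.props.room} />
            <button onClick={this.onSendClick}>Send</button>
        </>;
    }
}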