Mirror of https://github.com/vector-im/hydrogen-web.git (synced 2024-12-23 19:45:05 +01:00)

Commit f6a0986b3c: Merge branch 'bwindels/calls' into thirdroom/dev
@@ -47,7 +47,8 @@ const assetPaths = {
wasmBundle: olmJsPath
}
};
import "hydrogen-view-sdk/style.css";
import "hydrogen-view-sdk/theme-element-light.css";
// OR import "hydrogen-view-sdk/theme-element-dark.css";

async function main() {
const app = document.querySelector<HTMLDivElement>('#app')!

@@ -39,7 +39,7 @@ function colorsFromURL(url, colorMap) {
function processURL(decl, replacer, colorMap) {
const value = decl.value;
const parsed = valueParser(value);
parsed.walk(async node => {
parsed.walk(node => {
if (node.type !== "function" || node.value !== "url") {
return;
}

@@ -37,7 +37,7 @@ module.exports.buildColorizedSVG = function (svgLocation, primaryColor, secondar
if (svgCode === coloredSVGCode) {
throw new Error("svg-colorizer made no color replacements! The input svg should only contain colors #ff00ff (primary, case-sensitive) and #00ffff (secondary, case-sensitive).");
}
const fileName = svgLocation.match(/.+\/(.+\.svg)/)[1];
const fileName = svgLocation.match(/.+[/\\](.+\.svg)/)[1];
const outputName = `${fileName.substring(0, fileName.length - 4)}-${createHash(coloredSVGCode)}.svg`;
const outputPath = path.resolve(__dirname, "../../.tmp");
try {

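The regex change above swaps `\/` for the character class `[/\\]`, so the SVG file name can be extracted from paths that use either forward slashes or Windows-style backslashes. A minimal sketch of the difference, with made-up paths:

const posixPath = "icons/element/check.svg";        // hypothetical input
const windowsPath = "icons\\element\\check.svg";    // hypothetical input with backslash separators

const oldPattern = /.+\/(.+\.svg)/;                 // only accepts "/" as separator
const newPattern = /.+[/\\](.+\.svg)/;              // accepts "/" or "\" before the file name

console.log(posixPath.match(oldPattern)?.[1]);      // "check.svg"
console.log(windowsPath.match(oldPattern)?.[1]);    // undefined: no match on backslash-only paths
console.log(windowsPath.match(newPattern)?.[1]);    // "check.svg"
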
@@ -1,4 +1,8 @@
#!/bin/bash
# Exit whenever one of the commands fails with a non-zero exit code
set -e
set -o pipefail

rm -rf target
yarn run vite build -c vite.sdk-assets-config.js
yarn run vite build -c vite.sdk-lib-config.js

src/domain/AvatarSource.ts (new file, 23 lines)
@@ -0,0 +1,23 @@
/*
Copyright 2020 Bruno Windels <bruno@windels.cloud>
Copyright 2020, 2021 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

export interface AvatarSource {
get avatarLetter(): string;
get avatarColorNumber(): number;
avatarUrl(size: number): string | undefined;
get avatarTitle(): string;
}

@@ -14,7 +14,7 @@ See the License for the specific language governing permissions and
limitations under the License.
*/

import {SortedArray} from "../observable/index.js";
import {SortedArray} from "../observable/index";
import {ViewModel} from "./ViewModel";
import {avatarInitials, getIdentifierColorNumber} from "./avatar";

@@ -51,10 +51,10 @@ export function getIdentifierColorNumber(id: string): number {
return (hashCode(id) % 8) + 1;
}

export function getAvatarHttpUrl(avatarUrl: string, cssSize: number, platform: Platform, mediaRepository: MediaRepository): string | null {
export function getAvatarHttpUrl(avatarUrl: string | undefined, cssSize: number, platform: Platform, mediaRepository: MediaRepository): string | undefined {
if (avatarUrl) {
const imageSize = cssSize * platform.devicePixelRatio;
return mediaRepository.mxcUrlThumbnail(avatarUrl, imageSize, imageSize, "crop");
}
return null;
return undefined;
}

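getAvatarHttpUrl now returns undefined instead of null when there is no avatar, and the requested thumbnail is scaled by the device pixel ratio. A small illustration with assumed values:

// Assumed values for illustration only.
const cssSize = 32;               // avatar rendered at 32 CSS pixels
const devicePixelRatio = 2;       // e.g. a high-DPI display
const imageSize = cssSize * devicePixelRatio;
console.log(imageSize);           // 64: a 64x64 "crop" thumbnail is requested from the media repository
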
@@ -14,23 +14,34 @@ See the License for the specific language governing permissions and
limitations under the License.
*/

import {AvatarSource} from "../../AvatarSource";
import {ViewModel, Options as BaseOptions} from "../../ViewModel";
import {getStreamVideoTrack, getStreamAudioTrack} from "../../../matrix/calls/common";
import {avatarInitials, getIdentifierColorNumber, getAvatarHttpUrl} from "../../avatar";
import {EventObservableValue} from "../../../observable/value/EventObservableValue";
import {ObservableValueMap} from "../../../observable/map/ObservableValueMap";
import type {GroupCall} from "../../../matrix/calls/group/GroupCall";
import type {Member} from "../../../matrix/calls/group/Member";
import type {BaseObservableList} from "../../../observable/list/BaseObservableList";
import type {Stream} from "../../../platform/types/MediaDevices";
import type {MediaRepository} from "../../../matrix/net/MediaRepository";

type Options = BaseOptions & {call: GroupCall};
type Options = BaseOptions & {
call: GroupCall,
mediaRepository: MediaRepository
};

export class CallViewModel extends ViewModel<Options> {

public readonly memberViewModels: BaseObservableList<CallMemberViewModel>;
public readonly memberViewModels: BaseObservableList<IStreamViewModel>;

constructor(options: Options) {
super(options);
this.memberViewModels = this.getOption("call").members
const ownMemberViewModelMap = new ObservableValueMap("self", new EventObservableValue(this.call, "change"))
.mapValues(call => new OwnMemberViewModel(this.childOptions({call: this.call, mediaRepository: this.getOption("mediaRepository")})), () => {});
this.memberViewModels = this.call.members
.filterValues(member => member.isConnected)
.mapValues(member => new CallMemberViewModel(this.childOptions({member})))
.mapValues(member => new CallMemberViewModel(this.childOptions({member, mediaRepository: this.getOption("mediaRepository")})))
.join(ownMemberViewModelMap)
.sortValues((a, b) => a.compare(b));
}

@@ -46,7 +57,7 @@ export class CallViewModel extends ViewModel<Options> {
return this.call.id;
}

get localStream(): Stream | undefined {
get stream(): Stream | undefined {
return this.call.localMedia?.userMedia;
}

@@ -55,11 +66,66 @@ export class CallViewModel extends ViewModel<Options> {
this.call.leave();
}
}

get isCameraMuted(): boolean {
return this.call.muteSettings.camera;
}

get isMicrophoneMuted(): boolean {
return this.call.muteSettings.microphone;
}

async toggleVideo() {
this.call.setMuted(this.call.muteSettings.toggleCamera());
}
}

type MemberOptions = BaseOptions & {member: Member};
type OwnMemberOptions = BaseOptions & {
call: GroupCall,
mediaRepository: MediaRepository
}

export class CallMemberViewModel extends ViewModel<MemberOptions> {
class OwnMemberViewModel extends ViewModel<OwnMemberOptions> implements IStreamViewModel {
get stream(): Stream | undefined {
return this.call.localMedia?.userMedia;
}

private get call(): GroupCall {
return this.getOption("call");
}

get isCameraMuted(): boolean {
return this.call.muteSettings.camera ?? !!getStreamVideoTrack(this.stream);
}

get isMicrophoneMuted(): boolean {
return this.call.muteSettings.microphone ?? !!getStreamAudioTrack(this.stream);
}

get avatarLetter(): string {
return "I";
}

get avatarColorNumber(): number {
return 3;
}

avatarUrl(size: number): string | undefined {
return undefined;
}

get avatarTitle(): string {
return "Me";
}

compare(other: OwnMemberViewModel | CallMemberViewModel): number {
return -1;
}
}

type MemberOptions = BaseOptions & {member: Member, mediaRepository: MediaRepository};

export class CallMemberViewModel extends ViewModel<MemberOptions> implements IStreamViewModel {
get stream(): Stream | undefined {
return this.member.remoteMedia?.userMedia;
}
@@ -68,7 +134,36 @@ export class CallMemberViewModel extends ViewModel<MemberOptions> {
return this.getOption("member");
}

compare(other: CallMemberViewModel): number {
get isCameraMuted(): boolean {
return this.member.remoteMuteSettings?.camera ?? !getStreamVideoTrack(this.stream);
}

get isMicrophoneMuted(): boolean {
return this.member.remoteMuteSettings?.microphone ?? !getStreamAudioTrack(this.stream);
}

get avatarLetter(): string {
return avatarInitials(this.member.member.name);
}

get avatarColorNumber(): number {
return getIdentifierColorNumber(this.member.userId);
}

avatarUrl(size: number): string | undefined {
const {avatarUrl} = this.member.member;
const mediaRepository = this.getOption("mediaRepository");
return getAvatarHttpUrl(avatarUrl, size, this.platform, mediaRepository);
}

get avatarTitle(): string {
return this.member.member.name;
}

compare(other: OwnMemberViewModel | CallMemberViewModel): number {
if (other instanceof OwnMemberViewModel) {
return -other.compare(this);
}
const myUserId = this.member.member.userId;
const otherUserId = other.member.member.userId;
if(myUserId === otherUserId) {
@@ -77,3 +172,9 @@ export class CallMemberViewModel extends ViewModel<MemberOptions> {
return myUserId < otherUserId ? -1 : 1;
}
}

export interface IStreamViewModel extends AvatarSource, ViewModel {
get stream(): Stream | undefined;
get isCameraMuted(): boolean;
get isMicrophoneMuted(): boolean;
}

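The compare implementations above drive the tile order: OwnMemberViewModel.compare always returns -1, CallMemberViewModel negates that when compared against the own member, and otherwise falls back to comparing user IDs (the equal-user-ID branch is cut off in the hunk above). So the local participant sorts first and remote members get a stable, ID-based order. A reduced sketch of that ordering with simplified stand-in types, not the actual view models:

// Simplified stand-ins for OwnMemberViewModel / CallMemberViewModel.
type Own = { kind: "own" };
type Remote = { kind: "remote"; userId: string };
type Tile = Own | Remote;

function compare(a: Tile, b: Tile): number {
    if (a.kind === "own") return -1;           // own member always sorts first
    if (b.kind === "own") return 1;
    if (a.userId === b.userId) return 0;       // the real code has an extra tie-breaker here
    return a.userId < b.userId ? -1 : 1;       // stable order for remote members
}

const tiles: Tile[] = [
    {kind: "remote", userId: "@bob:example.org"},
    {kind: "own"},
    {kind: "remote", userId: "@alice:example.org"},
];
console.log(tiles.sort(compare)); // own first, then @alice, then @bob
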
@@ -62,7 +62,7 @@ export class RoomViewModel extends ViewModel {
}
this._callViewModel = this.disposeTracked(this._callViewModel);
if (call) {
this._callViewModel = this.track(new CallViewModel(this.childOptions({call})));
this._callViewModel = this.track(new CallViewModel(this.childOptions({call, mediaRepository: this._room.mediaRepository})));
}
this.emitChange("callViewModel");
}));
@@ -367,9 +367,8 @@ export class RoomViewModel extends ViewModel {
const session = this.getOption("session");
const stream = await this.platform.mediaDevices.getMediaTracks(false, true);
const localMedia = new LocalMedia().withUserMedia(stream);
await this._call.join(localMedia);
// this will set the callViewModel above as a call will be added to callHandler.calls
const call = await session.callHandler.createCall(this._room.id, localMedia, "A call " + Math.round(this.platform.random() * 100));
const call = await session.callHandler.createCall(this._room.id, "m.video", "A call " + Math.round(this.platform.random() * 100));
await call.join(localMedia);
} catch (err) {
console.error(err.stack);

@@ -21,7 +21,7 @@ import {RoomStatus} from "./room/common";
import {RoomBeingCreated} from "./room/RoomBeingCreated";
import {Invite} from "./room/Invite.js";
import {Pusher} from "./push/Pusher";
import { ObservableMap } from "../observable/index.js";
import { ObservableMap } from "../observable/index";
import {User} from "./User.js";
import {DeviceMessageHandler} from "./DeviceMessageHandler.js";
import {Account as E2EEAccount} from "./e2ee/Account.js";

@@ -15,8 +15,8 @@ limitations under the License.
*/

import {ObservableMap} from "../../observable/map/ObservableMap";
import {WebRTC, PeerConnection, PeerConnectionHandler} from "../../platform/types/WebRTC";
import {MediaDevices, Track, AudioTrack} from "../../platform/types/MediaDevices";
import {WebRTC, PeerConnection} from "../../platform/types/WebRTC";
import {MediaDevices, Track} from "../../platform/types/MediaDevices";
import {handlesEventType} from "./PeerCall";
import {EventType, CallIntent} from "./callEventTypes";
import {GroupCall} from "./group/GroupCall";
@@ -107,7 +107,7 @@ export class CallHandler {
});
}

async createCall(roomId: string, callType: "m.video" | "m.voice", name: string, intent: CallIntent = CallIntent.Ring): Promise<GroupCall> {
async createCall(roomId: string, type: "m.video" | "m.voice", name: string, intent: CallIntent = CallIntent.Ring): Promise<GroupCall> {
const logItem = this.options.logger.child({l: "call", incoming: false});
const call = new GroupCall(makeId("conf-"), true, {
"m.name": name,
@@ -116,7 +116,7 @@ export class CallHandler {
this._calls.set(call.id, call);

try {
await call.create(callType);
await call.create(type);
// store call info so it will ring again when reopening the app
const txn = await this.options.storage.readWriteTxn([this.options.storage.storeNames.calls]);
txn.calls.add({

@@ -17,6 +17,7 @@ limitations under the License.
import {SDPStreamMetadataPurpose} from "./callEventTypes";
import {Stream} from "../../platform/types/MediaDevices";
import {SDPStreamMetadata} from "./callEventTypes";
import {getStreamVideoTrack, getStreamAudioTrack} from "./common";

export class LocalMedia {
constructor(
@@ -37,13 +38,44 @@ export class LocalMedia {
return new LocalMedia(this.userMedia, this.screenShare, options);
}

/** @internal */
replaceClone(oldClone: LocalMedia | undefined, oldOriginal: LocalMedia | undefined): LocalMedia {
let userMedia;
let screenShare;
const cloneOrAdoptStream = (oldOriginalStream: Stream | undefined, oldCloneStream: Stream | undefined, newStream: Stream | undefined): Stream | undefined => {
let stream;
if (oldOriginalStream?.id === newStream?.id) {
stream = oldCloneStream;
} else {
stream = newStream?.clone();
getStreamAudioTrack(oldCloneStream)?.stop();
getStreamVideoTrack(oldCloneStream)?.stop();
}
return stream;
}
return new LocalMedia(
cloneOrAdoptStream(oldOriginal?.userMedia, oldClone?.userMedia, this.userMedia),
cloneOrAdoptStream(oldOriginal?.screenShare, oldClone?.screenShare, this.screenShare),
this.dataChannelOptions
);
}

/** @internal */
clone(): LocalMedia {
return new LocalMedia(this.userMedia?.clone(), this.screenShare?.clone(), this.dataChannelOptions);
return new LocalMedia(this.userMedia?.clone(),this.screenShare?.clone(), this.dataChannelOptions);
}

dispose() {
this.userMedia?.audioTrack?.stop();
this.userMedia?.videoTrack?.stop();
this.screenShare?.videoTrack?.stop();
this.stopExcept(undefined);
}

stopExcept(newMedia: LocalMedia | undefined) {
if(newMedia?.userMedia?.id !== this.userMedia?.id) {
getStreamAudioTrack(this.userMedia)?.stop();
getStreamVideoTrack(this.userMedia)?.stop();
}
if(newMedia?.screenShare?.id !== this.screenShare?.id) {
getStreamVideoTrack(this.screenShare)?.stop();
}
}
}

@@ -16,25 +16,26 @@ limitations under the License.

import {ObservableMap} from "../../observable/map/ObservableMap";
import {recursivelyAssign} from "../../utils/recursivelyAssign";
import {AsyncQueue} from "../../utils/AsyncQueue";
import {Disposables, IDisposable} from "../../utils/Disposables";
import type {Room} from "../room/Room";
import type {StateEvent} from "../storage/types";
import type {ILogItem} from "../../logging/types";

import type {TimeoutCreator, Timeout} from "../../platform/types/types";
import {WebRTC, PeerConnection, PeerConnectionHandler, TrackSender, TrackReceiver} from "../../platform/types/WebRTC";
import {MediaDevices, Track, AudioTrack, Stream} from "../../platform/types/MediaDevices";
import type {LocalMedia} from "./LocalMedia";

import {Disposables, Disposable, IDisposable} from "../../utils/Disposables";
import {WebRTC, PeerConnection, Transceiver, TransceiverDirection, Sender, Receiver, PeerConnectionEventMap} from "../../platform/types/WebRTC";
import {MediaDevices, Track, TrackKind, Stream, StreamTrackEvent} from "../../platform/types/MediaDevices";
import {getStreamVideoTrack, getStreamAudioTrack, MuteSettings} from "./common";
import {
SDPStreamMetadataKey,
SDPStreamMetadataPurpose,
EventType,
CallErrorCode,
} from "./callEventTypes";

import type {Room} from "../room/Room";
import type {StateEvent} from "../storage/types";
import type {ILogItem} from "../../logging/types";
import type {TimeoutCreator, Timeout} from "../../platform/types/types";
import type {LocalMedia} from "./LocalMedia";
import type {
MCallBase,
MCallInvite,
MCallNegotiate,
MCallAnswer,
MCallSDPStreamMetadataChanged,
MCallCandidates,
@@ -66,7 +67,9 @@ export class PeerCall implements IDisposable {
private readonly peerConnection: PeerConnection;
private _state = CallState.Fledgling;
private direction: CallDirection;
// we don't own localMedia and should hence not call dispose on it from here
private localMedia?: LocalMedia;
private localMuteSettings?: MuteSettings;
private seq: number = 0;
// A queue for candidates waiting to go out.
// We try to amalgamate candidates into a single candidate message where
@@ -83,7 +86,8 @@ export class PeerCall implements IDisposable {
private hangupParty: CallParty;
private disposables = new Disposables();
private statePromiseMap = new Map<CallState, {resolve: () => void, promise: Promise<void>}>();

private _remoteTrackToStreamId = new Map<string, string>();
private _remoteStreams = new Map<string, {stream: Stream, disposeListener: Disposable}>();
// perfect negotiation flags
private makingOffer: boolean = false;
private ignoreOffer: boolean = false;
@@ -94,55 +98,62 @@ export class PeerCall implements IDisposable {
private _dataChannel?: any;
private _hangupReason?: CallErrorCode;
private _remoteMedia: RemoteMedia;
private _remoteMuteSettings = new MuteSettings();

constructor(
private callId: string,
private readonly options: Options,
private readonly logItem: ILogItem,
) {
const outer = this;
this._remoteMedia = new RemoteMedia();
this.peerConnection = options.webRTC.createPeerConnection({
onIceConnectionStateChange(state: RTCIceConnectionState) {
outer.logItem.wrap({l: "onIceConnectionStateChange", status: state}, log => {
outer.onIceConnectionStateChange(state, log);
this.peerConnection = options.webRTC.createPeerConnection(this.options.forceTURN, this.options.turnServers, 0);

const listen = <K extends keyof PeerConnectionEventMap>(type: K, listener: (this: PeerConnection, ev: PeerConnectionEventMap[K]) => any, options?: boolean | EventListenerOptions): void => {
this.peerConnection.addEventListener(type, listener);
const dispose = () => {
this.peerConnection.removeEventListener(type, listener);
};
this.disposables.track(dispose);
};

listen("iceconnectionstatechange", () => {
const state = this.peerConnection.iceConnectionState;
this.logItem.wrap({l: "onIceConnectionStateChange", status: state}, log => {
this.onIceConnectionStateChange(state, log);
});
});
listen("icecandidate", event => {
this.logItem.wrap("onLocalIceCandidate", log => {
if (event.candidate) {
this.handleLocalIceCandidate(event.candidate, log);
}
});
});
listen("icegatheringstatechange", () => {
const state = this.peerConnection.iceGatheringState;
this.logItem.wrap({l: "onIceGatheringStateChange", status: state}, log => {
this.handleIceGatheringState(state, log);
});
});
listen("track", event => {
this.logItem.wrap("onRemoteTrack", log => {
this.onRemoteTrack(event.track, event.streams, log);
});
});
listen("datachannel", event => {
this.logItem.wrap("onRemoteDataChannel", log => {
this._dataChannel = event.channel;
this.options.emitUpdate(this, undefined);
});
});
listen("negotiationneeded", () => {
const promiseCreator = () => {
return this.logItem.wrap("onNegotiationNeeded", log => {
return this.handleNegotiation(log);
});
},
onLocalIceCandidate(candidate: RTCIceCandidate) {
outer.logItem.wrap("onLocalIceCandidate", log => {
outer.handleLocalIceCandidate(candidate, log);
});
},
onIceGatheringStateChange(state: RTCIceGatheringState) {
outer.logItem.wrap({l: "onIceGatheringStateChange", status: state}, log => {
outer.handleIceGatheringState(state, log);
});
},
onRemoteStreamRemoved(stream: Stream) {
outer.logItem.wrap("onRemoteStreamRemoved", log => {
outer.updateRemoteMedia(log);
});
},
onRemoteTracksAdded(trackReceiver: TrackReceiver) {
outer.logItem.wrap("onRemoteTracksAdded", log => {
outer.updateRemoteMedia(log);
});
},
onRemoteDataChannel(dataChannel: any | undefined) {
outer.logItem.wrap("onRemoteDataChannel", log => {
outer._dataChannel = dataChannel;
outer.options.emitUpdate(outer, undefined);
});
},
onNegotiationNeeded() {
const promiseCreator = () => {
return outer.logItem.wrap("onNegotiationNeeded", log => {
return outer.handleNegotiation(log);
});
};
outer.responsePromiseChain = outer.responsePromiseChain?.then(promiseCreator) ?? promiseCreator();
}
}, this.options.forceTURN, this.options.turnServers, 0);
};
this.responsePromiseChain = this.responsePromiseChain?.then(promiseCreator) ?? promiseCreator();
});
}

get dataChannel(): any | undefined { return this._dataChannel; }
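The new constructor replaces the old PeerConnectionHandler callback object with standard addEventListener calls, wrapped in a small listen helper that registers a matching removeEventListener with the Disposables tracker, so every subscription is torn down when the call is disposed. A standalone sketch of that pattern, using a plain EventTarget and a stand-in tracker rather than hydrogen's own Disposables class:

// Minimal stand-in for the Disposables tracker used in PeerCall.
class SimpleDisposables {
    private disposables: Array<() => void> = [];
    track(d: () => void): () => void { this.disposables.push(d); return d; }
    dispose(): void { for (const d of this.disposables) d(); this.disposables = []; }
}

const disposables = new SimpleDisposables();

// Register a listener and track its removal so dispose() cleans it up later.
function listen(target: EventTarget, type: string, listener: EventListener): void {
    target.addEventListener(type, listener);
    disposables.track(() => target.removeEventListener(type, listener));
}

// Usage in a browser environment, e.g. with an RTCPeerConnection:
// const pc = new RTCPeerConnection();
// listen(pc, "icecandidate", e => console.log((e as RTCPeerConnectionIceEvent).candidate));
// ...when the call ends:
// disposables.dispose(); // removes every listener registered through listen()
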
@@ -151,21 +162,25 @@ export class PeerCall implements IDisposable {

get hangupReason(): CallErrorCode | undefined { return this._hangupReason; }

// we should keep an object with streams by purpose ... e.g. RemoteMedia?
get remoteMedia(): Readonly<RemoteMedia> {
return this._remoteMedia;
}

call(localMedia: LocalMedia): Promise<void> {
get remoteMuteSettings(): MuteSettings {
return this._remoteMuteSettings;
}

call(localMedia: LocalMedia, localMuteSettings: MuteSettings): Promise<void> {
return this.logItem.wrap("call", async log => {
if (this._state !== CallState.Fledgling) {
return;
}
this.direction = CallDirection.Outbound;
this.setState(CallState.CreateOffer, log);
this.setMedia(localMedia);
this.localMuteSettings = localMuteSettings;
await this.updateLocalMedia(localMedia, log);
if (this.localMedia?.dataChannelOptions) {
this._dataChannel = this.peerConnection.createDataChannel(this.localMedia.dataChannelOptions);
this._dataChannel = this.peerConnection.createDataChannel("channel", this.localMedia.dataChannelOptions);
}
// after adding the local tracks, and wait for handleNegotiation to be called,
// or invite glare where we give up our invite and answer instead
@@ -173,13 +188,14 @@ export class PeerCall implements IDisposable {
});
}

answer(localMedia: LocalMedia): Promise<void> {
answer(localMedia: LocalMedia, localMuteSettings: MuteSettings): Promise<void> {
return this.logItem.wrap("answer", async log => {
if (this._state !== CallState.Ringing) {
return;
}
this.setState(CallState.CreateAnswer, log);
this.setMedia(localMedia, log);
this.localMuteSettings = localMuteSettings;
await this.updateLocalMedia(localMedia, log);
let myAnswer: RTCSessionDescriptionInit;
try {
myAnswer = await this.peerConnection.createAnswer();
@@ -208,46 +224,47 @@ export class PeerCall implements IDisposable {
});
}

setMedia(localMedia: LocalMedia, logItem: ILogItem = this.logItem): Promise<void> {
return logItem.wrap("setMedia", async log => {
const oldMedia = this.localMedia;
this.localMedia = localMedia;
const applyStream = (oldStream: Stream | undefined, stream: Stream | undefined, logLabel: string) => {
const streamSender = oldMedia ? this.peerConnection.localStreams.get(oldStream!.id) : undefined;

const applyTrack = (oldTrack: Track | undefined, sender: TrackSender | undefined, track: Track | undefined) => {
if (track) {
if (oldTrack && sender) {
log.wrap(`replacing ${logLabel} ${track.kind} track`, log => {
sender.replaceTrack(track);
});
} else {
log.wrap(`adding ${logLabel} ${track.kind} track`, log => {
this.peerConnection.addTrack(track);
});
}
} else {
if (sender) {
log.wrap(`replacing ${logLabel} ${sender.track.kind} track`, log => {
this.peerConnection.removeTrack(sender);
});
}
}
}

applyTrack(oldStream?.audioTrack, streamSender?.audioSender, stream?.audioTrack);
applyTrack(oldStream?.videoTrack, streamSender?.videoSender, stream?.videoTrack);
}

applyStream(oldMedia?.userMedia, localMedia?.userMedia, "userMedia");
applyStream(oldMedia?.screenShare, localMedia?.screenShare, "screenShare");
// TODO: datachannel, but don't do it here as we don't want to do it from answer, rather in different method
setMedia(localMedia: LocalMedia): Promise<void> {
return this.logItem.wrap("setMedia", async log => {
log.set("userMedia_audio", !!getStreamAudioTrack(localMedia.userMedia));
log.set("userMedia_video", !!getStreamVideoTrack(localMedia.userMedia));
log.set("screenShare_video", !!getStreamVideoTrack(localMedia.screenShare));
log.set("datachannel", !!localMedia.dataChannelOptions);
await this.updateLocalMedia(localMedia, log);
const content: MCallSDPStreamMetadataChanged<MCallBase> = {
call_id: this.callId,
version: 1,
seq: this.seq++,
[SDPStreamMetadataKey]: this.getSDPMetadata()
};
await this.sendSignallingMessage({type: EventType.SDPStreamMetadataChangedPrefix, content}, log);
});
}

/** group calls would handle reject at the group call level, not at the peer call level */
async reject() {
setMuted(localMuteSettings: MuteSettings) {
return this.logItem.wrap("setMuted", async log => {
this.localMuteSettings = localMuteSettings;
log.set("cameraMuted", localMuteSettings.camera);
log.set("microphoneMuted", localMuteSettings.microphone);

if (this.localMedia) {
const userMediaAudio = getStreamAudioTrack(this.localMedia.userMedia);
if (userMediaAudio) {
this.muteTrack(userMediaAudio, this.localMuteSettings.microphone, log);
}
const userMediaVideo = getStreamVideoTrack(this.localMedia.userMedia);
if (userMediaVideo) {
this.muteTrack(userMediaVideo, this.localMuteSettings.camera, log);
}
const content: MCallSDPStreamMetadataChanged<MCallBase> = {
call_id: this.callId,
version: 1,
seq: this.seq++,
[SDPStreamMetadataKey]: this.getSDPMetadata()
};
await this.sendSignallingMessage({type: EventType.SDPStreamMetadataChangedPrefix, content}, log);
}
});
}

hangup(errorCode: CallErrorCode): Promise<void> {
@@ -256,6 +273,22 @@ export class PeerCall implements IDisposable {
});
}

private muteTrack(track: Track, muted: boolean, log: ILogItem): void {
log.wrap({l: "track", kind: track.kind, id: track.id}, log => {
const enabled = !muted;
log.set("enabled", enabled);
const transceiver = this.findTransceiverForTrack(track);
if (transceiver) {
if (transceiver.sender.track) {
transceiver.sender.track.enabled = enabled;
}
log.set("fromDirection", transceiver.direction);
// enableSenderOnTransceiver(transceiver, enabled);
log.set("toDirection", transceiver.direction);
}
});
}

private async _hangup(errorCode: CallErrorCode, log: ILogItem): Promise<void> {
if (this._state === CallState.Ended) {
return;
@@ -277,10 +310,20 @@ export class PeerCall implements IDisposable {
case EventType.Answer:
await this.handleAnswer(message.content, partyId, log);
break;
case EventType.Negotiate:
await this.onNegotiateReceived(message.content, log);
break;
case EventType.Candidates:
await this.handleRemoteIceCandidates(message.content, partyId, log);
break;
case EventType.SDPStreamMetadataChanged:
case EventType.SDPStreamMetadataChangedPrefix:
this.updateRemoteSDPStreamMetadata(message.content[SDPStreamMetadataKey], log);
break;
case EventType.Hangup:
// TODO: this is a bit hacky, double check it's what we need
this.terminate(CallParty.Remote, message.content.reason ?? CallErrorCode.UserHangup, log);
break;
default:
log.log(`Unknown event type for call: ${message.type}`);
break;
@@ -305,7 +348,7 @@ export class PeerCall implements IDisposable {
}

// calls are serialized and deduplicated by responsePromiseChain
private handleNegotiation = async (log: ILogItem): Promise<void> => {
private async handleNegotiation(log: ILogItem): Promise<void> {
this.makingOffer = true;
try {
try {
@@ -334,22 +377,27 @@ export class PeerCall implements IDisposable {
this.candidateSendQueue = [];

// need to queue this
const content = {
call_id: this.callId,
offer,
[SDPStreamMetadataKey]: this.getSDPMetadata(),
version: 1,
seq: this.seq++,
lifetime: CALL_TIMEOUT_MS
};
if (this._state === CallState.CreateOffer) {
const content = {
call_id: this.callId,
offer,
[SDPStreamMetadataKey]: this.getSDPMetadata(),
version: 1,
seq: this.seq++,
lifetime: CALL_TIMEOUT_MS
};
await this.sendSignallingMessage({type: EventType.Invite, content}, log);
this.setState(CallState.InviteSent, log);
} else if (this._state === CallState.Connected || this._state === CallState.Connecting) {
log.log("would send renegotiation now but not implemented");
// send Negotiate message
//await this.sendSignallingMessage({type: EventType.Invite, content});
//this.setState(CallState.InviteSent);
const content = {
call_id: this.callId,
description: offer,
[SDPStreamMetadataKey]: this.getSDPMetadata(),
version: 1,
seq: this.seq++,
lifetime: CALL_TIMEOUT_MS
};
await this.sendSignallingMessage({type: EventType.Negotiate, content}, log);
}
} finally {
this.makingOffer = false;
@@ -385,7 +433,7 @@ export class PeerCall implements IDisposable {
}
await this.handleInvite(content, partyId, log);
// TODO: need to skip state check
await this.answer(this.localMedia!);
await this.answer(this.localMedia!, this.localMuteSettings!);
} else {
log.log(
"Glare detected: rejecting incoming call " + newCallId +
@@ -444,12 +492,12 @@ export class PeerCall implements IDisposable {
// According to previous comments in this file, firefox at some point did not
// add streams until media started arriving on them. Testing latest firefox
// (81 at time of writing), this is no longer a problem, so let's do it the correct way.
// if (this.peerConnection.remoteTracks.length === 0) {
// await log.wrap(`Call no remote stream or no tracks after setting remote description!`, async log => {
// return this.terminate(CallParty.Local, CallErrorCode.SetRemoteDescription, log);
// });
// return;
// }
if (this.peerConnection.getReceivers().length === 0) {
await log.wrap(`Call no remote stream or no tracks after setting remote description!`, async log => {
return this.terminate(CallParty.Local, CallErrorCode.SetRemoteDescription, log);
});
return;
}

this.setState(CallState.Ringing, log);

@@ -571,55 +619,55 @@ export class PeerCall implements IDisposable {
await this.addIceCandidates(candidates, log);
}

// private async onNegotiateReceived(event: MatrixEvent): Promise<void> {
// const content = event.getContent<MCallNegotiate>();
// const description = content.description;
// if (!description || !description.sdp || !description.type) {
// this.logger.info(`Ignoring invalid m.call.negotiate event`);
// return;
// }
// // Politeness always follows the direction of the call: in a glare situation,
// // we pick either the inbound or outbound call, so one side will always be
// // inbound and one outbound
// const polite = this.direction === CallDirection.Inbound;
private async onNegotiateReceived(content: MCallNegotiate<MCallBase>, log: ILogItem): Promise<void> {
const description = content.description;
if (!description || !description.sdp || !description.type) {
log.log(`Ignoring invalid m.call.negotiate event`);
return;
}
// Politeness always follows the direction of the call: in a glare situation,
// we pick either the inbound or outbound call, so one side will always be
// inbound and one outbound
const polite = this.direction === CallDirection.Inbound;

// // Here we follow the perfect negotiation logic from
// // https://developer.mozilla.org/en-US/docs/Web/API/WebRTC_API/Perfect_negotiation
// const offerCollision = (
// (description.type === 'offer') &&
// (this.makingOffer || this.peerConnection.signalingState !== 'stable')
// );
// Here we follow the perfect negotiation logic from
// https://developer.mozilla.org/en-US/docs/Web/API/WebRTC_API/Perfect_negotiation
const offerCollision = (
(description.type === 'offer') &&
(this.makingOffer || this.peerConnection.signalingState !== 'stable')
);

// this.ignoreOffer = !polite && offerCollision;
// if (this.ignoreOffer) {
// this.logger.info(`Ignoring colliding negotiate event because we're impolite`);
// return;
// }
this.ignoreOffer = !polite && offerCollision;
if (this.ignoreOffer) {
log.log(`Ignoring colliding negotiate event because we're impolite`);
return;
}

// const sdpStreamMetadata = content[SDPStreamMetadataKey];
// if (sdpStreamMetadata) {
// this.updateRemoteSDPStreamMetadata(sdpStreamMetadata);
// } else {
// this.logger.warn(`Received negotiation event without SDPStreamMetadata!`);
// }
const sdpStreamMetadata = content[SDPStreamMetadataKey];
if (sdpStreamMetadata) {
this.updateRemoteSDPStreamMetadata(sdpStreamMetadata, log);
} else {
log.log(`Received negotiation event without SDPStreamMetadata!`);
}

// try {
// await this.peerConnection.setRemoteDescription(description);

// if (description.type === 'offer') {
// await this.peerConnection.setLocalDescription();
// await this.sendSignallingMessage({
// type: EventType.CallNegotiate,
// content: {
// description: this.peerConnection.localDescription!,
// [SDPStreamMetadataKey]: this.getSDPMetadata(),
// }
// });
// }
// } catch (err) {
// this.logger.warn(`Failed to complete negotiation`, err);
// }
// }
try {
await this.peerConnection.setRemoteDescription(description);
if (description.type === 'offer') {
await this.peerConnection.setLocalDescription();
const content = {
call_id: this.callId,
description: this.peerConnection.localDescription!,
[SDPStreamMetadataKey]: this.getSDPMetadata(),
version: 1,
seq: this.seq++,
lifetime: CALL_TIMEOUT_MS
};
await this.sendSignallingMessage({type: EventType.Negotiate, content}, log);
}
} catch (err) {
log.log(`Failed to complete negotiation`, err);
}
}

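The uncommented block above follows the MDN "perfect negotiation" pattern: the polite side (here the inbound leg of the call) accepts and applies a colliding offer, while the impolite side ignores it. A condensed sketch of just the collision check, with local variables mirroring the fields used above:

// Assumed minimal state, mirroring PeerCall's makingOffer / ignoreOffer fields.
let makingOffer = false;
let ignoreOffer = false;

function shouldIgnoreRemoteOffer(
    polite: boolean,                        // the inbound side of the call is the polite one
    descriptionType: "offer" | "answer",
    signalingState: RTCSignalingState
): boolean {
    const offerCollision =
        descriptionType === "offer" &&
        (makingOffer || signalingState !== "stable");
    ignoreOffer = !polite && offerCollision;
    return ignoreOffer;
}

// The impolite side drops a colliding offer; the polite side applies it via setRemoteDescription().
console.log(shouldIgnoreRemoteOffer(false, "offer", "have-local-offer")); // true: ignore it
console.log(shouldIgnoreRemoteOffer(true, "offer", "have-local-offer"));  // false: accept it
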
private async sendAnswer(log: ILogItem): Promise<void> {
const localDescription = this.peerConnection.localDescription!;
@@ -719,7 +767,7 @@ export class PeerCall implements IDisposable {
// this will accumulate all updates into one object, so we still have the old stream info when we change stream id
this.remoteSDPStreamMetadata = recursivelyAssign(this.remoteSDPStreamMetadata || {}, metadata, true);
this.updateRemoteMedia(log);
// TODO: apply muting

}

private async addBufferedIceCandidates(log: ILogItem): Promise<void> {
@@ -825,13 +873,11 @@ export class PeerCall implements IDisposable {
const metadata = {};
if (this.localMedia?.userMedia) {
const streamId = this.localMedia.userMedia.id;
const streamSender = this.peerConnection.localStreams.get(streamId);
metadata[streamId] = {
purpose: SDPStreamMetadataPurpose.Usermedia,
audio_muted: !(streamSender?.audioSender?.enabled),
video_muted: !(streamSender?.videoSender?.enabled),
audio_muted: this.localMuteSettings?.microphone || !getStreamAudioTrack(this.localMedia.userMedia),
video_muted: this.localMuteSettings?.camera || !getStreamVideoTrack(this.localMedia.userMedia),
};
console.log("video_muted", streamSender?.videoSender?.enabled, streamSender?.videoSender?.transceiver?.direction, streamSender?.videoSender?.transceiver?.currentDirection, JSON.stringify(metadata));
}
if (this.localMedia?.screenShare) {
const streamId = this.localMedia.screenShare.id;
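With this change the muted flags in the stream metadata come from localMuteSettings (or from the absence of a track) rather than from sender state. As an illustration, the metadata produced for a user-media stream with the camera muted might end up looking like this (stream id and values invented):

// Hypothetical shape of getSDPMetadata()'s result for one user-media stream.
const sdpStreamMetadata = {
    "stream-id-1234": {                 // id of localMedia.userMedia (made up here)
        purpose: "m.usermedia",         // SDPStreamMetadataPurpose.Usermedia
        audio_muted: false,             // microphone not muted and an audio track exists
        video_muted: true,              // camera muted via localMuteSettings.camera
    },
};
console.log(JSON.stringify(sdpStreamMetadata, null, 2));
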
@@ -842,17 +888,78 @@ export class PeerCall implements IDisposable {
return metadata;
}

private updateRemoteMedia(log: ILogItem) {
private findReceiverForStream(kind: TrackKind, streamId: string): Receiver | undefined {
return this.peerConnection.getReceivers().find(r => {
return r.track.kind === kind && this._remoteTrackToStreamId.get(r.track.id) === streamId;
});
}

private findTransceiverForTrack(track: Track): Transceiver | undefined {
return this.peerConnection.getTransceivers().find(t => {
return t.sender.track?.id === track.id;
});
}

private onRemoteTrack(track: Track, streams: ReadonlyArray<Stream>, log: ILogItem) {
if (streams.length === 0) {
log.log({l: `ignoring ${track.kind} streamless track`, id: track.id});
return;
}
const stream = streams[0];
this._remoteTrackToStreamId.set(track.id, stream.id);
if (!this._remoteStreams.has(stream.id)) {
const listener = (event: StreamTrackEvent): void => {
this.logItem.wrap({l: "removetrack", id: event.track.id}, log => {
const streamId = this._remoteTrackToStreamId.get(event.track.id);
if (streamId) {
this._remoteTrackToStreamId.delete(event.track.id);
const streamDetails = this._remoteStreams.get(streamId);
if (streamDetails && streamDetails.stream.getTracks().length === 0) {
this.disposables.disposeTracked(disposeListener);
this._remoteStreams.delete(stream.id);
this.updateRemoteMedia(log);
}
}
})
};
stream.addEventListener("removetrack", listener);
const disposeListener = () => {
stream.removeEventListener("removetrack", listener);
};
this.disposables.track(disposeListener);
this._remoteStreams.set(stream.id, {
disposeListener,
stream
});
this.updateRemoteMedia(log);
}
}

private updateRemoteMedia(log: ILogItem): void {
this._remoteMedia.userMedia = undefined;
this._remoteMedia.screenShare = undefined;
if (this.remoteSDPStreamMetadata) {
for (const [streamId, streamReceiver] of this.peerConnection.remoteStreams.entries()) {
const metaData = this.remoteSDPStreamMetadata[streamId];
for (const streamDetails of this._remoteStreams.values()) {
const {stream} = streamDetails;
const metaData = this.remoteSDPStreamMetadata[stream.id];
if (metaData) {
if (metaData.purpose === SDPStreamMetadataPurpose.Usermedia) {
this._remoteMedia.userMedia = streamReceiver.stream;
this._remoteMedia.userMedia = stream;
const audioReceiver = this.findReceiverForStream(TrackKind.Audio, stream.id);
if (audioReceiver) {
audioReceiver.track.enabled = !metaData.audio_muted;
}
const videoReceiver = this.findReceiverForStream(TrackKind.Video, stream.id);
if (videoReceiver) {
videoReceiver.track.enabled = !metaData.video_muted;
}
this._remoteMuteSettings = new MuteSettings(
metaData.audio_muted || !audioReceiver?.track,
metaData.video_muted || !videoReceiver?.track
);
} else if (metaData.purpose === SDPStreamMetadataPurpose.Screenshare) {
this._remoteMedia.screenShare = streamReceiver.stream;
this._remoteMedia.screenShare = stream;
}
}
}
@@ -860,6 +967,55 @@ export class PeerCall implements IDisposable {
this.options.emitUpdate(this, undefined);
}

private updateLocalMedia(localMedia: LocalMedia, logItem: ILogItem): Promise<void> {
return logItem.wrap("updateLocalMedia", async log => {
const oldMedia = this.localMedia;
this.localMedia = localMedia;
const applyStream = async (oldStream: Stream | undefined, stream: Stream | undefined, streamPurpose: SDPStreamMetadataPurpose) => {
const applyTrack = async (oldTrack: Track | undefined, newTrack: Track | undefined) => {
if (!oldTrack && newTrack) {
log.wrap(`adding ${streamPurpose} ${newTrack.kind} track`, log => {
const sender = this.peerConnection.addTrack(newTrack, stream!);
this.options.webRTC.prepareSenderForPurpose(this.peerConnection, sender, streamPurpose);
});
} else if (oldTrack) {
const sender = this.peerConnection.getSenders().find(s => s.track && s.track.id === oldTrack.id);
if (sender) {
if (newTrack && oldTrack.id !== newTrack.id) {
try {
await log.wrap(`replacing ${streamPurpose} ${newTrack.kind} track`, log => {
return sender.replaceTrack(newTrack);
});
} catch (err) {
// can't replace the track without renegotiating
log.wrap(`adding and removing ${streamPurpose} ${newTrack.kind} track`, log => {
this.peerConnection.removeTrack(sender);
const newSender = this.peerConnection.addTrack(newTrack);
this.options.webRTC.prepareSenderForPurpose(this.peerConnection, newSender, streamPurpose);
});
}
} else if (!newTrack) {
log.wrap(`removing ${streamPurpose} ${sender.track!.kind} track`, log => {
this.peerConnection.removeTrack(sender);
});
} else {
log.log(`${streamPurpose} ${oldTrack.kind} track hasn't changed`);
}
}
// TODO: should we do something if we didn't find the sender? e.g. some other code already removed the sender but didn't update localMedia
}
}

await applyTrack(getStreamAudioTrack(oldStream), getStreamAudioTrack(stream));
await applyTrack(getStreamVideoTrack(oldStream), getStreamVideoTrack(stream));
};

await applyStream(oldMedia?.userMedia, localMedia?.userMedia, SDPStreamMetadataPurpose.Usermedia);
await applyStream(oldMedia?.screenShare, localMedia?.screenShare, SDPStreamMetadataPurpose.Screenshare);
// TODO: datachannel, but don't do it here as we don't want to do it from answer, rather in different method
});
}

private async delay(timeoutMs: number): Promise<void> {
// Allow a short time for initial candidates to be gathered
const timeout = this.disposables.track(this.options.createTimeout(timeoutMs));
@@ -875,7 +1031,7 @@ export class PeerCall implements IDisposable {

public dispose(): void {
this.disposables.dispose();
this.peerConnection.dispose();
this.peerConnection.close();
}

public close(reason: CallErrorCode | undefined, log: ILogItem): void {
@@ -915,100 +1071,11 @@ export enum CallDirection {
Outbound = 'outbound',
}

export enum CallErrorCode {
/** The user chose to end the call */
UserHangup = 'user_hangup',

/** An error code when the local client failed to create an offer. */
LocalOfferFailed = 'local_offer_failed',
/**
* An error code when there is no local mic/camera to use. This may be because
* the hardware isn't plugged in, or the user has explicitly denied access.
*/
NoUserMedia = 'no_user_media',

/**
* Error code used when a call event failed to send
* because unknown devices were present in the room
*/
UnknownDevices = 'unknown_devices',

/**
* Error code used when we fail to send the invite
* for some reason other than there being unknown devices
*/
SendInvite = 'send_invite',

/**
* An answer could not be created
*/
CreateAnswer = 'create_answer',

/**
* Error code used when we fail to send the answer
* for some reason other than there being unknown devices
*/
SendAnswer = 'send_answer',

/**
* The session description from the other side could not be set
*/
SetRemoteDescription = 'set_remote_description',

/**
* The session description from this side could not be set
*/
SetLocalDescription = 'set_local_description',

/**
* A different device answered the call
*/
AnsweredElsewhere = 'answered_elsewhere',

/**
* No media connection could be established to the other party
*/
IceFailed = 'ice_failed',

/**
* The invite timed out whilst waiting for an answer
*/
InviteTimeout = 'invite_timeout',

/**
* The call was replaced by another call
*/
Replaced = 'replaced',

/**
* Signalling for the call could not be sent (other than the initial invite)
*/
SignallingFailed = 'signalling_timeout',

/**
* The remote party is busy
*/
UserBusy = 'user_busy',

/**
* We transferred the call off to somewhere else
*/
Transfered = 'transferred',

/**
* A call from the same user was found with a new session id
*/
NewSession = 'new_session',
}

/**
* The version field that we set in m.call.* events
*/
const VOIP_PROTO_VERSION = 1;

/** The fallback ICE server to use for STUN or TURN protocols. */
const FALLBACK_ICE_SERVER = 'stun:turn.matrix.org';

/** The length of time a call can be ringing for. */
const CALL_TIMEOUT_MS = 60000;

@@ -1029,9 +1096,28 @@ export function handlesEventType(eventType: string): boolean {
return eventType === EventType.Invite ||
eventType === EventType.Candidates ||
eventType === EventType.Answer ||
eventType === EventType.Hangup;
eventType === EventType.Hangup ||
eventType === EventType.SDPStreamMetadataChanged ||
eventType === EventType.SDPStreamMetadataChangedPrefix ||
eventType === EventType.Negotiate;
}

export function tests() {

function enableSenderOnTransceiver(transceiver: Transceiver, enabled: boolean) {
return enableTransceiver(transceiver, enabled, "sendonly", "recvonly");
}

function enableTransceiver(transceiver: Transceiver, enabled: boolean, exclusiveValue: TransceiverDirection, excludedValue: TransceiverDirection) {
if (enabled) {
if (transceiver.direction === "inactive") {
transceiver.direction = exclusiveValue;
} else {
transceiver.direction = "sendrecv";
}
} else {
if (transceiver.direction === "sendrecv") {
transceiver.direction = excludedValue;
} else {
transceiver.direction = "inactive";
}
}
}

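enableTransceiver mutes or unmutes one leg of a transceiver by rewriting its direction; for the sender case (enableSenderOnTransceiver, whose call site in muteTrack is currently commented out in favour of toggling track.enabled) disabling turns "sendrecv" into "recvonly" and everything else into "inactive", while enabling turns "inactive" into "sendonly" and everything else into "sendrecv". A standalone sketch of those transitions, using plain strings instead of the Transceiver type:

type Direction = "sendrecv" | "sendonly" | "recvonly" | "inactive";

function nextSenderDirection(current: Direction, enabled: boolean): Direction {
    if (enabled) {
        return current === "inactive" ? "sendonly" : "sendrecv";
    } else {
        return current === "sendrecv" ? "recvonly" : "inactive";
    }
}

console.log(nextSenderDirection("sendrecv", false)); // "recvonly": keep receiving, stop sending
console.log(nextSenderDirection("inactive", true));  // "sendonly": start sending only
console.log(nextSenderDirection("recvonly", true));  // "sendrecv"
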
@@ -9,14 +9,14 @@

## TODO
- DONE: implement receiving hangup
- making logging better
- DONE: implement cloning the localMedia so it works in safari?
- DONE: implement 3 retries per peer
- implement muting tracks with m.call.sdp_stream_metadata_changed
- implement renegotiation
- making logging better
- finish session id support
- call peers are essentially identified by (userid, deviceid, sessionid). If we see a new session id, we first disconnect from the current member so we're ready to connect with a clean slate again (in a member event, also in to_device? no harm I suppose, given olm encryption ensures you can't spoof the deviceid).
- implement to_device messages arriving before m.call(.member) state event
- implement muting tracks with m.call.sdp_stream_metadata_changed
- implement cloning the localMedia so it works in safari?
- DONE: implement 3 retries per peer
- re-enable crypto & implement fetching olm keys before sending encrypted signalling message
- local echo for join/leave buttons?
- make UI pretty

@@ -1,7 +1,7 @@
// allow non-camelcase as these are event types that go onto the wire
/* eslint-disable camelcase */
import type {StateEvent} from "../storage/types";

import type {SessionDescription} from "../../platform/types/WebRTC";
export enum EventType {
GroupCall = "org.matrix.msc3401.call",
GroupCallMember = "org.matrix.msc3401.call.member",
@@ -36,11 +36,6 @@ export interface CallMemberContent {
["m.calls"]: CallMembership[];
}

export interface SessionDescription {
sdp?: string;
type: RTCSdpType
}

export enum SDPStreamMetadataPurpose {
Usermedia = "m.usermedia",
Screenshare = "m.screenshare",
@@ -97,6 +92,12 @@ export type MCallInvite<Base extends MCallBase> = Base & {
[SDPStreamMetadataKey]: SDPStreamMetadata;
}

export type MCallNegotiate<Base extends MCallBase> = Base & {
description: SessionDescription;
lifetime: number;
[SDPStreamMetadataKey]: SDPStreamMetadata;
}

export type MCallSDPStreamMetadataChanged<Base extends MCallBase> = Base & {
[SDPStreamMetadataKey]: SDPStreamMetadata;
}
@@ -213,6 +214,7 @@ export enum CallErrorCode {

export type SignallingMessage<Base extends MCallBase> =
{type: EventType.Invite, content: MCallInvite<Base>} |
{type: EventType.Negotiate, content: MCallNegotiate<Base>} |
{type: EventType.Answer, content: MCallAnswer<Base>} |
{type: EventType.SDPStreamMetadataChanged | EventType.SDPStreamMetadataChangedPrefix, content: MCallSDPStreamMetadataChanged<Base>} |
{type: EventType.Candidates, content: MCallCandidates<Base>} |

src/matrix/calls/common.ts (new file, 37 lines)
@@ -0,0 +1,37 @@
/*
Copyright 2022 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

import type {Track, Stream} from "../../platform/types/MediaDevices";

export function getStreamAudioTrack(stream: Stream | undefined): Track | undefined {
return stream?.getAudioTracks()[0];
}

export function getStreamVideoTrack(stream: Stream | undefined): Track | undefined {
return stream?.getVideoTracks()[0];
}

export class MuteSettings {
constructor (public readonly microphone: boolean = false, public readonly camera: boolean = false) {}

toggleCamera(): MuteSettings {
return new MuteSettings(this.microphone, !this.camera);
}

toggleMicrophone(): MuteSettings {
return new MuteSettings(!this.microphone, this.camera);
}
}

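MuteSettings is immutable: each toggle returns a new instance, which is what CallViewModel.toggleVideo above relies on (this.call.setMuted(this.call.muteSettings.toggleCamera())). A brief usage sketch; the import path assumes a module sitting next to src/matrix/calls/common.ts:

import {MuteSettings} from "./common"; // assumed relative path to the new file above

let mute = new MuteSettings();               // microphone: false, camera: false
mute = mute.toggleCamera();                  // camera now muted; the original instance is unchanged
console.log(mute.microphone, mute.camera);   // false true
mute = mute.toggleMicrophone().toggleCamera();
console.log(mute.microphone, mute.camera);   // true false
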
@ -17,6 +17,7 @@ limitations under the License.
import {ObservableMap} from "../../../observable/map/ObservableMap";
import {Member} from "./Member";
import {LocalMedia} from "../LocalMedia";
import {MuteSettings} from "../common";
import {RoomMember} from "../../room/members/RoomMember";
import {EventEmitter} from "../../../utils/EventEmitter";
import {EventType, CallIntent} from "../callEventTypes";
@ -63,6 +64,7 @@ export class GroupCall extends EventEmitter<{change: never}> {
    private _localMedia?: LocalMedia = undefined;
    private _memberOptions: MemberOptions;
    private _state: GroupCallState;
    private localMuteSettings: MuteSettings = new MuteSettings(false, false);

    private _deviceIndex?: number;
    private _eventTimestamp?: number;
@ -129,11 +131,36 @@ export class GroupCall extends EventEmitter<{change: never}> {
            this.emitChange();
            // send invite to all members that are < my userId
            for (const [,member] of this._members) {
                member.connect(this._localMedia!.clone());
                member.connect(this._localMedia!.clone(), this.localMuteSettings);
            }
        });
    }

    async setMedia(localMedia: LocalMedia): Promise<void> {
        if (this._state === GroupCallState.Joining || this._state === GroupCallState.Joined && this._localMedia) {
            const oldMedia = this._localMedia!;
            this._localMedia = localMedia;
            await Promise.all(Array.from(this._members.values()).map(m => {
                return m.setMedia(localMedia, oldMedia);
            }));
            oldMedia?.stopExcept(localMedia);
        }
    }

    setMuted(muteSettings: MuteSettings) {
        this.localMuteSettings = muteSettings;
        if (this._state === GroupCallState.Joining || this._state === GroupCallState.Joined) {
            for (const [,member] of this._members) {
                member.setMuted(this.localMuteSettings);
            }
        }
        this.emitChange();
    }

    get muteSettings(): MuteSettings {
        return this.localMuteSettings;
    }

    get hasJoined() {
        return this._state === GroupCallState.Joining || this._state === GroupCallState.Joined;
    }
@ -168,7 +195,7 @@ export class GroupCall extends EventEmitter<{change: never}> {
    }

    /** @internal */
    create(callType: "m.video" | "m.voice"): Promise<void> {
    create(type: "m.video" | "m.voice"): Promise<void> {
        return this.logItem.wrap("create", async log => {
            if (this._state !== GroupCallState.Fledgling) {
                return;
@ -176,7 +203,7 @@ export class GroupCall extends EventEmitter<{change: never}> {
            this._state = GroupCallState.Creating;
            this.emitChange();
            this.callContent = Object.assign({
                "m.type": callType,
                "m.type": type,
            }, this.callContent);
            const request = this.options.hsApi.sendState(this.roomId, EventType.GroupCall, this.id, this.callContent!, {log});
            await request.response();
@ -235,7 +262,7 @@ export class GroupCall extends EventEmitter<{change: never}> {
        this._members.add(memberKey, member);
        if (this._state === GroupCallState.Joining || this._state === GroupCallState.Joined) {
            // Safari can't send a MediaStream to multiple sources, so clone it
            member.connect(this._localMedia!.clone());
            member.connect(this._localMedia!.clone(), this.localMuteSettings);
        }
    }
}
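
How a caller might drive the new GroupCall mute and media APIs (a sketch under assumptions, not part of the diff; "call" is assumed to be a joined GroupCall and "localMedia" an existing LocalMedia instance):

// mute or unmute the local microphone for every connected member
call.setMuted(call.muteSettings.toggleMicrophone());

// swap the local media (e.g. after switching camera); connected members
// are updated via Member.setMedia and the previous tracks are stopped
await call.setMedia(localMedia);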
@ -19,6 +19,7 @@ import {makeTxnId, makeId} from "../../common";
import {EventType, CallErrorCode} from "../callEventTypes";
import {formatToDeviceMessagesPayload} from "../../common";

import type {MuteSettings} from "../common";
import type {Options as PeerCallOptions, RemoteMedia} from "../PeerCall";
import type {LocalMedia} from "../LocalMedia";
import type {HomeServerApi} from "../../net/HomeServerApi";
@ -50,6 +51,7 @@ const errorCodesWithoutRetry = [
export class Member {
    private peerCall?: PeerCall;
    private localMedia?: LocalMedia;
    private localMuteSettings?: MuteSettings;
    private retryCount: number = 0;

    constructor(
@ -65,6 +67,10 @@ export class Member {
        return this.peerCall?.remoteMedia;
    }

    get remoteMuteSettings(): MuteSettings | undefined {
        return this.peerCall?.remoteMuteSettings;
    }

    get isConnected(): boolean {
        return this.peerCall?.state === CallState.Connected;
    }
@ -90,9 +96,10 @@ export class Member {
    }

    /** @internal */
    connect(localMedia: LocalMedia) {
    connect(localMedia: LocalMedia, localMuteSettings: MuteSettings) {
        this.logItem.wrap("connect", () => {
            this.localMedia = localMedia;
            this.localMuteSettings = localMuteSettings;
            // otherwise wait for it to connect
            let shouldInitiateCall;
            // the lexicographically lower side initiates the call
@ -103,7 +110,7 @@ export class Member {
            }
            if (shouldInitiateCall) {
                this.peerCall = this._createPeerCall(makeId("c"));
                this.peerCall.call(localMedia);
                this.peerCall.call(localMedia, localMuteSettings);
            }
        });
    }
@ -133,7 +140,7 @@ export class Member {
    /** @internal */
    emitUpdate = (peerCall: PeerCall, params: any) => {
        if (peerCall.state === CallState.Ringing) {
            peerCall.answer(this.localMedia!);
            peerCall.answer(this.localMedia!, this.localMuteSettings!);
        }
        else if (peerCall.state === CallState.Ended) {
            const hangupReason = peerCall.hangupReason;
@ -142,7 +149,7 @@ export class Member {
            if (hangupReason && !errorCodesWithoutRetry.includes(hangupReason)) {
                this.retryCount += 1;
                if (this.retryCount <= 3) {
                    this.connect(this.localMedia!);
                    this.connect(this.localMedia!, this.localMuteSettings!);
                }
            }
        }
@ -166,6 +173,8 @@ export class Member {
            }
        }
        };
        // TODO: remove this for release
        log.set("payload", groupMessage.content);
        const request = this.options.hsApi.sendToDevice(
            message.type,
            //"m.room.encrypted",
@ -194,6 +203,17 @@ export class Member {
        }
    }

    /** @internal */
    async setMedia(localMedia: LocalMedia, previousMedia: LocalMedia): Promise<void> {
        this.localMedia = localMedia.replaceClone(this.localMedia, previousMedia);
        await this.peerCall?.setMedia(this.localMedia);
    }

    setMuted(muteSettings: MuteSettings) {
        this.localMuteSettings = muteSettings;
        this.peerCall?.setMuted(muteSettings);
    }

    private _createPeerCall(callId: string): PeerCall {
        return new PeerCall(callId, Object.assign({}, this.options, {
            emitUpdate: this.emitUpdate,
@ -29,32 +29,31 @@ export class MediaRepository {
        this._platform = platform;
    }

    mxcUrlThumbnail(url: string, width: number, height: number, method: "crop" | "scale"): string | null {
    mxcUrlThumbnail(url: string, width: number, height: number, method: "crop" | "scale"): string | undefined {
        const parts = this._parseMxcUrl(url);
        if (parts) {
            const [serverName, mediaId] = parts;
            const httpUrl = `${this._homeserver}/_matrix/media/r0/thumbnail/${encodeURIComponent(serverName)}/${encodeURIComponent(mediaId)}`;
            return httpUrl + "?" + encodeQueryParams({width: Math.round(width), height: Math.round(height), method});
        }
        return null;
        return undefined;
    }

    mxcUrl(url: string): string | null {
    mxcUrl(url: string): string | undefined {
        const parts = this._parseMxcUrl(url);
        if (parts) {
            const [serverName, mediaId] = parts;
            return `${this._homeserver}/_matrix/media/r0/download/${encodeURIComponent(serverName)}/${encodeURIComponent(mediaId)}`;
        } else {
            return null;
        }
        return undefined;
    }

    private _parseMxcUrl(url: string): string[] | null {
    private _parseMxcUrl(url: string): string[] | undefined {
        const prefix = "mxc://";
        if (url.startsWith(prefix)) {
            return url.substr(prefix.length).split("/", 2);
        } else {
            return null;
            return undefined;
        }
}
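
A small sketch of handling the undefined return values introduced above (assumed consumer code; mediaRepository, avatarUrl and imageElement are assumptions):

const httpUrl = mediaRepository.mxcUrlThumbnail(avatarUrl, 32, 32, "crop");
if (httpUrl !== undefined) {
    // only assign when the mxc:// url could be parsed
    imageElement.src = httpUrl;
}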
@ -15,7 +15,7 @@ See the License for the specific language governing permissions and
limitations under the License.
*/

import {SortedArray, AsyncMappedList, ConcatList, ObservableArray} from "../../../observable/index.js";
import {SortedArray, AsyncMappedList, ConcatList, ObservableArray} from "../../../observable/index";
import {Disposables} from "../../../utils/Disposables";
import {Direction} from "./Direction";
import {TimelineReader} from "./persistence/TimelineReader.js";
@ -46,3 +46,12 @@ Object.assign(BaseObservableMap.prototype, {
        return new JoinedMap([this].concat(otherMaps));
    }
});

declare module "./map/BaseObservableMap" {
    interface BaseObservableMap<K, V> {
        sortValues(comparator: (a: V, b: V) => number): SortedMapList<V>;
        mapValues<M>(mapper: (V, emitSpontaneousUpdate: (params: any) => void) => M, updater: (mappedValue: M, params: any, value: V) => void): MappedMap<K, M>;
        filterValues(filter: (V, K) => boolean): FilteredMap<K, V>;
        join(...otherMaps: BaseObservableMap<K, V>[]): JoinedMap<K, V>;
    }
}
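
A brief sketch of what this module augmentation enables on any BaseObservableMap instance (assumed consumer code; someMap, otherMap and compare are assumptions):

const sorted = someMap
    .filterValues(value => value !== undefined)
    .sortValues((a, b) => compare(a, b));
const combined = someMap.join(otherMap); // otherMap is another BaseObservableMap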
53
src/observable/map/ObservableValueMap.ts
Normal file
@ -0,0 +1,53 @@
/*
Copyright 2022 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

import {BaseObservableMap} from "./BaseObservableMap";
import {BaseObservableValue} from "../value/BaseObservableValue";
import {SubscriptionHandle} from "../BaseObservable";

export class ObservableValueMap<K, V> extends BaseObservableMap<K, V> {
    private subscription?: SubscriptionHandle;

    constructor(private readonly key: K, private readonly observableValue: BaseObservableValue<V>) {
        super();
    }

    onSubscribeFirst() {
        this.subscription = this.observableValue.subscribe(value => {
            this.emitUpdate(this.key, value, undefined);
        });
        super.onSubscribeFirst();
    }

    onUnsubscribeLast() {
        this.subscription!();
        super.onUnsubscribeLast();
    }

    *[Symbol.iterator](): Iterator<[K, V]> {
        yield [this.key, this.observableValue.get()];
    }

    get size(): number {
        return 1;
    }

    get(key: K): V | undefined {
        if (key == this.key) {
            return this.observableValue.get();
        }
    }
}
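
A possible way to use ObservableValueMap (a sketch, not from the commit; groupCallValue and membersMap are assumptions):

// expose a single BaseObservableValue under a fixed key, so it can be
// combined with real observable maps via the join() helper added above
const callMap = new ObservableValueMap("call", groupCallValue);
const everything = callMap.join(membersMap);
console.log(callMap.size);        // always 1
console.log(callMap.get("call")); // current value of groupCallValue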
45
src/observable/value/EventObservableValue.ts
Normal file
@ -0,0 +1,45 @@
/*
Copyright 2022 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

import {BaseObservableValue} from "./BaseObservableValue";
import {EventEmitter} from "../../utils/EventEmitter";

export class EventObservableValue<T, V extends EventEmitter<T>> extends BaseObservableValue<V> {
    private eventSubscription: () => void;

    constructor(
        private readonly value: V,
        private readonly eventName: keyof T
    ) {
        super();
    }

    onSubscribeFirst(): void {
        this.eventSubscription = this.value.disposableOn(this.eventName, () => {
            this.emit(this.value);
        });
        super.onSubscribeFirst();
    }

    onUnsubscribeLast(): void {
        this.eventSubscription!();
        super.onUnsubscribeLast();
    }

    get(): V {
        return this.value;
    }
}
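
A rough sketch of wrapping an EventEmitter in an EventObservableValue (assumed code; "call" is assumed to be an EventEmitter with a "change" event, like GroupCall above):

const observableCall = new EventObservableValue(call, "change");
// subscribe() returns a SubscriptionHandle; calling it unsubscribes
const unsubscribe = observableCall.subscribe(value => {
    console.log("call changed, joined:", value.hasJoined);
});
// later, when no longer interested
unsubscribe();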
@ -14,19 +14,48 @@ See the License for the specific language governing permissions and
limitations under the License.
*/

export interface Event {}

export interface MediaDevices {
    // filter out audiooutput
    enumerate(): Promise<MediaDeviceInfo[]>;
    // to assign to a video element, we downcast to WrappedTrack and use the stream property.
    getMediaTracks(audio: true | MediaDeviceInfo, video: boolean | MediaDeviceInfo): Promise<Stream>;
    getScreenShareTrack(): Promise<Stream | undefined>;
    createVolumeMeasurer(stream: Stream, callback: () => void): VolumeMeasurer;
}

// Typescript definitions derived from https://github.com/microsoft/TypeScript/blob/main/lib/lib.dom.d.ts
/*! *****************************************************************************
Copyright (c) Microsoft Corporation. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use
this file except in compliance with the License. You may obtain a copy of the
License at http://www.apache.org/licenses/LICENSE-2.0
THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
MERCHANTABLITY OR NON-INFRINGEMENT.
See the Apache Version 2.0 License for specific language governing permissions
and limitations under the License.
***************************************************************************** */

export interface StreamTrackEvent extends Event {
    readonly track: Track;
}

export interface StreamEventMap {
    "addtrack": StreamTrackEvent;
    "removetrack": StreamTrackEvent;
}

export interface Stream {
    readonly audioTrack: AudioTrack | undefined;
    readonly videoTrack: Track | undefined;
    getTracks(): ReadonlyArray<Track>;
    getAudioTracks(): ReadonlyArray<Track>;
    getVideoTracks(): ReadonlyArray<Track>;
    readonly id: string;
    clone(): Stream;
    addEventListener<K extends keyof StreamEventMap>(type: K, listener: (this: Stream, ev: StreamEventMap[K]) => any, options?: boolean | AddEventListenerOptions): void;
    removeEventListener<K extends keyof StreamEventMap>(type: K, listener: (this: Stream, ev: StreamEventMap[K]) => any, options?: boolean | EventListenerOptions): void;
}

export enum TrackKind {
@ -38,12 +67,13 @@ export interface Track {
    readonly kind: TrackKind;
    readonly label: string;
    readonly id: string;
    readonly settings: MediaTrackSettings;
    enabled: boolean;
    // getSettings(): MediaTrackSettings;
    stop(): void;
}

export interface AudioTrack extends Track {
    // TODO: how to emit updates on this?
export interface VolumeMeasurer {
    get isSpeaking(): boolean;
    setSpeakingThreshold(threshold: number): void;
    stop();
}
@ -14,63 +14,155 @@ See the License for the specific language governing permissions and
limitations under the License.
*/

import {Track, Stream} from "./MediaDevices";
import {Track, Stream, Event} from "./MediaDevices";
import {SDPStreamMetadataPurpose} from "../../matrix/calls/callEventTypes";

export interface WebRTC {
    createPeerConnection(handler: PeerConnectionHandler, forceTURN: boolean, turnServers: RTCIceServer[], iceCandidatePoolSize: number): PeerConnection;
    createPeerConnection(forceTURN: boolean, turnServers: RTCIceServer[], iceCandidatePoolSize: number): PeerConnection;
    prepareSenderForPurpose(peerConnection: PeerConnection, sender: Sender, purpose: SDPStreamMetadataPurpose): void;
}

export interface StreamSender {
    get stream(): Stream;
    get audioSender(): TrackSender | undefined;
    get videoSender(): TrackSender | undefined;
// Typescript definitions derived from https://github.com/microsoft/TypeScript/blob/main/lib/lib.dom.d.ts
/*! *****************************************************************************
Copyright (c) Microsoft Corporation. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use
this file except in compliance with the License. You may obtain a copy of the
License at http://www.apache.org/licenses/LICENSE-2.0
THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
MERCHANTABLITY OR NON-INFRINGEMENT.
See the Apache Version 2.0 License for specific language governing permissions
and limitations under the License.
***************************************************************************** */

export interface DataChannelEventMap {
    "bufferedamountlow": Event;
    "close": Event;
    "error": Event;
    "message": MessageEvent;
    "open": Event;
}

export interface StreamReceiver {
    get stream(): Stream;
    get audioReceiver(): TrackReceiver | undefined;
    get videoReceiver(): TrackReceiver | undefined;
}

export interface TrackReceiver {
    get track(): Track;
    get enabled(): boolean;
    enable(enabled: boolean); // this modifies the transceiver direction
}

export interface TrackSender extends TrackReceiver {
    /** replaces the track if possible without renegotiation. Can throw. */
    replaceTrack(track: Track | undefined): Promise<void>;
    /** make any needed adjustments to the sender or transceiver settings
     * depending on the purpose, after adding the track to the connection */
    prepareForPurpose(purpose: SDPStreamMetadataPurpose): void;
}

export interface PeerConnectionHandler {
    onIceConnectionStateChange(state: RTCIceConnectionState);
    onLocalIceCandidate(candidate: RTCIceCandidate);
    onIceGatheringStateChange(state: RTCIceGatheringState);
    onRemoteStreamRemoved(stream: Stream);
    onRemoteTracksAdded(receiver: TrackReceiver);
    onRemoteDataChannel(dataChannel: any | undefined);
    onNegotiationNeeded();
}

export interface PeerConnection {
    get iceGatheringState(): RTCIceGatheringState;
    get signalingState(): RTCSignalingState;
    get localDescription(): RTCSessionDescription | undefined;
    get localStreams(): ReadonlyMap<string, StreamSender>;
    get remoteStreams(): ReadonlyMap<string, StreamReceiver>;
    createOffer(): Promise<RTCSessionDescriptionInit>;
    createAnswer(): Promise<RTCSessionDescriptionInit>;
    setLocalDescription(description?: RTCSessionDescriptionInit): Promise<void>;
    setRemoteDescription(description: RTCSessionDescriptionInit): Promise<void>;
    addIceCandidate(candidate: RTCIceCandidate): Promise<void>;
    addTrack(track: Track): TrackSender | undefined;
    removeTrack(track: TrackSender): void;
    createDataChannel(options: RTCDataChannelInit): any;
    dispose(): void;
export interface DataChannel {
    binaryType: BinaryType;
    readonly id: number | null;
    readonly label: string;
    readonly negotiated: boolean;
    readonly readyState: DataChannelState;
    close(): void;
    send(data: string): void;
    send(data: Blob): void;
    send(data: ArrayBuffer): void;
    send(data: ArrayBufferView): void;
    addEventListener<K extends keyof DataChannelEventMap>(type: K, listener: (this: DataChannel, ev: DataChannelEventMap[K]) => any, options?: boolean | AddEventListenerOptions): void;
    removeEventListener<K extends keyof DataChannelEventMap>(type: K, listener: (this: DataChannel, ev: DataChannelEventMap[K]) => any, options?: boolean | EventListenerOptions): void;
}

export interface DataChannelInit {
    id?: number;
    maxPacketLifeTime?: number;
    maxRetransmits?: number;
    negotiated?: boolean;
    ordered?: boolean;
    protocol?: string;
}

export interface DataChannelEvent extends Event {
    readonly channel: DataChannel;
}

export interface PeerConnectionIceEvent extends Event {
    readonly candidate: RTCIceCandidate | null;
}

export interface TrackEvent extends Event {
    readonly receiver: Receiver;
    readonly streams: ReadonlyArray<Stream>;
    readonly track: Track;
    readonly transceiver: Transceiver;
}

export interface PeerConnectionEventMap {
    "connectionstatechange": Event;
    "datachannel": DataChannelEvent;
    "icecandidate": PeerConnectionIceEvent;
    "iceconnectionstatechange": Event;
    "icegatheringstatechange": Event;
    "negotiationneeded": Event;
    "signalingstatechange": Event;
    "track": TrackEvent;
}

export type DataChannelState = "closed" | "closing" | "connecting" | "open";
export type IceConnectionState = "checking" | "closed" | "completed" | "connected" | "disconnected" | "failed" | "new";
export type PeerConnectionState = "closed" | "connected" | "connecting" | "disconnected" | "failed" | "new";
export type SignalingState = "closed" | "have-local-offer" | "have-local-pranswer" | "have-remote-offer" | "have-remote-pranswer" | "stable";
export type IceGatheringState = "complete" | "gathering" | "new";
export type SdpType = "answer" | "offer" | "pranswer" | "rollback";
export type TransceiverDirection = "inactive" | "recvonly" | "sendonly" | "sendrecv" | "stopped";
export interface SessionDescription {
    readonly sdp: string;
    readonly type: SdpType;
}

export interface AnswerOptions {}

export interface OfferOptions {
    iceRestart?: boolean;
    offerToReceiveAudio?: boolean;
    offerToReceiveVideo?: boolean;
}

export interface SessionDescriptionInit {
    sdp?: string;
    type: SdpType;
}

export interface LocalSessionDescriptionInit {
    sdp?: string;
    type?: SdpType;
}

/** A WebRTC connection between the local computer and a remote peer. It provides methods to connect to a remote peer, maintain and monitor the connection, and close the connection once it's no longer needed. */
export interface PeerConnection {
    readonly connectionState: PeerConnectionState;
    readonly iceConnectionState: IceConnectionState;
    readonly iceGatheringState: IceGatheringState;
    readonly localDescription: SessionDescription | null;
    readonly remoteDescription: SessionDescription | null;
    readonly signalingState: SignalingState;
    addIceCandidate(candidate?: RTCIceCandidateInit): Promise<void>;
    addTrack(track: Track, ...streams: Stream[]): Sender;
    close(): void;
    createAnswer(options?: AnswerOptions): Promise<SessionDescriptionInit>;
    createDataChannel(label: string, dataChannelDict?: DataChannelInit): DataChannel;
    createOffer(options?: OfferOptions): Promise<SessionDescriptionInit>;
    getReceivers(): Receiver[];
    getSenders(): Sender[];
    getTransceivers(): Transceiver[];
    removeTrack(sender: Sender): void;
    restartIce(): void;
    setLocalDescription(description?: LocalSessionDescriptionInit): Promise<void>;
    setRemoteDescription(description: SessionDescriptionInit): Promise<void>;
    addEventListener<K extends keyof PeerConnectionEventMap>(type: K, listener: (this: PeerConnection, ev: PeerConnectionEventMap[K]) => any, options?: boolean | AddEventListenerOptions): void;
    removeEventListener<K extends keyof PeerConnectionEventMap>(type: K, listener: (this: PeerConnection, ev: PeerConnectionEventMap[K]) => any, options?: boolean | EventListenerOptions): void;
}

export interface Receiver {
    readonly track: Track;
}

export interface Sender {
    readonly track: Track | null;
    replaceTrack(withTrack: Track | null): Promise<void>;
}

export interface Transceiver {
    readonly currentDirection: TransceiverDirection | null;
    direction: TransceiverDirection;
    readonly mid: string | null;
    readonly receiver: Receiver;
    readonly sender: Sender;
    stop(): void;
}
@ -19,6 +19,6 @@ import {hkdf} from "../../utils/crypto/hkdf";

import {Platform as ModernPlatform} from "./Platform.js";

export function Platform(container, assetPaths, config, options = null) {
    return new ModernPlatform(container, assetPaths, config, options, {aesjs, hkdf});
export function Platform({ container, assetPaths, config, configURL, options = null }) {
    return new ModernPlatform({ container, assetPaths, config, configURL, options, cryptoExtras: { aesjs, hkdf }});
}
@ -128,10 +128,11 @@ function adaptUIOnVisualViewportResize(container) {
}

export class Platform {
    constructor(container, assetPaths, config, options = null, cryptoExtras = null) {
    constructor({ container, assetPaths, config, configURL, options = null, cryptoExtras = null }) {
        this._container = container;
        this._assetPaths = assetPaths;
        this._config = config;
        this._configURL = configURL;
        this.settingsStorage = new SettingsStorage("hydrogen_setting_v1_");
        this.clock = new Clock();
        this.encoding = new Encoding();
@ -144,7 +145,7 @@ export class Platform {
            this._serviceWorkerHandler = new ServiceWorkerHandler();
            this._serviceWorkerHandler.registerAndStart(assetPaths.serviceWorker);
        }
        this.notificationService = new NotificationService(this._serviceWorkerHandler, config.push);
        this.notificationService = undefined;
        // Only try to use crypto when olm is provided
        if(this._assetPaths.olm) {
            this.crypto = new Crypto(cryptoExtras);
@ -169,6 +170,20 @@ export class Platform {
        this.webRTC = new DOMWebRTC();
    }

    async init() {
        if (!this._config) {
            if (!this._configURL) {
                throw new Error("Neither config nor configURL was provided!");
            }
            const {body}= await this.request(this._configURL, {method: "GET", format: "json", cache: true}).response();
            this._config = body;
        }
        this.notificationService = new NotificationService(
            this._serviceWorkerHandler,
            this._config.push
        );
    }

    _createLogger(isDevelopment) {
        // Make sure that loginToken does not end up in the logs
        const transformer = (item) => {
@ -15,7 +15,7 @@ See the License for the specific language governing permissions and
limitations under the License.
*/

import {MediaDevices as IMediaDevices, Stream, Track, TrackKind, AudioTrack} from "../../types/MediaDevices";
import {MediaDevices as IMediaDevices, Stream, Track, TrackKind, VolumeMeasurer} from "../../types/MediaDevices";

const POLLING_INTERVAL = 200; // ms
export const SPEAKING_THRESHOLD = -60; // dB
@ -30,12 +30,12 @@ export class MediaDevicesWrapper implements IMediaDevices {

    async getMediaTracks(audio: true | MediaDeviceInfo, video: boolean | MediaDeviceInfo): Promise<Stream> {
        const stream = await this.mediaDevices.getUserMedia(this.getUserMediaContraints(audio, video));
        return new StreamWrapper(stream);
        return stream as Stream;
    }

    async getScreenShareTrack(): Promise<Stream | undefined> {
        const stream = await this.mediaDevices.getDisplayMedia(this.getScreenshareContraints());
        return new StreamWrapper(stream);
        return stream as Stream;
    }

    private getUserMediaContraints(audio: boolean | MediaDeviceInfo, video: boolean | MediaDeviceInfo): MediaStreamConstraints {
@ -68,55 +68,13 @@ export class MediaDevicesWrapper implements IMediaDevices {
            video: true,
        };
    }
}

export class StreamWrapper implements Stream {

    public audioTrack: AudioTrackWrapper | undefined = undefined;
    public videoTrack: TrackWrapper | undefined = undefined;

    constructor(public readonly stream: MediaStream) {
        for (const track of stream.getTracks()) {
            this.update(track);
        }
    }

    get id(): string { return this.stream.id; }

    clone(): Stream {
        return new StreamWrapper(this.stream.clone());
    }

    update(track: MediaStreamTrack): TrackWrapper | undefined {
        if (track.kind === "video") {
            if (!this.videoTrack || track.id !== this.videoTrack.track.id) {
                this.videoTrack = new TrackWrapper(track, this.stream);
            }
            return this.videoTrack;
        } else if (track.kind === "audio") {
            if (!this.audioTrack || track.id !== this.audioTrack.track.id) {
                this.audioTrack = new AudioTrackWrapper(track, this.stream);
            }
            return this.audioTrack;
        }
    createVolumeMeasurer(stream: Stream, callback: () => void): VolumeMeasurer {
        return new WebAudioVolumeMeasurer(stream as MediaStream, callback);
    }
}

export class TrackWrapper implements Track {
    constructor(
        public readonly track: MediaStreamTrack,
        public readonly stream: MediaStream
    ) {}

    get kind(): TrackKind { return this.track.kind as TrackKind; }
    get label(): string { return this.track.label; }
    get id(): string { return this.track.id; }
    get settings(): MediaTrackSettings { return this.track.getSettings(); }

    stop() { this.track.stop(); }
}

export class AudioTrackWrapper extends TrackWrapper {
export class WebAudioVolumeMeasurer implements VolumeMeasurer {
    private measuringVolumeActivity = false;
    private audioContext?: AudioContext;
    private analyser: AnalyserNode;
@ -125,9 +83,12 @@ export class AudioTrackWrapper extends TrackWrapper {
    private speaking = false;
    private volumeLooperTimeout: number;
    private speakingVolumeSamples: number[];
    private callback: () => void;
    private stream: MediaStream;

    constructor(track: MediaStreamTrack, stream: MediaStream) {
        super(track, stream);
    constructor(stream: MediaStream, callback: () => void) {
        this.stream = stream;
        this.callback = callback;
        this.speakingVolumeSamples = new Array(SPEAKING_SAMPLE_COUNT).fill(-Infinity);
        this.initVolumeMeasuring();
        this.measureVolumeActivity(true);
@ -147,6 +108,7 @@ export class AudioTrackWrapper extends TrackWrapper {
        } else {
            this.measuringVolumeActivity = false;
            this.speakingVolumeSamples.fill(-Infinity);
            this.callback();
            // this.emit(CallFeedEvent.VolumeChanged, -Infinity);
        }
    }
@ -167,7 +129,6 @@ export class AudioTrackWrapper extends TrackWrapper {
        this.frequencyBinCount = new Float32Array(this.analyser.frequencyBinCount);
    }

    public setSpeakingThreshold(threshold: number) {
        this.speakingThreshold = threshold;
    }
@ -189,6 +150,7 @@ export class AudioTrackWrapper extends TrackWrapper {
        this.speakingVolumeSamples.shift();
        this.speakingVolumeSamples.push(maxVolume);

        this.callback();
        // this.emit(CallFeedEvent.VolumeChanged, maxVolume);

        let newSpeaking = false;
@ -204,267 +166,16 @@ export class AudioTrackWrapper extends TrackWrapper {

        if (this.speaking !== newSpeaking) {
            this.speaking = newSpeaking;
            this.callback();
            // this.emit(CallFeedEvent.Speaking, this.speaking);
        }

        this.volumeLooperTimeout = setTimeout(this.volumeLooper, POLLING_INTERVAL) as unknown as number;
    };

    public dispose(): void {
    public stop(): void {
        clearTimeout(this.volumeLooperTimeout);
        this.analyser.disconnect();
        this.audioContext?.close();
    }
}

// export interface ICallFeedOpts {
// client: MatrixClient;
// roomId: string;
// userId: string;
// stream: MediaStream;
// purpose: SDPStreamMetadataPurpose;
// audioMuted: boolean;
// videoMuted: boolean;
// }

// export enum CallFeedEvent {
// NewStream = "new_stream",
// MuteStateChanged = "mute_state_changed",
// VolumeChanged = "volume_changed",
// Speaking = "speaking",
// }

// export class CallFeed extends EventEmitter {
// public stream: MediaStream;
// public sdpMetadataStreamId: string;
// public userId: string;
// public purpose: SDPStreamMetadataPurpose;
// public speakingVolumeSamples: number[];

// private client: MatrixClient;
// private roomId: string;
// private audioMuted: boolean;
// private videoMuted: boolean;
// private measuringVolumeActivity = false;
// private audioContext: AudioContext;
// private analyser: AnalyserNode;
// private frequencyBinCount: Float32Array;
// private speakingThreshold = SPEAKING_THRESHOLD;
// private speaking = false;
// private volumeLooperTimeout: number;

// constructor(opts: ICallFeedOpts) {
// super();

// this.client = opts.client;
// this.roomId = opts.roomId;
// this.userId = opts.userId;
// this.purpose = opts.purpose;
// this.audioMuted = opts.audioMuted;
// this.videoMuted = opts.videoMuted;
// this.speakingVolumeSamples = new Array(SPEAKING_SAMPLE_COUNT).fill(-Infinity);
// this.sdpMetadataStreamId = opts.stream.id;

// this.updateStream(null, opts.stream);

// if (this.hasAudioTrack) {
// this.initVolumeMeasuring();
// }
// }

// private get hasAudioTrack(): boolean {
// return this.stream.getAudioTracks().length > 0;
// }

// private updateStream(oldStream: MediaStream, newStream: MediaStream): void {
// if (newStream === oldStream) return;

// if (oldStream) {
// oldStream.removeEventListener("addtrack", this.onAddTrack);
// this.measureVolumeActivity(false);
// }
// if (newStream) {
// this.stream = newStream;
// newStream.addEventListener("addtrack", this.onAddTrack);

// if (this.hasAudioTrack) {
// this.initVolumeMeasuring();
// } else {
// this.measureVolumeActivity(false);
// }
// }

// this.emit(CallFeedEvent.NewStream, this.stream);
// }

// private initVolumeMeasuring(): void {
// const AudioContext = window.AudioContext || window.webkitAudioContext;
// if (!this.hasAudioTrack || !AudioContext) return;

// this.audioContext = new AudioContext();

// this.analyser = this.audioContext.createAnalyser();
// this.analyser.fftSize = 512;
// this.analyser.smoothingTimeConstant = 0.1;

// const mediaStreamAudioSourceNode = this.audioContext.createMediaStreamSource(this.stream);
// mediaStreamAudioSourceNode.connect(this.analyser);

// this.frequencyBinCount = new Float32Array(this.analyser.frequencyBinCount);
// }

// private onAddTrack = (): void => {
// this.emit(CallFeedEvent.NewStream, this.stream);
// };

// /**
// * Returns callRoom member
// * @returns member of the callRoom
// */
// public getMember(): RoomMember {
// const callRoom = this.client.getRoom(this.roomId);
// return callRoom.getMember(this.userId);
// }

// /**
// * Returns true if CallFeed is local, otherwise returns false
// * @returns {boolean} is local?
// */
// public isLocal(): boolean {
// return this.userId === this.client.getUserId();
// }

// /**
// * Returns true if audio is muted or if there are no audio
// * tracks, otherwise returns false
// * @returns {boolean} is audio muted?
// */
// public isAudioMuted(): boolean {
// return this.stream.getAudioTracks().length === 0 || this.audioMuted;
// }

// *
// * Returns true video is muted or if there are no video
// * tracks, otherwise returns false
// * @returns {boolean} is video muted?

// public isVideoMuted(): boolean {
// // We assume only one video track
// return this.stream.getVideoTracks().length === 0 || this.videoMuted;
// }

// public isSpeaking(): boolean {
// return this.speaking;
// }

// /**
// * Replaces the current MediaStream with a new one.
// * This method should be only used by MatrixCall.
// * @param newStream new stream with which to replace the current one
// */
// public setNewStream(newStream: MediaStream): void {
// this.updateStream(this.stream, newStream);
// }

// /**
// * Set feed's internal audio mute state
// * @param muted is the feed's audio muted?
// */
// public setAudioMuted(muted: boolean): void {
// this.audioMuted = muted;
// this.speakingVolumeSamples.fill(-Infinity);
// this.emit(CallFeedEvent.MuteStateChanged, this.audioMuted, this.videoMuted);
// }

// /**
// * Set feed's internal video mute state
// * @param muted is the feed's video muted?
// */
// public setVideoMuted(muted: boolean): void {
// this.videoMuted = muted;
// this.emit(CallFeedEvent.MuteStateChanged, this.audioMuted, this.videoMuted);
// }

// /**
// * Starts emitting volume_changed events where the emitter value is in decibels
// * @param enabled emit volume changes
// */
// public measureVolumeActivity(enabled: boolean): void {
// if (enabled) {
// if (!this.audioContext || !this.analyser || !this.frequencyBinCount || !this.hasAudioTrack) return;

// this.measuringVolumeActivity = true;
// this.volumeLooper();
// } else {
// this.measuringVolumeActivity = false;
// this.speakingVolumeSamples.fill(-Infinity);
// this.emit(CallFeedEvent.VolumeChanged, -Infinity);
// }
// }

// public setSpeakingThreshold(threshold: number) {
// this.speakingThreshold = threshold;
// }

// private volumeLooper = () => {
// if (!this.analyser) return;

// if (!this.measuringVolumeActivity) return;

// this.analyser.getFloatFrequencyData(this.frequencyBinCount);

// let maxVolume = -Infinity;
// for (let i = 0; i < this.frequencyBinCount.length; i++) {
// if (this.frequencyBinCount[i] > maxVolume) {
// maxVolume = this.frequencyBinCount[i];
// }
// }

// this.speakingVolumeSamples.shift();
// this.speakingVolumeSamples.push(maxVolume);

// this.emit(CallFeedEvent.VolumeChanged, maxVolume);

// let newSpeaking = false;

// for (let i = 0; i < this.speakingVolumeSamples.length; i++) {
// const volume = this.speakingVolumeSamples[i];

// if (volume > this.speakingThreshold) {
// newSpeaking = true;
// break;
// }
// }

// if (this.speaking !== newSpeaking) {
// this.speaking = newSpeaking;
// this.emit(CallFeedEvent.Speaking, this.speaking);
// }

// this.volumeLooperTimeout = setTimeout(this.volumeLooper, POLLING_INTERVAL);
// };

// public clone(): CallFeed {
// const mediaHandler = this.client.getMediaHandler();
// const stream = this.stream.clone();

// if (this.purpose === SDPStreamMetadataPurpose.Usermedia) {
// mediaHandler.userMediaStreams.push(stream);
// } else {
// mediaHandler.screensharingStreams.push(stream);
// }

// return new CallFeed({
// client: this.client,
// roomId: this.roomId,
// userId: this.userId,
// stream,
// purpose: this.purpose,
// audioMuted: this.audioMuted,
// videoMuted: this.videoMuted,
// });
// }

// public dispose(): void {
// clearTimeout(this.volumeLooperTimeout);
// this.measureVolumeActivity(false);
// }
// }
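
A usage sketch for the new VolumeMeasurer API introduced above (assumptions: mediaDevices is a MediaDevicesWrapper instance and the browser grants microphone access):

const stream = await mediaDevices.getMediaTracks(true, false);
const measurer = mediaDevices.createVolumeMeasurer(stream, () => {
    // called from the polling loop whenever volume or speaking state changes
    console.log("speaking:", measurer.isSpeaking);
});
measurer.setSpeakingThreshold(-50); // dB; SPEAKING_THRESHOLD (-60) is the default
// when the stream is no longer used
measurer.stop();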
@ -14,9 +14,8 @@ See the License for the specific language governing permissions and
limitations under the License.
*/

import {StreamWrapper, TrackWrapper, AudioTrackWrapper} from "./MediaDevices";
import {Stream, Track, AudioTrack, TrackKind} from "../../types/MediaDevices";
import {WebRTC, PeerConnectionHandler, StreamSender, TrackSender, StreamReceiver, TrackReceiver, PeerConnection} from "../../types/WebRTC";
import {Stream, Track, TrackKind} from "../../types/MediaDevices";
import {WebRTC, Sender, PeerConnection} from "../../types/WebRTC";
import {SDPStreamMetadataPurpose} from "../../../matrix/calls/callEventTypes";

const POLLING_INTERVAL = 200; // ms
@ -24,151 +23,21 @@ export const SPEAKING_THRESHOLD = -60; // dB
const SPEAKING_SAMPLE_COUNT = 8; // samples

export class DOMWebRTC implements WebRTC {
    createPeerConnection(handler: PeerConnectionHandler, forceTURN: boolean, turnServers: RTCIceServer[], iceCandidatePoolSize): PeerConnection {
        return new DOMPeerConnection(handler, forceTURN, turnServers, iceCandidatePoolSize);
    }
}

export class RemoteStreamWrapper extends StreamWrapper {
    constructor(stream: MediaStream, private readonly emptyCallback: (stream: RemoteStreamWrapper) => void) {
        super(stream);
        this.stream.addEventListener("removetrack", this.onTrackRemoved);
    createPeerConnection(forceTURN: boolean, turnServers: RTCIceServer[], iceCandidatePoolSize): PeerConnection {
        return new RTCPeerConnection({
            iceTransportPolicy: forceTURN ? 'relay' : undefined,
            iceServers: turnServers,
            iceCandidatePoolSize: iceCandidatePoolSize,
        }) as PeerConnection;
    }

    onTrackRemoved = (evt: MediaStreamTrackEvent) => {
        if (evt.track.id === this.audioTrack?.track.id) {
            this.audioTrack = undefined;
        } else if (evt.track.id === this.videoTrack?.track.id) {
            this.videoTrack = undefined;
        }
        if (!this.audioTrack && !this.videoTrack) {
            this.emptyCallback(this);
        }
    };

    dispose() {
        this.stream.removeEventListener("removetrack", this.onTrackRemoved);
    }
}

export class DOMStreamSender implements StreamSender {
    public audioSender: DOMTrackSender | undefined;
    public videoSender: DOMTrackSender | undefined;

    constructor(public readonly stream: StreamWrapper) {}

    update(transceivers: ReadonlyArray<RTCRtpTransceiver>, sender: RTCRtpSender): DOMTrackSender | undefined {
        const transceiver = transceivers.find(t => t.sender === sender);
        if (transceiver && sender.track) {
            const trackWrapper = this.stream.update(sender.track);
            if (trackWrapper) {
                if (trackWrapper.kind === TrackKind.Video && (!this.videoSender || this.videoSender.track.id !== trackWrapper.id)) {
                    this.videoSender = new DOMTrackSender(trackWrapper, transceiver);
                    return this.videoSender;
                } else if (trackWrapper.kind === TrackKind.Audio && (!this.audioSender || this.audioSender.track.id !== trackWrapper.id)) {
                    this.audioSender = new DOMTrackSender(trackWrapper, transceiver);
                    return this.audioSender;
                }
            }
        }
    }
}

export class DOMStreamReceiver implements StreamReceiver {
    public audioReceiver: DOMTrackReceiver | undefined;
    public videoReceiver: DOMTrackReceiver | undefined;

    constructor(public readonly stream: RemoteStreamWrapper) {}

    update(event: RTCTrackEvent): DOMTrackReceiver | undefined {
        const {receiver} = event;
        const {track} = receiver;
        const trackWrapper = this.stream.update(track);
        if (trackWrapper) {
            if (trackWrapper.kind === TrackKind.Video) {
                this.videoReceiver = new DOMTrackReceiver(trackWrapper, event.transceiver);
                return this.videoReceiver;
            } else {
                this.audioReceiver = new DOMTrackReceiver(trackWrapper, event.transceiver);
                return this.audioReceiver;
            }
        }
    }
}

export class DOMTrackSenderOrReceiver implements TrackReceiver {
    constructor(
        public readonly track: TrackWrapper,
        public readonly transceiver: RTCRtpTransceiver,
        private readonly exclusiveValue: RTCRtpTransceiverDirection,
        private readonly excludedValue: RTCRtpTransceiverDirection
    ) {}

    get enabled(): boolean {
        return this.transceiver.direction === "sendrecv" ||
            this.transceiver.direction === this.exclusiveValue;
    }

    enable(enabled: boolean) {
        if (enabled !== this.enabled) {
            if (enabled) {
                if (this.transceiver.direction === "inactive") {
                    this.transceiver.direction = this.exclusiveValue;
                } else {
                    this.transceiver.direction = "sendrecv";
                }
            } else {
                if (this.transceiver.direction === "sendrecv") {
                    this.transceiver.direction = this.excludedValue;
                } else {
                    this.transceiver.direction = "inactive";
                }
            }
        }
    }
}

export class DOMTrackReceiver extends DOMTrackSenderOrReceiver {
    constructor(
        track: TrackWrapper,
        transceiver: RTCRtpTransceiver,
    ) {
        super(track, transceiver, "recvonly", "sendonly");
    }
}

export class DOMTrackSender extends DOMTrackSenderOrReceiver {
    constructor(
        track: TrackWrapper,
        transceiver: RTCRtpTransceiver,
    ) {
        super(track, transceiver, "sendonly", "recvonly");
    }
    /** replaces the track if possible without renegotiation. Can throw. */
    replaceTrack(track: Track | undefined): Promise<void> {
        return this.transceiver.sender.replaceTrack(track ? (track as TrackWrapper).track : null);
    }

    prepareForPurpose(purpose: SDPStreamMetadataPurpose): void {
    prepareSenderForPurpose(peerConnection: PeerConnection, sender: Sender, purpose: SDPStreamMetadataPurpose): void {
        if (purpose === SDPStreamMetadataPurpose.Screenshare) {
            this.getRidOfRTXCodecs();
            this.getRidOfRTXCodecs(peerConnection as RTCPeerConnection, sender as RTCRtpSender);
        }
    }

    /**
     * This method removes all video/rtx codecs from screensharing video
     * transceivers. This is necessary since they can cause problems. Without
     * this the following steps should produce an error:
     * Chromium calls Firefox
     * Firefox answers
     * Firefox starts screen-sharing
     * Chromium starts screen-sharing
     * Call crashes for Chromium with:
     * [96685:23:0518/162603.933321:ERROR:webrtc_video_engine.cc(3296)] RTX codec (PT=97) mapped to PT=96 which is not in the codec list.
     * [96685:23:0518/162603.933377:ERROR:webrtc_video_engine.cc(1171)] GetChangedRecvParameters called without any video codecs.
     * [96685:23:0518/162603.933430:ERROR:sdp_offer_answer.cc(4302)] Failed to set local video description recv parameters for m-section with mid='2'. (INVALID_PARAMETER)
     */
    private getRidOfRTXCodecs(): void {
    private getRidOfRTXCodecs(peerConnection: RTCPeerConnection, sender: RTCRtpSender): void {
        // RTCRtpReceiver.getCapabilities and RTCRtpSender.getCapabilities don't seem to be supported on FF
        if (!RTCRtpReceiver.getCapabilities || !RTCRtpSender.getCapabilities) return;

@ -182,172 +51,14 @@ export class DOMTrackSender extends DOMTrackSenderOrReceiver {
                codecs.splice(rtxCodecIndex, 1);
            }
        }
        if (this.transceiver.sender.track?.kind === "video" ||
            this.transceiver.receiver.track?.kind === "video") {
            this.transceiver.setCodecPreferences(codecs);
        }
    }
}

class DOMPeerConnection implements PeerConnection {
    private readonly peerConnection: RTCPeerConnection;
    private readonly handler: PeerConnectionHandler;
    public readonly localStreams: Map<string, DOMStreamSender> = new Map();
    public readonly remoteStreams: Map<string, DOMStreamReceiver> = new Map();

    constructor(handler: PeerConnectionHandler, forceTURN: boolean, turnServers: RTCIceServer[], iceCandidatePoolSize) {
        this.handler = handler;
        this.peerConnection = new RTCPeerConnection({
            iceTransportPolicy: forceTURN ? 'relay' : undefined,
            iceServers: turnServers,
            iceCandidatePoolSize: iceCandidatePoolSize,
        });
        this.registerHandler();
    }

    get iceGatheringState(): RTCIceGatheringState { return this.peerConnection.iceGatheringState; }
    get localDescription(): RTCSessionDescription | undefined { return this.peerConnection.localDescription ?? undefined; }
    get signalingState(): RTCSignalingState { return this.peerConnection.signalingState; }

    createOffer(): Promise<RTCSessionDescriptionInit> {
        return this.peerConnection.createOffer();
    }

    createAnswer(): Promise<RTCSessionDescriptionInit> {
        return this.peerConnection.createAnswer();
    }

    setLocalDescription(description?: RTCSessionDescriptionInit): Promise<void> {
        return this.peerConnection.setLocalDescription(description);
    }

    setRemoteDescription(description: RTCSessionDescriptionInit): Promise<void> {
        return this.peerConnection.setRemoteDescription(description);
    }

    addIceCandidate(candidate: RTCIceCandidate): Promise<void> {
        return this.peerConnection.addIceCandidate(candidate);
    }

    close(): void {
        return this.peerConnection.close();
    }

    addTrack(track: Track): DOMTrackSender | undefined {
        if (!(track instanceof TrackWrapper)) {
            throw new Error("Not a TrackWrapper");
        }
        const sender = this.peerConnection.addTrack(track.track, track.stream);
        let streamSender = this.localStreams.get(track.stream.id);
        if (!streamSender) {
            // TODO: reuse existing stream wrapper here?
            streamSender = new DOMStreamSender(new StreamWrapper(track.stream));
            this.localStreams.set(track.stream.id, streamSender);
        }
        const trackSender = streamSender.update(this.peerConnection.getTransceivers(), sender);
        return trackSender;
    }

    removeTrack(sender: TrackSender): void {
        if (!(sender instanceof DOMTrackSender)) {
            throw new Error("Not a DOMTrackSender");
        }
        this.peerConnection.removeTrack((sender as DOMTrackSender).transceiver.sender);
        // TODO: update localStreams
    }

    createDataChannel(options: RTCDataChannelInit): any {
        return this.peerConnection.createDataChannel("channel", options);
    }

    private registerHandler() {
        const pc = this.peerConnection;
        pc.addEventListener('negotiationneeded', this);
        pc.addEventListener('icecandidate', this);
        pc.addEventListener('iceconnectionstatechange', this);
        pc.addEventListener('icegatheringstatechange', this);
        pc.addEventListener('signalingstatechange', this);
        pc.addEventListener('track', this);
        pc.addEventListener('datachannel', this);
    }

    private deregisterHandler() {
        const pc = this.peerConnection;
        pc.removeEventListener('negotiationneeded', this);
        pc.removeEventListener('icecandidate', this);
        pc.removeEventListener('iceconnectionstatechange', this);
        pc.removeEventListener('icegatheringstatechange', this);
        pc.removeEventListener('signalingstatechange', this);
        pc.removeEventListener('track', this);
        pc.removeEventListener('datachannel', this);
    }

    /** @internal */
    handleEvent(evt: Event) {
        switch (evt.type) {
            case "iceconnectionstatechange":
                this.handleIceConnectionStateChange();
                break;
            case "icecandidate":
                this.handleLocalIceCandidate(evt as RTCPeerConnectionIceEvent);
                break;
            case "icegatheringstatechange":
                this.handler.onIceGatheringStateChange(this.peerConnection.iceGatheringState);
                break;
            case "track":
                this.handleRemoteTrack(evt as RTCTrackEvent);
                break;
            case "negotiationneeded":
                this.handler.onNegotiationNeeded();
                break;
            case "datachannel":
                this.handler.onRemoteDataChannel((evt as RTCDataChannelEvent).channel);
                break;
        }
    }

    dispose(): void {
        this.deregisterHandler();
        for (const r of this.remoteStreams.values()) {
            r.stream.dispose();
        }
    }

    private handleLocalIceCandidate(event: RTCPeerConnectionIceEvent) {
        if (event.candidate) {
            this.handler.onLocalIceCandidate(event.candidate);
        }
    };

    private handleIceConnectionStateChange() {
        const {iceConnectionState} = this.peerConnection;
        if (iceConnectionState === "failed" && this.peerConnection.restartIce) {
            this.peerConnection.restartIce();
        } else {
            this.handler.onIceConnectionStateChange(iceConnectionState);
        }
    }

    onRemoteStreamEmpty = (stream: RemoteStreamWrapper): void => {
        if (this.remoteStreams.delete(stream.id)) {
            this.handler.onRemoteStreamRemoved(stream);
        }
    }

    private handleRemoteTrack(evt: RTCTrackEvent) {
        if (evt.streams.length !== 1) {
            throw new Error("track in multiple streams is not supported");
        }
        const stream = evt.streams[0];
        const transceivers = this.peerConnection.getTransceivers();
        let streamReceiver: DOMStreamReceiver | undefined = this.remoteStreams.get(stream.id);
        if (!streamReceiver) {
            streamReceiver = new DOMStreamReceiver(new RemoteStreamWrapper(stream, this.onRemoteStreamEmpty));
            this.remoteStreams.set(stream.id, streamReceiver);
        }
        const trackReceiver = streamReceiver.update(evt);
        if (trackReceiver) {
            this.handler.onRemoteTracksAdded(trackReceiver);

        const transceiver = peerConnection.getTransceivers().find(t => t.sender === sender);
        if (transceiver && (
            transceiver.sender.track?.kind === "video" ||
            transceiver.receiver.track?.kind === "video"
            )
        ) {
            transceiver.setCodecPreferences(codecs);
        }
    }
|
|
||||
<script id="main" type="module">
|
||||
import {main} from "./main";
|
||||
import {Platform} from "./Platform";
|
||||
import configJSON from "./assets/config.json?raw";
|
||||
import configURL from "./assets/config.json?url";
|
||||
import assetPaths from "./sdk/paths/vite";
|
||||
if (import.meta.env.PROD) {
|
||||
assetPaths.serviceWorker = "sw.js";
|
||||
}
|
||||
const platform = new Platform(
|
||||
document.body,
|
||||
const platform = new Platform({
|
||||
container: document.body,
|
||||
assetPaths,
|
||||
JSON.parse(configJSON),
|
||||
{development: import.meta.env.DEV}
|
||||
);
|
||||
configURL,
|
||||
options: {development: import.meta.env.DEV}
|
||||
});
|
||||
main(platform);
|
||||
</script>
|
||||
</body>
|
||||
|
@ -32,6 +32,7 @@ export async function main(platform) {
    // const recorder = new RecordRequester(createFetchRequest(clock.createTimeout));
    // const request = recorder.request;
    // window.getBrawlFetchLog = () => recorder.log();
    await platform.init();
    const navigation = createNavigation();
    platform.setNavigation(navigation);
    const urlRouter = createRouter({navigation, history: platform.history});

@ -92,8 +92,12 @@ function isCacheableThumbnail(url) {

const baseURL = new URL(self.registration.scope);
let pendingFetchAbortController = new AbortController();

async function handleRequest(request) {
    try {
        if (request.url.includes("config.json")) {
            return handleConfigRequest(request);
        }
        const url = new URL(request.url);
        // rewrite / to /index.html so it hits the cache
        if (url.origin === baseURL.origin && url.pathname === baseURL.pathname) {

@ -119,6 +123,27 @@ async function handleRequest(request) {
    }
}

async function handleConfigRequest(request) {
    let response = await readCache(request);
    const networkResponsePromise = fetchAndUpdateConfig(request);
    if (response) {
        return response;
    } else {
        return await networkResponsePromise;
    }
}

async function fetchAndUpdateConfig(request) {
    const response = await fetch(request, {
        signal: pendingFetchAbortController.signal,
        headers: {
            "Cache-Control": "no-cache",
        },
    });
    updateCache(request, response.clone());
    return response;
}

async function updateCache(request, response) {
    // don't write error responses to the cache
    if (response.status >= 400) {
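handleConfigRequest above serves config.json cache-first while always kicking off fetchAndUpdateConfig, which refreshes the cached copy in the background for the next load. readCache and updateCache are defined elsewhere in the worker; assuming they are thin wrappers over the standard Cache API, they would look roughly like this (the cache name is an assumption):

```typescript
// Sketch only, assuming readCache/updateCache wrap the Cache API.
// The cache name and the exact helpers in sw.js may differ.
const CONFIG_CACHE = "hydrogen-config";

async function readCacheSketch(request: Request): Promise<Response | undefined> {
    const cache = await caches.open(CONFIG_CACHE);
    return await cache.match(request);
}

async function updateCacheSketch(request: Request, response: Response): Promise<void> {
    const cache = await caches.open(CONFIG_CACHE);
    await cache.put(request, response);
}
```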

@ -1155,3 +1155,55 @@ button.RoomDetailsView_row::after {
    background-position: center;
    background-size: 36px;
}

.CallView {
    max-height: 50vh;
    overflow-y: auto;
}

.CallView ul {
    display: flex;
    margin: 0;
    gap: 12px;
    padding: 0;
    flex-wrap: wrap;
    justify-content: center;
}

.StreamView {
    width: 360px;
    min-height: 200px;
    border: 2px var(--accent-color) solid;
    display: grid;
    border-radius: 8px;
    overflow: hidden;
    background-color: black;
}

.StreamView > * {
    grid-column: 1;
    grid-row: 1;
}

.StreamView video {
    width: 100%;
}

.StreamView_avatar {
    align-self: center;
    justify-self: center;
}

.StreamView_muteStatus {
    align-self: end;
    justify-self: start;
}

.StreamView_muteStatus.microphoneMuted::before {
    content: "mic muted";
}

.StreamView_muteStatus.cameraMuted::before {
    content: "cam muted";
}

@ -15,7 +15,7 @@ See the License for the specific language governing permissions and
limitations under the License.
*/

import { setAttribute, text, isChildren, classNames, TAG_NAMES, HTML_NS, ClassNames, Child} from "./html";
import { setAttribute, text, isChildren, classNames, TAG_NAMES, HTML_NS, ClassNames, Child as NonBoundChild} from "./html";
import {mountView} from "./utils";
import {BaseUpdateView, IObservableValue} from "./BaseUpdateView";
import {IMountArgs, ViewNode, IView} from "./types";

@ -30,12 +30,15 @@ function objHasFns(obj: ClassNames<unknown>): obj is { [className: string]: bool
}

export type RenderFn<T> = (t: Builder<T>, vm: T) => ViewNode;
type TextBinding<T> = (T) => string | number | boolean | undefined | null;
type Child<T> = NonBoundChild | TextBinding<T>;
type Children<T> = Child<T> | Child<T>[];
type EventHandler = ((event: Event) => void);
type AttributeStaticValue = string | boolean;
type AttributeBinding<T> = (value: T) => AttributeStaticValue;
export type AttrValue<T> = AttributeStaticValue | AttributeBinding<T> | EventHandler | ClassNames<T>;
export type Attributes<T> = { [attribute: string]: AttrValue<T> };
type ElementFn<T> = (attributes?: Attributes<T> | Child | Child[], children?: Child | Child[]) => Element;
type ElementFn<T> = (attributes?: Attributes<T> | Children<T>, children?: Children<T>) => Element;
export type Builder<T> = TemplateBuilder<T> & { [tagName in typeof TAG_NAMES[string][number]]: ElementFn<T> };

/**

@ -178,7 +181,7 @@ export class TemplateBuilder<T extends IObservableValue> {
        this._templateView._addEventListener(node, name, fn, useCapture);
    }

    _addAttributeBinding(node: Element, name: string, fn: (value: T) => boolean | string): void {
    _addAttributeBinding(node: Element, name: string, fn: AttributeBinding<T>): void {
        let prevValue: string | boolean | undefined = undefined;
        const binding = () => {
            const newValue = fn(this._value);

@ -195,15 +198,15 @@ export class TemplateBuilder<T extends IObservableValue> {
        this._addAttributeBinding(node, "className", value => classNames(obj, value));
    }

    _addTextBinding(fn: (value: T) => string): Text {
        const initialValue = fn(this._value);
    _addTextBinding(fn: (value: T) => ReturnType<TextBinding<T>>): Text {
        const initialValue = fn(this._value)+"";
        const node = text(initialValue);
        let prevValue = initialValue;
        const binding = () => {
            const newValue = fn(this._value);
            const newValue = fn(this._value)+"";
            if (prevValue !== newValue) {
                prevValue = newValue;
                node.textContent = newValue+"";
                node.textContent = newValue;
            }
        };

@ -242,7 +245,7 @@ export class TemplateBuilder<T extends IObservableValue> {
        }
    }

    _setNodeChildren(node: Element, children: Child | Child[]): void{
    _setNodeChildren(node: Element, children: Children<T>): void{
        if (!Array.isArray(children)) {
            children = [children];
        }

@ -276,14 +279,18 @@ export class TemplateBuilder<T extends IObservableValue> {
        return node;
    }

    el(name: string, attributes?: Attributes<T> | Child | Child[], children?: Child | Child[]): ViewNode {
    el(name: string, attributes?: Attributes<T> | Children<T>, children?: Children<T>): ViewNode {
        return this.elNS(HTML_NS, name, attributes, children);
    }

    elNS(ns: string, name: string, attributes?: Attributes<T> | Child | Child[], children?: Child | Child[]): ViewNode {
        if (attributes !== undefined && isChildren(attributes)) {
            children = attributes;
            attributes = undefined;
    elNS(ns: string, name: string, attributesOrChildren?: Attributes<T> | Children<T>, children?: Children<T>): ViewNode {
        let attributes: Attributes<T> | undefined;
        if (attributesOrChildren) {
            if (isChildren(attributesOrChildren)) {
                children = attributesOrChildren as Children<T>;
            } else {
                attributes = attributesOrChildren as Attributes<T>;
            }
        }

        const node = document.createElementNS(ns, name);
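With the reworked elNS, an element function can take either an attributes object or children as its first optional argument, and bound text children are now typed through TextBinding instead of falling through untyped. A small illustration of both call forms inside a render function (the view-model type and its fields are hypothetical):

```typescript
// Illustration only; ExampleVM and its fields are hypothetical.
import type {Builder} from "./TemplateView";

type ExampleVM = {name: string; isActive: boolean};

// Builder<any> keeps the sketch compiling without an IObservableValue
// implementation; a real view model would provide the emitter methods.
function renderExample(t: Builder<any>, vm: ExampleVM) {
    return t.div({className: {active: (vm: ExampleVM) => vm.isActive}}, [
        t.h2("Title"),                              // children only, no attributes object
        t.p((vm: ExampleVM) => `Hello ${vm.name}`), // bound text child (TextBinding)
    ]);
}
```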

@ -330,7 +337,7 @@ export class TemplateBuilder<T extends IObservableValue> {
    // Special case of mapView for a TemplateView.
    // Always creates a TemplateView, if this is optional depending
    // on mappedValue, use `if` or `mapView`
    map<R>(mapFn: (value: T) => R, renderFn: (mapped: R, t: Builder<T>, vm: T) => ViewNode): ViewNode {
    map<R>(mapFn: (value: T) => R, renderFn: (mapped: R, t: Builder<T>, vm: T) => ViewNode | undefined): ViewNode {
        return this.mapView(mapFn, mappedValue => {
            return new InlineTemplateView(this._value, (t, vm) => {
                const rootNode = renderFn(mappedValue, t, vm);

@ -364,17 +371,17 @@ export class TemplateBuilder<T extends IObservableValue> {
    event handlers, ...
    You should not call the TemplateBuilder (e.g. `t.xxx()`) at all from the side effect,
    instead use tags from html.ts to help you construct any DOM you need. */
    mapSideEffect<R>(mapFn: (value: T) => R, sideEffect: (newV: R, oldV: R | undefined) => void) {
    mapSideEffect<R>(mapFn: (value: T) => R, sideEffect: (newV: R, oldV: R | undefined, value: T) => void) {
        let prevValue = mapFn(this._value);
        const binding = () => {
            const newValue = mapFn(this._value);
            if (prevValue !== newValue) {
                sideEffect(newValue, prevValue);
                sideEffect(newValue, prevValue, this._value);
                prevValue = newValue;
            }
        };
        this._addBinding(binding);
        sideEffect(prevValue, undefined);
        sideEffect(prevValue, undefined, this._value);
    }
}
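mapSideEffect now passes the current view-model value as a third argument, so a side effect can read other fields without re-mapping them. A minimal usage sketch, in the spirit of the StreamView further down in this diff (the view-model fields are hypothetical; note the side effect mutates an element created beforehand rather than calling t.xxx()):

```typescript
// Sketch only: lines from inside a render(t, vm) implementation.
// `stream` and `muted` are hypothetical view-model fields.
const video = t.video({autoplay: true}) as HTMLVideoElement;
t.mapSideEffect(vm => vm.stream, (newStream, _oldStream, vm) => {
    video.srcObject = (newStream as MediaStream | undefined) ?? null;
    video.muted = vm.muted; // other fields are reachable via the new third argument
});
```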

@ -14,36 +14,50 @@ See the License for the specific language governing permissions and
limitations under the License.
*/

import {TemplateView, TemplateBuilder} from "../../general/TemplateView";
import {TemplateView, Builder} from "../../general/TemplateView";
import {AvatarView} from "../../AvatarView";
import {ListView} from "../../general/ListView";
import {Stream} from "../../../../types/MediaDevices";
import type {StreamWrapper} from "../../../dom/MediaDevices";
import type {CallViewModel, CallMemberViewModel} from "../../../../../domain/session/room/CallViewModel";

function bindVideoTracks<T>(t: TemplateBuilder<T>, video: HTMLVideoElement, propSelector: (vm: T) => Stream | undefined) {
    t.mapSideEffect(propSelector, stream => {
        if (stream) {
            video.srcObject = (stream as StreamWrapper).stream;
        }
    });
    return video;
}
import type {CallViewModel, CallMemberViewModel, IStreamViewModel} from "../../../../../domain/session/room/CallViewModel";

export class CallView extends TemplateView<CallViewModel> {
    render(t: TemplateBuilder<CallViewModel>, vm: CallViewModel): HTMLElement {
    render(t: Builder<CallViewModel>, vm: CallViewModel): Element {
        return t.div({class: "CallView"}, [
            t.p(vm => `Call ${vm.name} (${vm.id})`),
            t.div({class: "CallView_me"}, bindVideoTracks(t, t.video({autoplay: true, width: 240}), vm => vm.localStream)),
            t.view(new ListView({list: vm.memberViewModels}, vm => new MemberView(vm))),
            t.view(new ListView({list: vm.memberViewModels}, vm => new StreamView(vm))),
            t.div({class: "buttons"}, [
                t.button({onClick: () => vm.leave()}, "Leave")
                t.button({onClick: () => vm.leave()}, "Leave"),
                t.button({onClick: () => vm.toggleVideo()}, "Toggle video"),
            ])
        ]);
    }
}

class MemberView extends TemplateView<CallMemberViewModel> {
    render(t: TemplateBuilder<CallMemberViewModel>, vm: CallMemberViewModel) {
        return bindVideoTracks(t, t.video({autoplay: true, width: 360}), vm => vm.stream);
class StreamView extends TemplateView<IStreamViewModel> {
    render(t: Builder<IStreamViewModel>, vm: IStreamViewModel): Element {
        const video = t.video({
            autoplay: true,
            className: {
                hidden: vm => vm.isCameraMuted
            }
        }) as HTMLVideoElement;
        t.mapSideEffect(vm => vm.stream, stream => {
            video.srcObject = stream as MediaStream;
        });
        return t.div({className: "StreamView"}, [
            video,
            t.div({className: {
                StreamView_avatar: true,
                hidden: vm => !vm.isCameraMuted
            }}, t.view(new AvatarView(vm, 64), {parentProvidesUpdates: true})),
            t.div({
                className: {
                    StreamView_muteStatus: true,
                    hidden: vm => !vm.isCameraMuted && !vm.isMicrophoneMuted,
                    microphoneMuted: vm => vm.isMicrophoneMuted && !vm.isCameraMuted,
                    cameraMuted: vm => vm.isCameraMuted,
                }
            })
        ]);
    }
}
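StreamView renders anything satisfying IStreamViewModel, which is declared in CallViewModel elsewhere in this branch. Judging only from the bindings above, the shape it needs is roughly the following (a sketch, not the actual declaration):

```typescript
// Rough sketch inferred from the bindings in StreamView above; the real
// IStreamViewModel in CallViewModel.ts may differ, and the members that
// AvatarView reads to render the avatar are not listed here.
interface IStreamViewModelSketch {
    get stream(): MediaStream | undefined;
    get isCameraMuted(): boolean;
    get isMicrophoneMuted(): boolean;
}
```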

@ -1,52 +0,0 @@
/*
Copyright 2022 The Matrix.org Foundation C.I.C.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

export class AsyncQueue<T, V> {
    private isRunning = false;
    private queue: T[] = [];
    private error?: Error;

    constructor(
        private readonly reducer: (v: V, t: T) => Promise<V>,
        private value: V,
        private readonly contains: (t: T, queue: T[]) => boolean = (t, queue) => queue.includes(t)
    ) {}

    push(t: T) {
        if (this.contains(t, this.queue)) {
            return;
        }
        this.queue.push(t);
        this.runLoopIfNeeded();
    }

    private async runLoopIfNeeded() {
        if (this.isRunning || this.error) {
            return;
        }
        this.isRunning = true;
        try {
            let item: T | undefined;
            while (item = this.queue.shift()) {
                this.value = await this.reducer(this.value, item);
            }
        } catch (err) {
            this.error = err;
        } finally {
            this.isRunning = false;
        }
    }
}
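The deleted AsyncQueue serialized asynchronous work through a reducer: pushes are deduplicated via the contains check, items run strictly one at a time, and the first error permanently stops the loop. A usage sketch under hypothetical names:

```typescript
// Sketch only: serializing storage writes so they never interleave.
// The Write type and the persistWrite function are hypothetical.
type Write = {key: string; value: string};

async function persistWrite(count: number, w: Write): Promise<number> {
    // pretend this awaits some asynchronous storage API
    await Promise.resolve();
    console.log(`wrote ${w.key}=${w.value}`);
    return count + 1; // the accumulated value is threaded through the reducer
}

const writeQueue = new AsyncQueue<Write, number>(persistWrite, 0);
writeQueue.push({key: "a", value: "1"}); // starts the loop
writeQueue.push({key: "b", value: "2"}); // queued; runs after the first write settles
```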

@ -31,6 +31,7 @@ const commonOptions = {
        assetsInlineLimit: 0,
        polyfillModulePreload: false,
    },
    assetsInclude: ['**/config.json'],
    define: {
        DEFINE_VERSION: JSON.stringify(version),
        DEFINE_GLOBAL_HASH: JSON.stringify(null),

@ -14,6 +14,11 @@ export default defineConfig(({mode}) => {
        outDir: "../../../target",
        minify: true,
        sourcemap: true,
        rollupOptions: {
            output: {
                assetFileNames: (asset) => asset.name.includes("config.json") ? "assets/[name][extname]": "assets/[name].[hash][extname]",
            },
        },
    },
    plugins: [
        themeBuilder({