Bruno Windels 2022-04-13 18:34:01 +02:00
parent 2d4301fe5a
commit bc118b5c0b
13 changed files with 154 additions and 128 deletions

View file

@ -18,7 +18,7 @@ import {ViewModel, Options as BaseOptions} from "../../ViewModel";
import type {GroupCall} from "../../../matrix/calls/group/GroupCall";
import type {Member} from "../../../matrix/calls/group/Member";
import type {BaseObservableList} from "../../../observable/list/BaseObservableList";
import type {Track} from "../../../platform/types/MediaDevices";
import type {Stream} from "../../../platform/types/MediaDevices";
type Options = BaseOptions & {call: GroupCall};
@ -46,8 +46,8 @@ export class CallViewModel extends ViewModel<Options> {
return this.call.id;
}
get localTracks(): Track[] {
return this.call.localMedia?.tracks ?? [];
get localStream(): Stream | undefined {
return this.call.localMedia?.userMedia;
}
leave() {
@ -60,8 +60,8 @@ export class CallViewModel extends ViewModel<Options> {
type MemberOptions = BaseOptions & {member: Member};
export class CallMemberViewModel extends ViewModel<MemberOptions> {
get tracks(): Track[] {
return this.member.remoteTracks;
get stream(): Stream | undefined {
return this.member.remoteMedia?.userMedia;
}
private get member(): Member {
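Both view models now hand a whole Stream to the view instead of an array of tracks. For orientation, here is a minimal sketch of the Stream and Track shapes these getters rely on, pieced together from how they are used elsewhere in this commit; the real interfaces live in platform/types/MediaDevices and likely have more members.

interface Track {
    get kind(): string; // "audio" | "video"
    get id(): string;
}

interface Stream {
    get id(): string;
    get audioTrack(): Track | undefined;
    get videoTrack(): Track | undefined;
    clone(): Stream;
}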

View file

@ -365,8 +365,9 @@ export class RoomViewModel extends ViewModel {
async startCall() {
try {
const session = this.getOption("session");
const mediaTracks = await this.platform.mediaDevices.getMediaTracks(false, true);
const localMedia = new LocalMedia().withTracks(mediaTracks);
const stream = await this.platform.mediaDevices.getMediaTracks(false, true);
const localMedia = new LocalMedia().withUserMedia(stream);
await this._call.join(localMedia);
// this will set the callViewModel above, as a call will be added to callHandler.calls
const call = await session.callHandler.createCall(this._room.id, localMedia, "A call " + Math.round(this.platform.random() * 100));
await call.join(localMedia);
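getMediaTracks now resolves to a single Stream rather than a track array. A hypothetical sketch of the platform method called above, assuming the web implementation wraps navigator.mediaDevices.getUserMedia in the StreamWrapper shown further down in this commit; the (audio, video) parameter order is an assumption, not something the diff confirms.

async function getMediaTracks(audio: boolean, video: boolean): Promise<Stream> {
    // wrap the native MediaStream in the platform Stream wrapper
    const mediaStream = await navigator.mediaDevices.getUserMedia({audio, video});
    return new StreamWrapper(mediaStream);
}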

View file

@ -74,9 +74,8 @@ export class CallTile extends SimpleTile {
async join() {
if (this.canJoin) {
const mediaTracks = await this.platform.mediaDevices.getMediaTracks(false, true);
// const screenShareTrack = await this.platform.mediaDevices.getScreenShareTrack();
const localMedia = new LocalMedia().withTracks(mediaTracks);
const stream = await this.platform.mediaDevices.getMediaTracks(false, true);
const localMedia = new LocalMedia().withUserMedia(stream);
await this._call.join(localMedia);
}
}

View file

@ -16,7 +16,7 @@ limitations under the License.
import {ObservableMap} from "../../observable/map/ObservableMap";
import {WebRTC, PeerConnection, PeerConnectionHandler} from "../../platform/types/WebRTC";
import {MediaDevices, Track, AudioTrack, TrackType} from "../../platform/types/MediaDevices";
import {MediaDevices, Track, AudioTrack} from "../../platform/types/MediaDevices";
import {handlesEventType} from "./PeerCall";
import {EventType, CallIntent} from "./callEventTypes";
import {GroupCall} from "./group/GroupCall";

View file

@ -37,24 +37,6 @@ export class LocalMedia {
return new LocalMedia(this.userMedia, this.screenShare, options);
}
getSDPMetadata(): SDPStreamMetadata {
const metadata = {};
const userMediaTrack = this.microphoneTrack ?? this.cameraTrack;
if (userMediaTrack) {
metadata[userMediaTrack.streamId] = {
purpose: SDPStreamMetadataPurpose.Usermedia,
audio_muted: this.microphoneTrack?.muted ?? true,
video_muted: this.cameraTrack?.muted ?? true,
};
}
if (this.screenShareTrack) {
metadata[this.screenShareTrack.streamId] = {
purpose: SDPStreamMetadataPurpose.Screenshare
};
}
return metadata;
}
clone() {
return new LocalMedia(this.userMedia?.clone(), this.screenShare?.clone(), this.dataChannelOptions);
}
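With getSDPMetadata() moved into PeerCall (see below), LocalMedia is reduced to a holder of streams plus data-channel options. A minimal sketch of the shape this commit converges on, limited to members visible in this diff; the data-channel options type is an assumption.

class LocalMedia {
    constructor(
        public readonly userMedia?: Stream,
        public readonly screenShare?: Stream,
        public readonly dataChannelOptions?: RTCDataChannelInit, // actual type not shown in this diff
    ) {}

    withUserMedia(stream: Stream): LocalMedia {
        return new LocalMedia(stream, this.screenShare, this.dataChannelOptions);
    }

    clone(): LocalMedia {
        return new LocalMedia(this.userMedia?.clone(), this.screenShare?.clone(), this.dataChannelOptions);
    }

    // dispose() also exists; PeerCall calls it when the call ends
}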

View file

@ -23,8 +23,8 @@ import type {StateEvent} from "../storage/types";
import type {ILogItem} from "../../logging/types";
import type {TimeoutCreator, Timeout} from "../../platform/types/types";
import {WebRTC, PeerConnection, PeerConnectionHandler} from "../../platform/types/WebRTC";
import {MediaDevices, Track, AudioTrack, TrackType} from "../../platform/types/MediaDevices";
import {WebRTC, PeerConnection, PeerConnectionHandler, TrackSender, TrackReceiver} from "../../platform/types/WebRTC";
import {MediaDevices, Track, AudioTrack, Stream} from "../../platform/types/MediaDevices";
import type {LocalMedia} from "./LocalMedia";
import {
@ -52,6 +52,10 @@ export type Options = {
sendSignallingMessage: (message: SignallingMessage<MCallBase>, log: ILogItem) => Promise<void>;
};
export class RemoteMedia {
constructor(public userMedia?: Stream | undefined, public screenShare?: Stream | undefined) {}
}
// when sending, we need to encrypt the message with olm. I think the flow of room => roomEncryption => olmEncryption, as we already
// do for sharing keys, will be best as that already deals with room tracking.
/**
@ -89,6 +93,7 @@ export class PeerCall implements IDisposable {
private _dataChannel?: any;
private _hangupReason?: CallErrorCode;
private _remoteMedia: RemoteMedia;
constructor(
private callId: string,
@ -96,6 +101,7 @@ export class PeerCall implements IDisposable {
private readonly logItem: ILogItem,
) {
const outer = this;
this._remoteMedia = new RemoteMedia();
this.peerConnection = options.webRTC.createPeerConnection({
onIceConnectionStateChange(state: RTCIceConnectionState) {
outer.logItem.wrap({l: "onIceConnectionStateChange", status: state}, log => {
@ -112,9 +118,14 @@ export class PeerCall implements IDisposable {
outer.handleIceGatheringState(state, log);
});
},
onRemoteTracksChanged(tracks: Track[]) {
outer.logItem.wrap({l: "onRemoteTracksChanged", length: tracks.length}, log => {
outer.options.emitUpdate(outer, undefined);
onRemoteStreamRemoved(stream: Stream) {
outer.logItem.wrap("onRemoteStreamRemoved", log => {
outer.updateRemoteMedia(log);
});
},
onRemoteTracksAdded(trackReceiver: TrackReceiver) {
outer.logItem.wrap("onRemoteTracksAdded", log => {
outer.updateRemoteMedia(log);
});
},
onRemoteDataChannel(dataChannel: any | undefined) {
@ -130,9 +141,6 @@ export class PeerCall implements IDisposable {
});
};
outer.responsePromiseChain = outer.responsePromiseChain?.then(promiseCreator) ?? promiseCreator();
},
getPurposeForStreamId(streamId: string): SDPStreamMetadataPurpose {
return outer.remoteSDPStreamMetadata?.[streamId]?.purpose ?? SDPStreamMetadataPurpose.Usermedia;
}
}, this.options.forceTURN, this.options.turnServers, 0);
}
@ -143,8 +151,9 @@ export class PeerCall implements IDisposable {
get hangupReason(): CallErrorCode | undefined { return this._hangupReason; }
get remoteTracks(): Track[] {
return this.peerConnection.remoteTracks;
// we should keep an object with streams by purpose ... e.g. RemoteMedia?
get remoteMedia(): Readonly<RemoteMedia> {
return this._remoteMedia;
}
call(localMedia: LocalMedia): Promise<void> {
@ -152,13 +161,10 @@ export class PeerCall implements IDisposable {
if (this._state !== CallState.Fledgling) {
return;
}
this.localMedia = localMedia;
this.direction = CallDirection.Outbound;
this.setState(CallState.CreateOffer, log);
for (const t of this.localMedia.tracks) {
this.peerConnection.addTrack(t);
}
if (this.localMedia.dataChannelOptions) {
this.setMedia(localMedia);
if (this.localMedia?.dataChannelOptions) {
this._dataChannel = this.peerConnection.createDataChannel(this.localMedia.dataChannelOptions);
}
// after adding the local tracks, and wait for handleNegotiation to be called,
@ -172,11 +178,8 @@ export class PeerCall implements IDisposable {
if (this._state !== CallState.Ringing) {
return;
}
this.localMedia = localMedia;
this.setState(CallState.CreateAnswer, log);
for (const t of this.localMedia.tracks) {
this.peerConnection.addTrack(t);
}
this.setMedia(localMedia, log);
let myAnswer: RTCSessionDescriptionInit;
try {
myAnswer = await this.peerConnection.createAnswer();
@ -205,27 +208,40 @@ export class PeerCall implements IDisposable {
});
}
setMedia(localMediaPromise: Promise<LocalMedia>): Promise<void> {
return this.logItem.wrap("setMedia", async log => {
setMedia(localMedia: LocalMedia, logItem: ILogItem = this.logItem): Promise<void> {
return logItem.wrap("setMedia", async log => {
const oldMedia = this.localMedia;
this.localMedia = await localMediaPromise;
this.localMedia = localMedia;
const applyStream = (oldStream: Stream | undefined, stream: Stream | undefined, logLabel: string) => {
const streamSender = oldStream ? this.peerConnection.localStreams.get(oldStream.id) : undefined;
const applyTrack = (selectTrack: (media: LocalMedia | undefined) => Track | undefined) => {
const oldTrack = selectTrack(oldMedia);
const newTrack = selectTrack(this.localMedia);
if (oldTrack && newTrack) {
this.peerConnection.replaceTrack(oldTrack, newTrack);
} else if (oldTrack) {
this.peerConnection.removeTrack(oldTrack);
} else if (newTrack) {
this.peerConnection.addTrack(newTrack);
const applyTrack = (oldTrack: Track | undefined, sender: TrackSender | undefined, track: Track | undefined) => {
if (track) {
if (oldTrack && sender) {
log.wrap(`replacing ${logLabel} ${track.kind} track`, log => {
sender.replaceTrack(track);
});
} else {
log.wrap(`adding ${logLabel} ${track.kind} track`, log => {
this.peerConnection.addTrack(track);
});
}
} else {
if (sender) {
log.wrap(`removing ${logLabel} ${sender.track.kind} track`, log => {
this.peerConnection.removeTrack(sender);
});
}
}
}
};
// add the local tracks, and wait for onNegotiationNeeded and handleNegotiation to be called
applyTrack(m => m?.microphoneTrack);
applyTrack(m => m?.cameraTrack);
applyTrack(m => m?.screenShareTrack);
applyTrack(oldStream?.audioTrack, streamSender?.audioSender, stream?.audioTrack);
applyTrack(oldStream?.videoTrack, streamSender?.videoSender, stream?.videoTrack);
}
applyStream(oldMedia?.userMedia, localMedia?.userMedia, "userMedia");
applyStream(oldMedia?.screenShare, localMedia?.screenShare, "screenShare");
// TODO: datachannel, but don't do it here as we don't want to do it from answer, rather in different method
});
}
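For reference, the three applyTrack branches above correspond to plain RTCPeerConnection operations roughly as follows: replaceTrack on the sender swaps media without renegotiation, while addTrack and removeTrack both fire negotiationneeded. This is an illustrative sketch, not part of the commit.

declare const pc: RTCPeerConnection;
declare const sender: RTCRtpSender;
declare const stream: MediaStream;
declare const oldTrack: MediaStreamTrack | undefined;
declare const newTrack: MediaStreamTrack | undefined;

if (newTrack && oldTrack) {
    sender.replaceTrack(newTrack); // swap in place, no renegotiation needed
} else if (newTrack) {
    pc.addTrack(newTrack, stream); // triggers negotiationneeded
} else if (oldTrack) {
    pc.removeTrack(sender);        // also triggers negotiationneeded
}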
@ -321,7 +337,7 @@ export class PeerCall implements IDisposable {
const content = {
call_id: this.callId,
offer,
[SDPStreamMetadataKey]: this.localMedia!.getSDPMetadata(),
[SDPStreamMetadataKey]: this.getSDPMetadata(),
version: 1,
seq: this.seq++,
lifetime: CALL_TIMEOUT_MS
@ -408,7 +424,7 @@ export class PeerCall implements IDisposable {
const sdpStreamMetadata = content[SDPStreamMetadataKey];
if (sdpStreamMetadata) {
this.updateRemoteSDPStreamMetadata(sdpStreamMetadata);
this.updateRemoteSDPStreamMetadata(sdpStreamMetadata, log);
} else {
log.log(`Call did not get any SDPStreamMetadata! Can not send/receive multiple streams`);
}
@ -470,7 +486,7 @@ export class PeerCall implements IDisposable {
const sdpStreamMetadata = content[SDPStreamMetadataKey];
if (sdpStreamMetadata) {
this.updateRemoteSDPStreamMetadata(sdpStreamMetadata);
this.updateRemoteSDPStreamMetadata(sdpStreamMetadata, log);
} else {
log.log(`Did not get any SDPStreamMetadata! Can not send/receive multiple streams`);
}
@ -596,7 +612,7 @@ export class PeerCall implements IDisposable {
// type: EventType.CallNegotiate,
// content: {
// description: this.peerConnection.localDescription!,
// [SDPStreamMetadataKey]: this.localMedia.getSDPMetadata(),
// [SDPStreamMetadataKey]: this.getSDPMetadata(),
// }
// });
// }
@ -615,7 +631,7 @@ export class PeerCall implements IDisposable {
sdp: localDescription.sdp,
type: localDescription.type,
},
[SDPStreamMetadataKey]: this.localMedia!.getSDPMetadata(),
[SDPStreamMetadataKey]: this.getSDPMetadata(),
};
// We have just taken the local description from the peerConn which will
@ -699,18 +715,11 @@ export class PeerCall implements IDisposable {
});
}
private updateRemoteSDPStreamMetadata(metadata: SDPStreamMetadata): void {
private updateRemoteSDPStreamMetadata(metadata: SDPStreamMetadata, log: ILogItem): void {
// this will accumulate all updates into one object, so we still have the old stream info when we change stream id
this.remoteSDPStreamMetadata = recursivelyAssign(this.remoteSDPStreamMetadata || {}, metadata, true);
for (const track of this.peerConnection.remoteTracks) {
const streamMetaData = this.remoteSDPStreamMetadata?.[track.streamId];
if (streamMetaData) {
if (track.type === TrackType.Microphone) {
track.setMuted(streamMetaData.audio_muted);
} else { // Camera or ScreenShare
track.setMuted(streamMetaData.video_muted);
}
}
}
this.updateRemoteMedia(log);
// TODO: apply muting
}
private async addBufferedIceCandidates(log: ILogItem): Promise<void> {
@ -755,8 +764,6 @@ export class PeerCall implements IDisposable {
this.iceDisconnectedTimeout?.abort();
this.iceDisconnectedTimeout = undefined;
this.setState(CallState.Connected, log);
const transceivers = this.peerConnection.peerConnection.getTransceivers();
console.log(transceivers);
} else if (state == 'failed') {
this.iceDisconnectedTimeout?.abort();
this.iceDisconnectedTimeout = undefined;
@ -807,14 +814,53 @@ export class PeerCall implements IDisposable {
this.hangupParty = hangupParty;
this._hangupReason = hangupReason;
this.setState(CallState.Ended, log);
//this.localMedia?.dispose();
//this.localMedia = undefined;
this.localMedia?.dispose();
this.localMedia = undefined;
if (this.peerConnection && this.peerConnection.signalingState !== 'closed') {
this.peerConnection.close();
}
}
private getSDPMetadata(): SDPStreamMetadata {
const metadata = {};
if (this.localMedia?.userMedia) {
const streamId = this.localMedia.userMedia.id;
const streamSender = this.peerConnection.localStreams.get(streamId);
metadata[streamId] = {
purpose: SDPStreamMetadataPurpose.Usermedia,
audio_muted: !(streamSender?.audioSender?.enabled),
video_muted: !(streamSender?.videoSender?.enabled),
};
console.log("video_muted", streamSender?.videoSender?.enabled, streamSender?.videoSender?.transceiver?.direction, streamSender?.videoSender?.transceiver?.currentDirection, JSON.stringify(metadata));
}
if (this.localMedia?.screenShare) {
const streamId = this.localMedia.screenShare.id;
metadata[streamId] = {
purpose: SDPStreamMetadataPurpose.Screenshare
};
}
return metadata;
}
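The metadata built above is keyed by stream id, with a purpose plus mute flags as in the SDP stream metadata proposal (MSC3077). An illustrative example of the resulting object; the stream ids are placeholders.

const exampleMetadata: SDPStreamMetadata = {
    "user-media-stream-id": {
        purpose: SDPStreamMetadataPurpose.Usermedia,
        audio_muted: false,
        video_muted: false,
    },
    "screen-share-stream-id": {
        purpose: SDPStreamMetadataPurpose.Screenshare,
    },
};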
private updateRemoteMedia(log: ILogItem) {
this._remoteMedia.userMedia = undefined;
this._remoteMedia.screenShare = undefined;
if (this.remoteSDPStreamMetadata) {
for (const [streamId, streamReceiver] of this.peerConnection.remoteStreams.entries()) {
const metaData = this.remoteSDPStreamMetadata[streamId];
if (metaData) {
if (metaData.purpose === SDPStreamMetadataPurpose.Usermedia) {
this._remoteMedia.userMedia = streamReceiver.stream;
} else if (metaData.purpose === SDPStreamMetadataPurpose.Screenshare) {
this._remoteMedia.screenShare = streamReceiver.stream;
}
}
}
}
this.options.emitUpdate(this, undefined);
}
private async delay(timeoutMs: number): Promise<void> {
// Allow a short time for initial candidates to be gathered
const timeout = this.disposables.track(this.options.createTimeout(timeoutMs));

View file

@ -165,7 +165,7 @@ export class GroupCall extends EventEmitter<{change: never}> {
this._state = GroupCallState.Creating;
this.emitChange();
this.callContent = Object.assign({
"m.type": localMedia.cameraTrack ? "m.video" : "m.voice",
"m.type": localMedia.userMedia?.videoTrack ? "m.video" : "m.voice",
}, this.callContent);
const request = this.options.hsApi.sendState(this.roomId, EventType.GroupCall, this.id, this.callContent!, {log});
await request.response();

View file

@ -19,10 +19,9 @@ import {makeTxnId, makeId} from "../../common";
import {EventType, CallErrorCode} from "../callEventTypes";
import {formatToDeviceMessagesPayload} from "../../common";
import type {Options as PeerCallOptions} from "../PeerCall";
import type {Options as PeerCallOptions, RemoteMedia} from "../PeerCall";
import type {LocalMedia} from "../LocalMedia";
import type {HomeServerApi} from "../../net/HomeServerApi";
import type {Track} from "../../../platform/types/MediaDevices";
import type {MCallBase, MGroupCallBase, SignallingMessage, CallDeviceMembership} from "../callEventTypes";
import type {GroupCall} from "./GroupCall";
import type {RoomMember} from "../../room/members/RoomMember";
@ -60,8 +59,8 @@ export class Member {
private readonly logItem: ILogItem,
) {}
get remoteTracks(): Track[] {
return this.peerCall?.remoteTracks ?? [];
get remoteMedia(): RemoteMedia | undefined {
return this.peerCall?.remoteMedia;
}
get isConnected(): boolean {

View file

@ -18,7 +18,7 @@ import {Track, Stream} from "./MediaDevices";
import {SDPStreamMetadataPurpose} from "../../matrix/calls/callEventTypes";
export interface WebRTC {
createPeerConnection(handler: PeerConnectionHandler, forceTURN: boolean, turnServers: RTCIceServer[], iceCandidatePoolSize): PeerConnection;
createPeerConnection(handler: PeerConnectionHandler, forceTURN: boolean, turnServers: RTCIceServer[], iceCandidatePoolSize: number): PeerConnection;
}
export interface StreamSender {
@ -41,7 +41,7 @@ export interface TrackReceiver {
export interface TrackSender extends TrackReceiver {
/** replaces the track if possible without renegotiation. Can throw. */
replaceTrack(track: Track): Promise<void>;
replaceTrack(track: Track | undefined): Promise<void>;
/** make any needed adjustments to the sender or transceiver settings
* depending on the purpose, after adding the track to the connection */
prepareForPurpose(purpose: SDPStreamMetadataPurpose): void;
@ -61,8 +61,8 @@ export interface PeerConnection {
get iceGatheringState(): RTCIceGatheringState;
get signalingState(): RTCSignalingState;
get localDescription(): RTCSessionDescription | undefined;
get localStreams(): ReadonlyArray<StreamSender>;
get remoteStreams(): ReadonlyArray<StreamReceiver>;
get localStreams(): ReadonlyMap<string, StreamSender>;
get remoteStreams(): ReadonlyMap<string, StreamReceiver>;
createOffer(): Promise<RTCSessionDescriptionInit>;
createAnswer(): Promise<RTCSessionDescriptionInit>;
setLocalDescription(description?: RTCSessionDescriptionInit): Promise<void>;
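Turning localStreams and remoteStreams into maps keyed by stream id lets callers address a specific stream directly, which is what PeerCall does when building SDP metadata and when applying local media. A small usage sketch under those assumptions:

declare const peerConnection: PeerConnection;
declare const localMedia: LocalMedia;

// look up the sender side of the user media stream by its stream id
const streamSender = peerConnection.localStreams.get(localMedia.userMedia!.id);
const audioMuted = !(streamSender?.audioSender?.enabled);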

View file

@ -72,8 +72,8 @@ export class MediaDevicesWrapper implements IMediaDevices {
export class StreamWrapper implements Stream {
public audioTrack: AudioTrackWrapper | undefined;
public videoTrack: TrackWrapper | undefined;
public audioTrack: AudioTrackWrapper | undefined = undefined;
public videoTrack: TrackWrapper | undefined = undefined;
constructor(public readonly stream: MediaStream) {
for (const track of stream.getTracks()) {
@ -91,13 +91,13 @@ export class StreamWrapper implements Stream {
if (track.kind === "video") {
if (!this.videoTrack || track.id !== this.videoTrack.track.id) {
this.videoTrack = new TrackWrapper(track, this.stream);
return this.videoTrack;
}
return this.videoTrack;
} else if (track.kind === "audio") {
if (!this.audioTrack || track.id !== this.audioTrack.track.id) {
this.audioTrack = new AudioTrackWrapper(track, this.stream);
return this.audioTrack;
}
return this.audioTrack;
}
}
}

View file

@ -62,10 +62,10 @@ export class DOMStreamSender implements StreamSender {
if (transceiver && sender.track) {
const trackWrapper = this.stream.update(sender.track);
if (trackWrapper) {
if (trackWrapper.kind === TrackKind.Video) {
if (trackWrapper.kind === TrackKind.Video && (!this.videoSender || this.videoSender.track.id !== trackWrapper.id)) {
this.videoSender = new DOMTrackSender(trackWrapper, transceiver);
return this.videoSender;
} else {
} else if (trackWrapper.kind === TrackKind.Audio && (!this.audioSender || this.audioSender.track.id !== trackWrapper.id)) {
this.audioSender = new DOMTrackSender(trackWrapper, transceiver);
return this.audioSender;
}
@ -105,20 +105,20 @@ export class DOMTrackSenderOrReceiver implements TrackReceiver {
) {}
get enabled(): boolean {
return this.transceiver.currentDirection === "sendrecv" ||
this.transceiver.currentDirection === this.exclusiveValue;
return this.transceiver.direction === "sendrecv" ||
this.transceiver.direction === this.exclusiveValue;
}
enable(enabled: boolean) {
if (enabled !== this.enabled) {
if (enabled) {
if (this.transceiver.currentDirection === "inactive") {
if (this.transceiver.direction === "inactive") {
this.transceiver.direction = this.exclusiveValue;
} else {
this.transceiver.direction = "sendrecv";
}
} else {
if (this.transceiver.currentDirection === "sendrecv") {
if (this.transceiver.direction === "sendrecv") {
this.transceiver.direction = this.excludedValue;
} else {
this.transceiver.direction = "inactive";
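The switch from currentDirection to direction matters because direction is the locally requested direction and can be read and written immediately, whereas currentDirection only reflects the negotiated state and stays null until an offer/answer round has completed; reading currentDirection would report a track as disabled before the first negotiation finishes. A small illustration:

const pc = new RTCPeerConnection();
const transceiver = pc.addTransceiver("video");
console.log(transceiver.direction);        // "sendrecv" right away
console.log(transceiver.currentDirection); // null until negotiation has completed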
@ -145,7 +145,7 @@ export class DOMTrackSender extends DOMTrackSenderOrReceiver {
super(track, transceiver, "sendonly", "recvonly");
}
/** replaces the track if possible without renegotiation. Can throw. */
replaceTrack(track: Track): Promise<void> {
replaceTrack(track: Track | undefined): Promise<void> {
return this.transceiver.sender.replaceTrack(track ? (track as TrackWrapper).track : null);
}
@ -192,8 +192,8 @@ export class DOMTrackSender extends DOMTrackSenderOrReceiver {
class DOMPeerConnection implements PeerConnection {
private readonly peerConnection: RTCPeerConnection;
private readonly handler: PeerConnectionHandler;
public readonly localStreams: DOMStreamSender[];
public readonly remoteStreams: DOMStreamReceiver[];
public readonly localStreams: Map<string, DOMStreamSender> = new Map();
public readonly remoteStreams: Map<string, DOMStreamReceiver> = new Map();
constructor(handler: PeerConnectionHandler, forceTURN: boolean, turnServers: RTCIceServer[], iceCandidatePoolSize) {
this.handler = handler;
@ -238,10 +238,11 @@ class DOMPeerConnection implements PeerConnection {
throw new Error("Not a TrackWrapper");
}
const sender = this.peerConnection.addTrack(track.track, track.stream);
let streamSender: DOMStreamSender | undefined = this.localStreams.find(s => s.stream.id === track.stream.id);
let streamSender = this.localStreams.get(track.stream.id);
if (!streamSender) {
// TODO: reuse existing stream wrapper here?
streamSender = new DOMStreamSender(new StreamWrapper(track.stream));
this.localStreams.push(streamSender);
this.localStreams.set(track.stream.id, streamSender);
}
const trackSender = streamSender.update(this.peerConnection.getTransceivers(), sender);
return trackSender;
@ -307,7 +308,7 @@ class DOMPeerConnection implements PeerConnection {
dispose(): void {
this.deregisterHandler();
for (const r of this.remoteStreams) {
for (const r of this.remoteStreams.values()) {
r.stream.dispose();
}
}
@ -328,23 +329,21 @@ class DOMPeerConnection implements PeerConnection {
}
onRemoteStreamEmpty = (stream: RemoteStreamWrapper): void => {
const idx = this.remoteStreams.findIndex(r => r.stream === stream);
if (idx !== -1) {
this.remoteStreams.splice(idx, 1);
if (this.remoteStreams.delete(stream.id)) {
this.handler.onRemoteStreamRemoved(stream);
}
}
private handleRemoteTrack(evt: RTCTrackEvent) {
if (evt.streams.length !== 0) {
if (evt.streams.length !== 1) {
throw new Error("track in multiple streams is not supported");
}
const stream = evt.streams[0];
const transceivers = this.peerConnection.getTransceivers();
let streamReceiver: DOMStreamReceiver | undefined = this.remoteStreams.find(r => r.stream.id === stream.id);
let streamReceiver: DOMStreamReceiver | undefined = this.remoteStreams.get(stream.id);
if (!streamReceiver) {
streamReceiver = new DOMStreamReceiver(new RemoteStreamWrapper(stream, this.onRemoteStreamEmpty));
this.remoteStreams.push(streamReceiver);
this.remoteStreams.set(stream.id, streamReceiver);
}
const trackReceiver = streamReceiver.update(evt);
if (trackReceiver) {

View file

@ -16,14 +16,14 @@ limitations under the License.
import {TemplateView, TemplateBuilder} from "../../general/TemplateView";
import {ListView} from "../../general/ListView";
import {Track, TrackType} from "../../../../types/MediaDevices";
import type {TrackWrapper} from "../../../dom/MediaDevices";
import {Stream} from "../../../../types/MediaDevices";
import type {StreamWrapper} from "../../../dom/MediaDevices";
import type {CallViewModel, CallMemberViewModel} from "../../../../../domain/session/room/CallViewModel";
function bindVideoTracks<T>(t: TemplateBuilder<T>, video: HTMLVideoElement, propSelector: (vm: T) => Track[]) {
t.mapSideEffect(propSelector, tracks => {
if (tracks.length) {
video.srcObject = (tracks[0] as TrackWrapper).stream;
function bindVideoTracks<T>(t: TemplateBuilder<T>, video: HTMLVideoElement, propSelector: (vm: T) => Stream | undefined) {
t.mapSideEffect(propSelector, stream => {
if (stream) {
video.srcObject = (stream as StreamWrapper).stream;
}
});
return video;
@ -33,7 +33,7 @@ export class CallView extends TemplateView<CallViewModel> {
render(t: TemplateBuilder<CallViewModel>, vm: CallViewModel): HTMLElement {
return t.div({class: "CallView"}, [
t.p(vm => `Call ${vm.name} (${vm.id})`),
t.div({class: "CallView_me"}, bindVideoTracks(t, t.video({autoplay: true, width: 240}), vm => vm.localTracks)),
t.div({class: "CallView_me"}, bindVideoTracks(t, t.video({autoplay: true, width: 240}), vm => vm.localStream)),
t.view(new ListView({list: vm.memberViewModels}, vm => new MemberView(vm))),
t.div({class: "buttons"}, [
t.button({onClick: () => vm.leave()}, "Leave")
@ -44,6 +44,6 @@ export class CallView extends TemplateView<CallViewModel> {
class MemberView extends TemplateView<CallMemberViewModel> {
render(t: TemplateBuilder<CallMemberViewModel>, vm: CallMemberViewModel) {
return bindVideoTracks(t, t.video({autoplay: true, width: 360}), vm => vm.tracks);
return bindVideoTracks(t, t.video({autoplay: true, width: 360}), vm => vm.stream);
}
}

View file

@ -1485,10 +1485,10 @@ type-fest@^0.20.2:
resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4"
integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==
typescript@^4.3.5:
version "4.6.2"
resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.6.2.tgz#fe12d2727b708f4eef40f51598b3398baa9611d4"
integrity sha512-HM/hFigTBHZhLXshn9sN37H085+hQGeJHJ/X7LpBWLID/fbc2acUMfU+lGD98X81sKP+pFa9f0DZmCwB9GnbAg==
typescript@^4.4:
version "4.6.3"
resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.6.3.tgz#eefeafa6afdd31d725584c67a0eaba80f6fc6c6c"
integrity sha512-yNIatDa5iaofVozS/uQJEl3JRWLKKGJKh6Yaiv0GLGSuhpFJe7P3SbHZ8/yjAHRQwKRoA6YZqlfjXWmVzoVSMw==
typeson-registry@^1.0.0-alpha.20:
version "1.0.0-alpha.39"