WIP: expose streams, senders and receivers

This commit is contained in:
Bruno Windels 2022-04-12 21:20:24 +02:00
parent 36dc463d23
commit 2d4301fe5a
7 changed files with 308 additions and 192 deletions

View file

@ -75,6 +75,7 @@ export class CallTile extends SimpleTile {
async join() {
if (this.canJoin) {
const mediaTracks = await this.platform.mediaDevices.getMediaTracks(false, true);
// const screenShareTrack = await this.platform.mediaDevices.getScreenShareTrack();
const localMedia = new LocalMedia().withTracks(mediaTracks);
await this._call.join(localMedia);
}

View file

@ -15,37 +15,26 @@ limitations under the License.
*/
import {SDPStreamMetadataPurpose} from "./callEventTypes";
import {Track, AudioTrack, TrackType} from "../../platform/types/MediaDevices";
import {Stream} from "../../platform/types/MediaDevices";
import {SDPStreamMetadata} from "./callEventTypes";
export class LocalMedia {
constructor(
public readonly cameraTrack?: Track,
public readonly screenShareTrack?: Track,
public readonly microphoneTrack?: AudioTrack,
public readonly userMedia?: Stream,
public readonly screenShare?: Stream,
public readonly dataChannelOptions?: RTCDataChannelInit,
) {}
withTracks(tracks: Track[]) {
const cameraTrack = tracks.find(t => t.type === TrackType.Camera) ?? this.cameraTrack;
const screenShareTrack = tracks.find(t => t.type === TrackType.ScreenShare) ?? this.screenShareTrack;
const microphoneTrack = tracks.find(t => t.type === TrackType.Microphone) ?? this.microphoneTrack;
if (cameraTrack && microphoneTrack && cameraTrack.streamId !== microphoneTrack.streamId) {
throw new Error("The camera and audio track should have the same stream id");
}
return new LocalMedia(cameraTrack, screenShareTrack, microphoneTrack as AudioTrack, this.dataChannelOptions);
withUserMedia(stream: Stream) {
return new LocalMedia(stream, this.screenShare, this.dataChannelOptions);
}
withScreenShare(stream: Stream) {
return new LocalMedia(this.userMedia, stream, this.dataChannelOptions);
}
withDataChannel(options: RTCDataChannelInit): LocalMedia {
return new LocalMedia(this.cameraTrack, this.screenShareTrack, this.microphoneTrack as AudioTrack, options);
}
get tracks(): Track[] {
const tracks: Track[] = [];
if (this.cameraTrack) { tracks.push(this.cameraTrack); }
if (this.screenShareTrack) { tracks.push(this.screenShareTrack); }
if (this.microphoneTrack) { tracks.push(this.microphoneTrack); }
return tracks;
return new LocalMedia(this.userMedia, this.screenShare, options);
}
getSDPMetadata(): SDPStreamMetadata {
@ -54,8 +43,8 @@ export class LocalMedia {
if (userMediaTrack) {
metadata[userMediaTrack.streamId] = {
purpose: SDPStreamMetadataPurpose.Usermedia,
audio_muted: this.microphoneTrack?.muted ?? false,
video_muted: this.cameraTrack?.muted ?? false,
audio_muted: this.microphoneTrack?.muted ?? true,
video_muted: this.cameraTrack?.muted ?? true,
};
}
if (this.screenShareTrack) {
@ -67,13 +56,12 @@ export class LocalMedia {
}
clone() {
// TODO: implement
return this;
return new LocalMedia(this.userMedia?.clone(), this.screenShare?.clone(), this.dataChannelOptions);
}
dispose() {
this.cameraTrack?.stop();
this.microphoneTrack?.stop();
this.screenShareTrack?.stop();
this.userMedia?.audioTrack?.stop();
this.userMedia?.videoTrack?.stop();
this.screenShare?.videoTrack?.stop();
}
}

View file

@ -701,8 +701,6 @@ export class PeerCall implements IDisposable {
private updateRemoteSDPStreamMetadata(metadata: SDPStreamMetadata): void {
this.remoteSDPStreamMetadata = recursivelyAssign(this.remoteSDPStreamMetadata || {}, metadata, true);
// will rerequest stream purpose for all tracks and set track.type accordingly
this.peerConnection.notifyStreamPurposeChanged();
for (const track of this.peerConnection.remoteTracks) {
const streamMetaData = this.remoteSDPStreamMetadata?.[track.streamId];
if (streamMetaData) {
@ -757,6 +755,8 @@ export class PeerCall implements IDisposable {
this.iceDisconnectedTimeout?.abort();
this.iceDisconnectedTimeout = undefined;
this.setState(CallState.Connected, log);
const transceivers = this.peerConnection.peerConnection.getTransceivers();
console.log(transceivers);
} else if (state == 'failed') {
this.iceDisconnectedTimeout?.abort();
this.iceDisconnectedTimeout = undefined;

View file

@ -18,29 +18,32 @@ export interface MediaDevices {
// filter out audiooutput
enumerate(): Promise<MediaDeviceInfo[]>;
// to assign to a video element, we downcast to WrappedTrack and use the stream property.
getMediaTracks(audio: true | MediaDeviceInfo, video: boolean | MediaDeviceInfo): Promise<Track[]>;
getScreenShareTrack(): Promise<Track | undefined>;
getMediaTracks(audio: true | MediaDeviceInfo, video: boolean | MediaDeviceInfo): Promise<Stream>;
getScreenShareTrack(): Promise<Stream | undefined>;
}
export enum TrackType {
ScreenShare,
Camera,
Microphone,
/**
 * A platform-independent view of a media stream: at most one audio and one
 * video track, identified by the underlying stream id.
 */
export interface Stream {
    readonly audioTrack: AudioTrack | undefined;
    readonly videoTrack: Track | undefined;
    // id of the underlying stream, used to correlate with SDP stream metadata
    readonly id: string;
    /** Returns an independent copy of this stream and its tracks. */
    clone(): Stream;
}
// Track kind, mirroring MediaStreamTrack.kind ("video" | "audio") so DOM
// track kinds can be cast to this enum directly.
export enum TrackKind {
    Video = "video",
    Audio = "audio"
}
export interface Track {
get type(): TrackType;
get label(): string;
get id(): string;
get streamId(): string;
get settings(): MediaTrackSettings;
get muted(): boolean;
setMuted(muted: boolean): void;
readonly kind: TrackKind;
readonly label: string;
readonly id: string;
readonly settings: MediaTrackSettings;
stop(): void;
clone(): Track;
}
/** An audio track that additionally exposes a speaking indicator. */
export interface AudioTrack extends Track {
    // TODO: how to emit updates on this?
    get isSpeaking(): boolean;
}

View file

@ -14,38 +14,62 @@ See the License for the specific language governing permissions and
limitations under the License.
*/
import {Track, TrackType} from "./MediaDevices";
import {Track, Stream} from "./MediaDevices";
import {SDPStreamMetadataPurpose} from "../../matrix/calls/callEventTypes";
export interface WebRTC {
createPeerConnection(handler: PeerConnectionHandler, forceTURN: boolean, turnServers: RTCIceServer[], iceCandidatePoolSize): PeerConnection;
}
/**
 * Sending half of a local stream: the wrapped stream plus the per-kind
 * track senders (either may be undefined when that kind is absent).
 */
export interface StreamSender {
    get stream(): Stream;
    get audioSender(): TrackSender | undefined;
    get videoSender(): TrackSender | undefined;
}
/**
 * Receiving half of a remote stream: the wrapped stream plus the per-kind
 * track receivers (either may be undefined when that kind is absent).
 */
export interface StreamReceiver {
    get stream(): Stream;
    get audioReceiver(): TrackReceiver | undefined;
    get videoReceiver(): TrackReceiver | undefined;
}
/** The receiving side of a single track within a stream. */
export interface TrackReceiver {
    get track(): Track;
    get enabled(): boolean;
    // Explicit `: void` added: without a return type this method signature has
    // an implicit `any` return under noImplicitAny.
    enable(enabled: boolean): void; // this modifies the transceiver direction
}
/** The sending side of a single track; extends the receiver contract with
 * track replacement and purpose-specific tuning. */
export interface TrackSender extends TrackReceiver {
    /** replaces the track if possible without renegotiation. Can throw. */
    replaceTrack(track: Track): Promise<void>;
    /** make any needed adjustments to the sender or transceiver settings
     * depending on the purpose, after adding the track to the connection */
    prepareForPurpose(purpose: SDPStreamMetadataPurpose): void;
}
export interface PeerConnectionHandler {
onIceConnectionStateChange(state: RTCIceConnectionState);
onLocalIceCandidate(candidate: RTCIceCandidate);
onIceGatheringStateChange(state: RTCIceGatheringState);
onRemoteTracksChanged(tracks: Track[]);
onRemoteStreamRemoved(stream: Stream);
onRemoteTracksAdded(receiver: TrackReceiver);
onRemoteDataChannel(dataChannel: any | undefined);
onNegotiationNeeded();
// request the type of incoming stream
getPurposeForStreamId(streamId: string): SDPStreamMetadataPurpose;
}
export interface PeerConnection {
notifyStreamPurposeChanged(): void;
get remoteTracks(): Track[];
get iceGatheringState(): RTCIceGatheringState;
get signalingState(): RTCSignalingState;
get localDescription(): RTCSessionDescription | undefined;
get localStreams(): ReadonlyArray<StreamSender>;
get remoteStreams(): ReadonlyArray<StreamReceiver>;
createOffer(): Promise<RTCSessionDescriptionInit>;
createAnswer(): Promise<RTCSessionDescriptionInit>;
setLocalDescription(description?: RTCSessionDescriptionInit): Promise<void>;
setRemoteDescription(description: RTCSessionDescriptionInit): Promise<void>;
addIceCandidate(candidate: RTCIceCandidate): Promise<void>;
addTrack(track: Track): void;
removeTrack(track: Track): boolean;
replaceTrack(oldTrack: Track, newTrack: Track): Promise<boolean>;
addTrack(track: Track): TrackSender | undefined;
removeTrack(track: TrackSender): void;
createDataChannel(options: RTCDataChannelInit): any;
dispose(): void;
close(): void;

View file

@ -15,7 +15,7 @@ See the License for the specific language governing permissions and
limitations under the License.
*/
import {MediaDevices as IMediaDevices, TrackType, Track, AudioTrack} from "../../types/MediaDevices";
import {MediaDevices as IMediaDevices, Stream, Track, TrackKind, AudioTrack} from "../../types/MediaDevices";
const POLLING_INTERVAL = 200; // ms
export const SPEAKING_THRESHOLD = -60; // dB
@ -28,22 +28,14 @@ export class MediaDevicesWrapper implements IMediaDevices {
return this.mediaDevices.enumerateDevices();
}
async getMediaTracks(audio: true | MediaDeviceInfo, video: boolean | MediaDeviceInfo): Promise<Track[]> {
async getMediaTracks(audio: true | MediaDeviceInfo, video: boolean | MediaDeviceInfo): Promise<Stream> {
const stream = await this.mediaDevices.getUserMedia(this.getUserMediaContraints(audio, video));
const tracks = stream.getTracks().map(t => {
const type = t.kind === "audio" ? TrackType.Microphone : TrackType.Camera;
return wrapTrack(t, stream, type);
});
return tracks;
return new StreamWrapper(stream);
}
async getScreenShareTrack(): Promise<Track | undefined> {
async getScreenShareTrack(): Promise<Stream | undefined> {
const stream = await this.mediaDevices.getDisplayMedia(this.getScreenshareContraints());
const videoTrack = stream.getTracks().find(t => t.kind === "video");
if (videoTrack) {
return wrapTrack(videoTrack, stream, TrackType.ScreenShare);
}
return;
return new StreamWrapper(stream);
}
private getUserMediaContraints(audio: boolean | MediaDeviceInfo, video: boolean | MediaDeviceInfo): MediaStreamConstraints {
@ -78,43 +70,50 @@ export class MediaDevicesWrapper implements IMediaDevices {
}
}
export function wrapTrack(track: MediaStreamTrack, stream: MediaStream, type: TrackType) {
if (track.kind === "audio") {
return new AudioTrackWrapper(track, stream, type);
} else {
return new TrackWrapper(track, stream, type);
/**
 * Wraps a DOM MediaStream as the platform-independent Stream interface,
 * tracking at most one audio and one video track as wrapped tracks.
 */
export class StreamWrapper implements Stream {
    public audioTrack: AudioTrackWrapper | undefined;
    public videoTrack: TrackWrapper | undefined;

    constructor(public readonly stream: MediaStream) {
        stream.getTracks().forEach(t => this.update(t));
    }

    get id(): string { return this.stream.id; }

    clone(): Stream {
        return new StreamWrapper(this.stream.clone());
    }

    /**
     * Adopts the given DOM track as this stream's audio or video track.
     * Returns the freshly created wrapper, or undefined when the track was
     * already known (same id) or has an unrecognised kind.
     */
    update(track: MediaStreamTrack): TrackWrapper | undefined {
        switch (track.kind) {
            case "video": {
                const changed = !this.videoTrack || this.videoTrack.track.id !== track.id;
                if (changed) {
                    this.videoTrack = new TrackWrapper(track, this.stream);
                    return this.videoTrack;
                }
                break;
            }
            case "audio": {
                const changed = !this.audioTrack || this.audioTrack.track.id !== track.id;
                if (changed) {
                    this.audioTrack = new AudioTrackWrapper(track, this.stream);
                    return this.audioTrack;
                }
                break;
            }
        }
        return undefined;
    }
}
export class TrackWrapper implements Track {
constructor(
public readonly track: MediaStreamTrack,
public readonly stream: MediaStream,
private _type: TrackType,
public readonly stream: MediaStream
) {}
get type(): TrackType { return this._type; }
get kind(): TrackKind { return this.track.kind as TrackKind; }
get label(): string { return this.track.label; }
get id(): string { return this.track.id; }
get streamId(): string { return this.stream.id; }
get muted(): boolean { return this.track.muted; }
get settings(): MediaTrackSettings { return this.track.getSettings(); }
setMuted(muted: boolean): void {
this.track.enabled = !muted;
}
setType(type: TrackType): void {
this._type = type;
}
stop() {
this.track.stop();
}
clone() {
return this.track.clone();
}
stop() { this.track.stop(); }
}
export class AudioTrackWrapper extends TrackWrapper {
@ -127,8 +126,8 @@ export class AudioTrackWrapper extends TrackWrapper {
private volumeLooperTimeout: number;
private speakingVolumeSamples: number[];
constructor(track: MediaStreamTrack, stream: MediaStream, type: TrackType) {
super(track, stream, type);
constructor(track: MediaStreamTrack, stream: MediaStream) {
super(track, stream);
this.speakingVolumeSamples = new Array(SPEAKING_SAMPLE_COUNT).fill(-Infinity);
this.initVolumeMeasuring();
this.measureVolumeActivity(true);

View file

@ -14,9 +14,9 @@ See the License for the specific language governing permissions and
limitations under the License.
*/
import {TrackWrapper, wrapTrack} from "./MediaDevices";
import {Track, TrackType} from "../../types/MediaDevices";
import {WebRTC, PeerConnectionHandler, DataChannel, PeerConnection} from "../../types/WebRTC";
import {StreamWrapper, TrackWrapper, AudioTrackWrapper} from "./MediaDevices";
import {Stream, Track, AudioTrack, TrackKind} from "../../types/MediaDevices";
import {WebRTC, PeerConnectionHandler, StreamSender, TrackSender, StreamReceiver, TrackReceiver, PeerConnection} from "../../types/WebRTC";
import {SDPStreamMetadataPurpose} from "../../../matrix/calls/callEventTypes";
const POLLING_INTERVAL = 200; // ms
@ -29,11 +29,171 @@ export class DOMWebRTC implements WebRTC {
}
}
/**
 * StreamWrapper for remote streams: watches the DOM "removetrack" event to
 * clear the matching wrapped track, and invokes emptyCallback once the
 * stream has no tracks left. Call dispose() to detach the listener.
 */
export class RemoteStreamWrapper extends StreamWrapper {
    constructor(stream: MediaStream, private readonly emptyCallback: (stream: RemoteStreamWrapper) => void) {
        super(stream);
        this.stream.addEventListener("removetrack", this.onTrackRemoved);
    }

    // arrow-function field so the listener can be removed again in dispose()
    onTrackRemoved = (evt: MediaStreamTrackEvent) => {
        const {id} = evt.track;
        if (id === this.audioTrack?.track.id) {
            this.audioTrack = undefined;
        } else if (id === this.videoTrack?.track.id) {
            this.videoTrack = undefined;
        }
        if (!(this.audioTrack || this.videoTrack)) {
            this.emptyCallback(this);
        }
    };

    dispose() {
        this.stream.removeEventListener("removetrack", this.onTrackRemoved);
    }
}
export class DOMStreamSender implements StreamSender {
public audioSender: DOMTrackSender | undefined;
public videoSender: DOMTrackSender | undefined;
constructor(public readonly stream: StreamWrapper) {}
update(transceivers: ReadonlyArray<RTCRtpTransceiver>, sender: RTCRtpSender): DOMTrackSender | undefined {
const transceiver = transceivers.find(t => t.sender === sender);
if (transceiver && sender.track) {
const trackWrapper = this.stream.update(sender.track);
if (trackWrapper) {
if (trackWrapper.kind === TrackKind.Video) {
this.videoSender = new DOMTrackSender(trackWrapper, transceiver);
return this.videoSender;
} else {
this.audioSender = new DOMTrackSender(trackWrapper, transceiver);
return this.audioSender;
}
}
}
}
}
export class DOMStreamReceiver implements StreamReceiver {
public audioReceiver: DOMTrackReceiver | undefined;
public videoReceiver: DOMTrackReceiver | undefined;
constructor(public readonly stream: RemoteStreamWrapper) {}
update(event: RTCTrackEvent): DOMTrackReceiver | undefined {
const {receiver} = event;
const {track} = receiver;
const trackWrapper = this.stream.update(track);
if (trackWrapper) {
if (trackWrapper.kind === TrackKind.Video) {
this.videoReceiver = new DOMTrackReceiver(trackWrapper, event.transceiver);
return this.videoReceiver;
} else {
this.audioReceiver = new DOMTrackReceiver(trackWrapper, event.transceiver);
return this.audioReceiver;
}
}
}
}
/**
 * Shared base for DOMTrackSender / DOMTrackReceiver: wraps a track and its
 * RTCRtpTransceiver and maps enabled/enable() onto the transceiver direction.
 *
 * exclusiveValue is the direction meaning "only this side is active"
 * ("sendonly" for a sender, "recvonly" for a receiver); excludedValue is the
 * opposite one-sided direction.
 */
export class DOMTrackSenderOrReceiver implements TrackReceiver {
    constructor(
        public readonly track: TrackWrapper,
        public readonly transceiver: RTCRtpTransceiver,
        private readonly exclusiveValue: RTCRtpTransceiverDirection,
        private readonly excludedValue: RTCRtpTransceiverDirection
    ) {}

    // reflects the *negotiated* direction (currentDirection), which is null
    // before the first negotiation completes and only changes on renegotiation
    get enabled(): boolean {
        return this.transceiver.currentDirection === "sendrecv" ||
            this.transceiver.currentDirection === this.exclusiveValue;
    }

    enable(enabled: boolean) {
        // NOTE(review): the branches below read currentDirection but write
        // direction; until a renegotiation happens, a second enable() call
        // will consult the stale negotiated value — confirm this is the
        // intended behaviour of this WIP code.
        if (enabled !== this.enabled) {
            if (enabled) {
                if (this.transceiver.currentDirection === "inactive") {
                    this.transceiver.direction = this.exclusiveValue;
                } else {
                    this.transceiver.direction = "sendrecv";
                }
            } else {
                if (this.transceiver.currentDirection === "sendrecv") {
                    this.transceiver.direction = this.excludedValue;
                } else {
                    this.transceiver.direction = "inactive";
                }
            }
        }
    }
}
/** Receiving side: "recvonly" is this side's exclusive direction, "sendonly" the excluded one. */
export class DOMTrackReceiver extends DOMTrackSenderOrReceiver {
    constructor(
        track: TrackWrapper,
        transceiver: RTCRtpTransceiver,
    ) {
        super(track, transceiver, "recvonly", "sendonly");
    }
}
/** Sending side: "sendonly" is this side's exclusive direction, "recvonly" the excluded one. */
export class DOMTrackSender extends DOMTrackSenderOrReceiver {
    constructor(
        track: TrackWrapper,
        transceiver: RTCRtpTransceiver,
    ) {
        super(track, transceiver, "sendonly", "recvonly");
    }

    /** replaces the track if possible without renegotiation. Can throw. */
    replaceTrack(track: Track): Promise<void> {
        // the null-guard mirrors RTCRtpSender.replaceTrack accepting null to clear the track
        return this.transceiver.sender.replaceTrack(track ? (track as TrackWrapper).track : null);
    }

    /** make any needed adjustments to the sender or transceiver settings
     * depending on the purpose, after adding the track to the connection */
    prepareForPurpose(purpose: SDPStreamMetadataPurpose): void {
        if (purpose === SDPStreamMetadataPurpose.Screenshare) {
            this.getRidOfRTXCodecs();
        }
    }

    /**
     * This method removes all video/rtx codecs from screensharing video
     * transceivers. This is necessary since they can cause problems. Without
     * this the following steps should produce an error:
     *   Chromium calls Firefox
     *   Firefox answers
     *   Firefox starts screen-sharing
     *   Chromium starts screen-sharing
     *   Call crashes for Chromium with:
     *   [96685:23:0518/162603.933321:ERROR:webrtc_video_engine.cc(3296)] RTX codec (PT=97) mapped to PT=96 which is not in the codec list.
     *   [96685:23:0518/162603.933377:ERROR:webrtc_video_engine.cc(1171)] GetChangedRecvParameters called without any video codecs.
     *   [96685:23:0518/162603.933430:ERROR:sdp_offer_answer.cc(4302)] Failed to set local video description recv parameters for m-section with mid='2'. (INVALID_PARAMETER)
     */
    private getRidOfRTXCodecs(): void {
        // RTCRtpReceiver.getCapabilities and RTCRtpSender.getCapabilities don't seem to be supported on FF
        if (!RTCRtpReceiver.getCapabilities || !RTCRtpSender.getCapabilities) return;
        const recvCodecs = RTCRtpReceiver.getCapabilities("video")?.codecs ?? [];
        const sendCodecs = RTCRtpSender.getCapabilities("video")?.codecs ?? [];
        // Fix: the previous implementation spliced entries out of the array
        // while iterating it with for..of, which skips the element that
        // follows each removal — adjacent video/rtx codecs survived the
        // filter. filter() removes every rtx codec reliably.
        const codecs = [...sendCodecs, ...recvCodecs]
            .filter(codec => codec.mimeType !== "video/rtx");
        if (this.transceiver.sender.track?.kind === "video" ||
            this.transceiver.receiver.track?.kind === "video") {
            this.transceiver.setCodecPreferences(codecs);
        }
    }
}
class DOMPeerConnection implements PeerConnection {
private readonly peerConnection: RTCPeerConnection;
private readonly handler: PeerConnectionHandler;
//private dataChannelWrapper?: DOMDataChannel;
private _remoteTracks: TrackWrapper[] = [];
public readonly localStreams: DOMStreamSender[];
public readonly remoteStreams: DOMStreamReceiver[];
constructor(handler: PeerConnectionHandler, forceTURN: boolean, turnServers: RTCIceServer[], iceCandidatePoolSize) {
this.handler = handler;
@ -45,7 +205,6 @@ class DOMPeerConnection implements PeerConnection {
this.registerHandler();
}
get remoteTracks(): Track[] { return this._remoteTracks; }
get iceGatheringState(): RTCIceGatheringState { return this.peerConnection.iceGatheringState; }
get localDescription(): RTCSessionDescription | undefined { return this.peerConnection.localDescription ?? undefined; }
get signalingState(): RTCSignalingState { return this.peerConnection.signalingState; }
@ -74,48 +233,26 @@ class DOMPeerConnection implements PeerConnection {
return this.peerConnection.close();
}
addTrack(track: Track): void {
addTrack(track: Track): DOMTrackSender | undefined {
if (!(track instanceof TrackWrapper)) {
throw new Error("Not a TrackWrapper");
}
this.peerConnection.addTrack(track.track, track.stream);
if (track.type === TrackType.ScreenShare) {
this.getRidOfRTXCodecs(track);
const sender = this.peerConnection.addTrack(track.track, track.stream);
let streamSender: DOMStreamSender | undefined = this.localStreams.find(s => s.stream.id === track.stream.id);
if (!streamSender) {
streamSender = new DOMStreamSender(new StreamWrapper(track.stream));
this.localStreams.push(streamSender);
}
const trackSender = streamSender.update(this.peerConnection.getTransceivers(), sender);
return trackSender;
}
removeTrack(track: Track): boolean {
if (!(track instanceof TrackWrapper)) {
throw new Error("Not a TrackWrapper");
}
const sender = this.peerConnection.getSenders().find(s => s.track === track.track);
if (sender) {
this.peerConnection.removeTrack(sender);
return true;
}
return false;
}
async replaceTrack(oldTrack: Track, newTrack: Track): Promise<boolean> {
if (!(oldTrack instanceof TrackWrapper) || !(newTrack instanceof TrackWrapper)) {
throw new Error("Not a TrackWrapper");
}
const sender = this.peerConnection.getSenders().find(s => s.track === oldTrack.track);
if (sender) {
await sender.replaceTrack(newTrack.track);
if (newTrack.type === TrackType.ScreenShare) {
this.getRidOfRTXCodecs(newTrack);
}
return true;
}
return false;
}
notifyStreamPurposeChanged(): void {
for (const track of this.remoteTracks) {
const wrapper = track as TrackWrapper;
wrapper.setType(this.getRemoteTrackType(wrapper.track, wrapper.streamId));
removeTrack(sender: TrackSender): void {
if (!(sender instanceof DOMTrackSender)) {
throw new Error("Not a DOMTrackSender");
}
this.peerConnection.removeTrack((sender as DOMTrackSender).transceiver.sender);
// TODO: update localStreams
}
createDataChannel(options: RTCDataChannelInit): any {
@ -170,6 +307,9 @@ class DOMPeerConnection implements PeerConnection {
dispose(): void {
this.deregisterHandler();
for (const r of this.remoteStreams) {
r.stream.dispose();
}
}
private handleLocalIceCandidate(event: RTCPeerConnectionIceEvent) {
@ -187,67 +327,28 @@ class DOMPeerConnection implements PeerConnection {
}
}
onRemoteStreamEmpty = (stream: RemoteStreamWrapper): void => {
const idx = this.remoteStreams.findIndex(r => r.stream === stream);
if (idx !== -1) {
this.remoteStreams.splice(idx, 1);
this.handler.onRemoteStreamRemoved(stream);
}
}
private handleRemoteTrack(evt: RTCTrackEvent) {
// TODO: unit test this code somehow
// the tracks on the new stream (with their stream)
const updatedTracks = evt.streams.flatMap(stream => stream.getTracks().map(track => {return {stream, track};}));
// of the tracks we already know about, filter the ones that aren't in the new stream
const withoutRemovedTracks = this._remoteTracks.filter(t => updatedTracks.some(ut => t.track.id === ut.track.id));
// of the new tracks, filter the ones that we didn't already knew about
const addedTracks = updatedTracks.filter(ut => !this._remoteTracks.some(t => t.track.id === ut.track.id));
// wrap them
const wrappedAddedTracks = addedTracks.map(t => wrapTrack(t.track, t.stream, this.getRemoteTrackType(t.track, t.stream.id)));
// and concat the tracks for other streams with the added tracks
this._remoteTracks = withoutRemovedTracks.concat(...wrappedAddedTracks);
this.handler.onRemoteTracksChanged(this.remoteTracks);
}
private getRemoteTrackType(track: MediaStreamTrack, streamId: string): TrackType {
if (track.kind === "video") {
const purpose = this.handler.getPurposeForStreamId(streamId);
return purpose === SDPStreamMetadataPurpose.Usermedia ? TrackType.Camera : TrackType.ScreenShare;
} else {
return TrackType.Microphone;
if (evt.streams.length !== 0) {
throw new Error("track in multiple streams is not supported");
}
}
/**
* This method removes all video/rtx codecs from screensharing video
* transceivers. This is necessary since they can cause problems. Without
* this the following steps should produce an error:
* Chromium calls Firefox
* Firefox answers
* Firefox starts screen-sharing
* Chromium starts screen-sharing
* Call crashes for Chromium with:
* [96685:23:0518/162603.933321:ERROR:webrtc_video_engine.cc(3296)] RTX codec (PT=97) mapped to PT=96 which is not in the codec list.
* [96685:23:0518/162603.933377:ERROR:webrtc_video_engine.cc(1171)] GetChangedRecvParameters called without any video codecs.
* [96685:23:0518/162603.933430:ERROR:sdp_offer_answer.cc(4302)] Failed to set local video description recv parameters for m-section with mid='2'. (INVALID_PARAMETER)
*/
private getRidOfRTXCodecs(screensharingTrack: TrackWrapper): void {
// RTCRtpReceiver.getCapabilities and RTCRtpSender.getCapabilities don't seem to be supported on FF
if (!RTCRtpReceiver.getCapabilities || !RTCRtpSender.getCapabilities) return;
const recvCodecs = RTCRtpReceiver.getCapabilities("video")?.codecs ?? [];
const sendCodecs = RTCRtpSender.getCapabilities("video")?.codecs ?? [];
const codecs = [...sendCodecs, ...recvCodecs];
for (const codec of codecs) {
if (codec.mimeType === "video/rtx") {
const rtxCodecIndex = codecs.indexOf(codec);
codecs.splice(rtxCodecIndex, 1);
}
const stream = evt.streams[0];
const transceivers = this.peerConnection.getTransceivers();
let streamReceiver: DOMStreamReceiver | undefined = this.remoteStreams.find(r => r.stream.id === stream.id);
if (!streamReceiver) {
streamReceiver = new DOMStreamReceiver(new RemoteStreamWrapper(stream, this.onRemoteStreamEmpty));
this.remoteStreams.push(streamReceiver);
}
for (const trans of this.peerConnection.getTransceivers()) {
if (trans.sender.track === screensharingTrack.track &&
(
trans.sender.track?.kind === "video" ||
trans.receiver.track?.kind === "video"
)
) {
trans.setCodecPreferences(codecs);
}
const trackReceiver = streamReceiver.update(evt);
if (trackReceiver) {
this.handler.onRemoteTracksAdded(trackReceiver);
}
}
}