Skip to content

Commit

Permalink
Merge pull request #1972 from SimonBrandner/feature/answer-no-cam
Browse files Browse the repository at this point in the history
Try to answer a call without video if we can't access the camera
  • Loading branch information
dbkr authored Oct 14, 2021
2 parents 0a3d820 + 3aefc9f commit 6804e42
Show file tree
Hide file tree
Showing 3 changed files with 69 additions and 9 deletions.
27 changes: 27 additions & 0 deletions spec/unit/webrtc/call.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -94,6 +94,17 @@ class MockMediaStream {
addEventListener() {}
}

// Minimal stand-in for the browser's MediaDeviceInfo, exposing only the
// `kind` field that the code under test inspects.
// NOTE(review): real MediaDeviceInfo.kind values are "audioinput"/"videoinput"/
// "audiooutput", not "audio"/"video" — confirm that MediaHandler's kind filters
// (which match on "audioinput"/"videoinput") are actually exercised by these mocks.
class MockMediaDeviceInfo {
    public kind: "audio" | "video";

    constructor(kind: "audio" | "video") {
        this.kind = kind;
    }
}

// Minimal stand-in for the client's MediaHandler: hands back a canned mock
// stream on request and treats stream teardown as a no-op.
class MockMediaHandler {
    public getUserMediaStream() {
        return new MockMediaStream("mock_stream_from_media_handler");
    }

    public stopUserMediaStream() {
        // Nothing to release for mock streams.
    }
}

describe('Call', function() {
let client;
let call;
Expand All @@ -110,6 +121,8 @@ describe('Call', function() {
mediaDevices: {
// @ts-ignore Mock
getUserMedia: () => new MockMediaStream("local_stream"),
// @ts-ignore Mock
enumerateDevices: async () => [new MockMediaDeviceInfo("audio"), new MockMediaDeviceInfo("video")],
},
};

Expand All @@ -129,6 +142,8 @@ describe('Call', function() {
// We just stub out sendEvent: we're not interested in testing the client's
// event sending code here
client.client.sendEvent = () => {};
client.client.mediaHandler = new MockMediaHandler;
client.client.getMediaHandler = () => client.client.mediaHandler;
client.httpBackend.when("GET", "/voip/turnServer").respond(200, {});
call = new MatrixCall({
client: client.client,
Expand Down Expand Up @@ -368,4 +383,16 @@ describe('Call', function() {
call.setScreensharingEnabled(true);
expect(call.setScreensharingEnabledWithoutMetadataSupport).toHaveBeenCalled();
});

// Regression test for the audio-only fallback: when acquiring an A/V stream
// fails, answer() retries without video instead of failing the call outright.
it("should fallback to answering with no video", async () => {
    await client.httpBackend.flush();

    // Stub the "should we answer with this media type?" check so that
    // answer(audio, video) requests exactly what the caller asked for.
    call.shouldAnswerWithMediaType = (wantedValue: boolean) => wantedValue;
    // Make every getUserMediaStream() attempt reject so the fallback path runs.
    client.client.mediaHandler.getUserMediaStream = jest.fn().mockRejectedValue("reject");

    await call.answer(true, true);

    // First attempt asks for audio+video; the retry drops video (audio only).
    expect(client.client.mediaHandler.getUserMediaStream).toHaveBeenNthCalledWith(1, true, true);
    expect(client.client.mediaHandler.getUserMediaStream).toHaveBeenNthCalledWith(2, true, false);
});
});
31 changes: 25 additions & 6 deletions src/webrtc/call.ts
Original file line number Diff line number Diff line change
Expand Up @@ -562,8 +562,8 @@ export class MatrixCall extends EventEmitter {
this.feeds.push(new CallFeed({
client: this.client,
roomId: this.roomId,
audioMuted: false,
videoMuted: false,
audioMuted: stream.getAudioTracks().length === 0,
videoMuted: stream.getVideoTracks().length === 0,
userId,
stream,
purpose,
Expand Down Expand Up @@ -752,19 +752,30 @@ export class MatrixCall extends EventEmitter {
logger.debug(`Answering call ${this.callId}`);

if (!this.localUsermediaStream && !this.waitForLocalAVStream) {
const prevState = this.state;
const answerWithAudio = this.shouldAnswerWithMediaType(audio, this.hasRemoteUserMediaAudioTrack, "audio");
const answerWithVideo = this.shouldAnswerWithMediaType(video, this.hasRemoteUserMediaVideoTrack, "video");

this.setState(CallState.WaitLocalMedia);
this.waitForLocalAVStream = true;

try {
const mediaStream = await this.client.getMediaHandler().getUserMediaStream(
this.shouldAnswerWithMediaType(audio, this.hasRemoteUserMediaAudioTrack, "audio"),
this.shouldAnswerWithMediaType(video, this.hasRemoteUserMediaVideoTrack, "video"),
answerWithAudio, answerWithVideo,
);
this.waitForLocalAVStream = false;
this.gotUserMediaForAnswer(mediaStream);
} catch (e) {
this.getUserMediaFailed(e);
return;
if (answerWithVideo) {
// Try to answer without video
logger.warn("Failed to getUserMedia(), trying to getUserMedia() without video");
this.setState(prevState);
this.waitForLocalAVStream = false;
await this.answer(answerWithAudio, false);
} else {
this.getUserMediaFailed(e);
return;
}
}
} else if (this.waitForLocalAVStream) {
this.setState(CallState.WaitLocalMedia);
Expand Down Expand Up @@ -993,6 +1004,10 @@ export class MatrixCall extends EventEmitter {
* @returns the new mute state
*/
public async setLocalVideoMuted(muted: boolean): Promise<boolean> {
if (!await this.client.getMediaHandler().hasVideoDevice()) {
return this.isLocalVideoMuted();
}

if (!this.hasLocalUserMediaVideoTrack && !muted) {
await this.upgradeCall(false, true);
return this.isLocalVideoMuted();
Expand Down Expand Up @@ -1021,6 +1036,10 @@ export class MatrixCall extends EventEmitter {
* @returns the new mute state
*/
public async setMicrophoneMuted(muted: boolean): Promise<boolean> {
if (!await this.client.getMediaHandler().hasAudioDevice()) {
return this.isMicrophoneMuted();
}

if (!this.hasLocalUserMediaAudioTrack && !muted) {
await this.upgradeCall(true, false);
return this.isMicrophoneMuted();
Expand Down
20 changes: 17 additions & 3 deletions src/webrtc/mediaHandler.ts
Original file line number Diff line number Diff line change
Expand Up @@ -43,23 +43,37 @@ export class MediaHandler {
this.videoInput = deviceId;
}

/**
 * Reports whether at least one audio input device (microphone) is present.
 * @returns {Promise<boolean>} true if any enumerated device has kind "audioinput"
 */
public async hasAudioDevice(): Promise<boolean> {
    const devices = await navigator.mediaDevices.enumerateDevices();
    return devices.some((device) => device.kind === "audioinput");
}

/**
 * Reports whether at least one video input device (camera) is present.
 * @returns {Promise<boolean>} true if any enumerated device has kind "videoinput"
 */
public async hasVideoDevice(): Promise<boolean> {
    const devices = await navigator.mediaDevices.enumerateDevices();
    return devices.some((device) => device.kind === "videoinput");
}

/**
* @returns {MediaStream} based on passed parameters
*/
public async getUserMediaStream(audio: boolean, video: boolean): Promise<MediaStream> {
const shouldRequestAudio = audio && await this.hasAudioDevice();
const shouldRequestVideo = video && await this.hasVideoDevice();

let stream: MediaStream;

// Find a stream with matching tracks
const matchingStream = this.userMediaStreams.find((stream) => {
if (audio !== (stream.getAudioTracks().length > 0)) return false;
if (video !== (stream.getVideoTracks().length > 0)) return false;
if (shouldRequestAudio !== (stream.getAudioTracks().length > 0)) return false;
if (shouldRequestVideo !== (stream.getVideoTracks().length > 0)) return false;
return true;
});

if (matchingStream) {
logger.log("Cloning user media stream", matchingStream.id);
stream = matchingStream.clone();
} else {
const constraints = this.getUserMediaContraints(audio, video);
const constraints = this.getUserMediaContraints(shouldRequestAudio, shouldRequestVideo);
logger.log("Getting user media with constraints", constraints);
stream = await navigator.mediaDevices.getUserMedia(constraints);
}
Expand Down

0 comments on commit 6804e42

Please sign in to comment.