From ce94dbb2b839500507668d3ddf898f14b86533a1 Mon Sep 17 00:00:00 2001 From: bgrozev Date: Thu, 15 Sep 2022 16:51:44 -0500 Subject: [PATCH] Remove MultiStreamConfig, always enable multi-stream mode. (#1948) * Remove MultiStreamConfig, always enable multi-stream mode. * ref: Remove "2" from class and function names. * squash: Remove "new" from class names. --- .../jitsi/videobridge/AbstractEndpoint.java | 115 +- .../videobridge/EndpointMessageTransport.java | 37 +- .../cc/allocation/BandwidthAllocator.java | 193 +-- .../jitsi/videobridge/shim/ChannelShim.java | 15 +- .../videobridge/xmpp/MediaSourceFactory.java | 17 +- .../kotlin/org/jitsi/videobridge/Endpoint.kt | 79 +- .../jitsi/videobridge/MultiStreamConfig.kt | 29 - .../cc/allocation/AllocationSettings.kt | 85 +- .../cc/allocation/BandwidthAllocation.kt | 6 +- .../cc/allocation/BitrateController.kt | 27 +- .../videobridge/cc/allocation/Prioritize.kt | 49 +- .../cc/allocation/SingleSourceAllocation.kt | 33 +- .../cc/allocation/SingleSourceAllocation2.kt | 336 ---- .../org/jitsi/videobridge/relay/Relay.kt | 29 +- .../relay/RelayMessageTransport.kt | 54 +- .../videobridge/relay/RelayedEndpoint.kt | 14 +- jvb/src/main/resources/reference.conf | 5 - .../videobridge/MediaSourceFactoryTest.kt | 23 +- .../videobridge/MultiStreamConfigTest.kt | 46 - .../cc/allocation/BitrateControllerNewTest.kt | 1501 ----------------- .../allocation/BitrateControllerPerfTest.kt | 26 + .../cc/allocation/BitrateControllerTest.kt | 430 ++--- .../allocation/EffectiveConstraintsNewTest.kt | 191 --- .../cc/allocation/EffectiveConstraintsTest.kt | 137 +- .../allocation/SingleSourceAllocation2Test.kt | 303 ---- .../allocation/SingleSourceAllocationTest.kt | 149 +- 26 files changed, 569 insertions(+), 3360 deletions(-) delete mode 100644 jvb/src/main/kotlin/org/jitsi/videobridge/MultiStreamConfig.kt delete mode 100644 jvb/src/main/kotlin/org/jitsi/videobridge/cc/allocation/SingleSourceAllocation2.kt delete mode 100644 jvb/src/test/kotlin/org/jitsi/videobridge/MultiStreamConfigTest.kt delete mode 100644 jvb/src/test/kotlin/org/jitsi/videobridge/cc/allocation/BitrateControllerNewTest.kt delete mode 100644 jvb/src/test/kotlin/org/jitsi/videobridge/cc/allocation/EffectiveConstraintsNewTest.kt delete mode 100644 jvb/src/test/kotlin/org/jitsi/videobridge/cc/allocation/SingleSourceAllocation2Test.kt diff --git a/jvb/src/main/java/org/jitsi/videobridge/AbstractEndpoint.java b/jvb/src/main/java/org/jitsi/videobridge/AbstractEndpoint.java index 59a30dc616..60998e609d 100644 --- a/jvb/src/main/java/org/jitsi/videobridge/AbstractEndpoint.java +++ b/jvb/src/main/java/org/jitsi/videobridge/AbstractEndpoint.java @@ -20,14 +20,12 @@ import org.jitsi.nlj.format.*; import org.jitsi.nlj.rtp.*; import org.jitsi.nlj.util.*; -import org.jitsi.utils.*; import org.jitsi.utils.event.*; import org.jitsi.utils.logging2.*; import org.jitsi.videobridge.cc.allocation.*; import org.jitsi.xmpp.extensions.colibri.*; import org.json.simple.*; -import java.io.*; import java.time.*; import java.util.*; @@ -61,15 +59,10 @@ public abstract class AbstractEndpoint */ private final Conference conference; - /** - * The map of receiver endpoint id -> video constraints. - */ - private final ReceiverConstraintsMap receiverVideoConstraintsMap = new ReceiverConstraintsMap(); - /** * The map of source name -> ReceiverConstraintsMap. */ - private final Map receiverVideoConstraintsMapV2 = new HashMap<>(); + private final Map receiverVideoConstraints = new HashMap<>(); /** * The statistic Id of this Endpoint. 
@@ -248,6 +241,8 @@ public String getStatsId() * * @return the (unique) identifier/ID of this instance */ + @NotNull + @Override public final String getId() { return id; @@ -380,49 +375,20 @@ public JSONObject getDebugState() { JSONObject debugState = new JSONObject(); - if (MultiStreamConfig.config.getEnabled()) - { - JSONObject receiverVideoConstraints = new JSONObject(); + JSONObject receiverVideoConstraints = new JSONObject(); - receiverVideoConstraintsMapV2.forEach( - (sourceName, receiverConstraints) -> - receiverVideoConstraints.put(sourceName, receiverConstraints.getDebugState())); + this.receiverVideoConstraints.forEach( + (sourceName, receiverConstraints) -> + receiverVideoConstraints.put(sourceName, receiverConstraints.getDebugState())); - debugState.put("receiverVideoConstraints", receiverVideoConstraints); - debugState.put("maxReceiverVideoConstraintsMap", new HashMap<>(maxReceiverVideoConstraintsMap)); - } - else - { - debugState.put("maxReceiverVideoConstraints", maxReceiverVideoConstraints); - debugState.put("receiverVideoConstraints", receiverVideoConstraintsMap.getDebugState()); - } + debugState.put("receiverVideoConstraints", receiverVideoConstraints); + debugState.put("maxReceiverVideoConstraintsMap", new HashMap<>(maxReceiverVideoConstraintsMap)); debugState.put("expired", expired); debugState.put("statsId", statsId); return debugState; } - /** - * Computes and sets the {@link #maxReceiverVideoConstraints} from the - * specified video constraints. - * - * @param newMaxHeight the maximum height resulting from the current set of constraints. - * (Currently we only support constraining the height, and not frame rate.) - */ - private void receiverVideoConstraintsChanged(int newMaxHeight) - { - VideoConstraints oldReceiverMaxVideoConstraints = this.maxReceiverVideoConstraints; - - - VideoConstraints newReceiverMaxVideoConstraints = new VideoConstraints(newMaxHeight, -1.0); - - if (!newReceiverMaxVideoConstraints.equals(oldReceiverMaxVideoConstraints)) - { - maxReceiverVideoConstraints = newReceiverMaxVideoConstraints; - sendVideoConstraints(newReceiverMaxVideoConstraints); - } - } - /** * Computes and sets the {@link #maxReceiverVideoConstraints} from the specified video constraints of the media * source identified by the given source name. @@ -431,7 +397,7 @@ private void receiverVideoConstraintsChanged(int newMaxHeight) * @param newMaxHeight the maximum height resulting from the current set of constraints. * (Currently we only support constraining the height, and not frame rate.) */ - private void receiverVideoConstraintsChangedV2(String sourceName, int newMaxHeight) + private void receiverVideoConstraintsChanged(String sourceName, int newMaxHeight) { VideoConstraints oldReceiverMaxVideoConstraints = this.maxReceiverVideoConstraintsMap.get(sourceName); @@ -487,28 +453,6 @@ private void receiverVideoConstraintsChangedV2(String sourceName, int newMaxHeig protected abstract void sendVideoConstraintsV2(@NotNull String sourceName, @NotNull VideoConstraints maxVideoConstraints); - /** - * Notifies this instance that a specified received wants to receive - * the specified video constraints from the endpoint attached to this - * instance (the sender). - * - * The receiver can be either another endpoint, or a remote bridge. - * - * @param receiverId the id that specifies the receiver endpoint - * @param newVideoConstraints the video constraints that the receiver - * wishes to receive. 
- */ - public void addReceiver(String receiverId, VideoConstraints newVideoConstraints) - { - VideoConstraints oldVideoConstraints = receiverVideoConstraintsMap.put(receiverId, newVideoConstraints); - if (oldVideoConstraints == null || !oldVideoConstraints.equals(newVideoConstraints)) - { - logger.debug( - () -> "Changed receiver constraints: " + receiverId + ": " + newVideoConstraints.getMaxHeight()); - receiverVideoConstraintsChanged(receiverVideoConstraintsMap.getMaxHeight()); - } - } - /** * Notifies this instance that a specified received wants to receive the specified video constraints from the media * source with the given source name. @@ -519,18 +463,18 @@ public void addReceiver(String receiverId, VideoConstraints newVideoConstraints) * @param sourceName the name of the media source for which the constraints are to be applied. * @param newVideoConstraints the video constraints that the receiver wishes to receive. */ - public void addReceiverV2( + public void addReceiver( @NotNull String receiverId, @NotNull String sourceName, @NotNull VideoConstraints newVideoConstraints ) { - ReceiverConstraintsMap sourceConstraints = receiverVideoConstraintsMapV2.get(sourceName); + ReceiverConstraintsMap sourceConstraints = receiverVideoConstraints.get(sourceName); if (sourceConstraints == null) { sourceConstraints = new ReceiverConstraintsMap(); - receiverVideoConstraintsMapV2.put(sourceName, sourceConstraints); + receiverVideoConstraints.put(sourceName, sourceConstraints); } VideoConstraints oldVideoConstraints = sourceConstraints.put(receiverId, newVideoConstraints); @@ -540,7 +484,7 @@ public void addReceiverV2( logger.debug( () -> "Changed receiver constraints: " + receiverId + "->" + sourceName + ": " + newVideoConstraints.getMaxHeight()); - receiverVideoConstraintsChangedV2(sourceName, sourceConstraints.getMaxHeight()); + receiverVideoConstraintsChanged(sourceName, sourceConstraints.getMaxHeight()); } } @@ -553,27 +497,16 @@ public void addReceiverV2( */ public void removeReceiver(String receiverId) { - if (MultiStreamConfig.config.getEnabled()) + for (Map.Entry sourceConstraintsEntry + : receiverVideoConstraints.entrySet()) { - for (Map.Entry sourceConstraintsEntry - : receiverVideoConstraintsMapV2.entrySet()) - { - String sourceName = sourceConstraintsEntry.getKey(); - ReceiverConstraintsMap sourceConstraints = sourceConstraintsEntry.getValue(); - - if (sourceConstraints.remove(receiverId) != null) - { - logger.debug(() -> "Removed receiver " + receiverId + " for " + sourceName); - receiverVideoConstraintsChangedV2(sourceName, sourceConstraints.getMaxHeight()); - } - } - } - else - { - if (receiverVideoConstraintsMap.remove(receiverId) != null) + String sourceName = sourceConstraintsEntry.getKey(); + ReceiverConstraintsMap sourceConstraints = sourceConstraintsEntry.getValue(); + + if (sourceConstraints.remove(receiverId) != null) { - logger.debug(() -> "Removed receiver " + receiverId); - receiverVideoConstraintsChanged(receiverVideoConstraintsMap.getMaxHeight()); + logger.debug(() -> "Removed receiver " + receiverId + " for " + sourceName); + receiverVideoConstraintsChanged(sourceName, sourceConstraints.getMaxHeight()); } } } @@ -587,14 +520,14 @@ public void removeReceiver(String receiverId) */ public void removeSourceReceiver(String receiverId, String sourceName) { - ReceiverConstraintsMap sourceConstraints = receiverVideoConstraintsMapV2.get(sourceName); + ReceiverConstraintsMap sourceConstraints = receiverVideoConstraints.get(sourceName); if (sourceConstraints != null) { if 
(sourceConstraints.remove(receiverId) != null) { logger.debug(() -> "Removed receiver " + receiverId + " for " + sourceName); - receiverVideoConstraintsChangedV2(sourceName, sourceConstraints.getMaxHeight()); + receiverVideoConstraintsChanged(sourceName, sourceConstraints.getMaxHeight()); } } } diff --git a/jvb/src/main/java/org/jitsi/videobridge/EndpointMessageTransport.java b/jvb/src/main/java/org/jitsi/videobridge/EndpointMessageTransport.java index a97be4a024..f8d9832e16 100644 --- a/jvb/src/main/java/org/jitsi/videobridge/EndpointMessageTransport.java +++ b/jvb/src/main/java/org/jitsi/videobridge/EndpointMessageTransport.java @@ -129,44 +129,17 @@ public BridgeChannelMessage clientHello(ClientHelloMessage message) @Override public BridgeChannelMessage videoType(VideoTypeMessage videoTypeMessage) { - if (MultiStreamConfig.config.getEnabled()) - { - sourceVideoType( + return sourceVideoType( new SourceVideoTypeMessage( - videoTypeMessage.getVideoType(), - endpointIdToSourceName(endpoint.getId()), - videoTypeMessage.getEndpointId()) - ); - - return null; - } - - endpoint.setVideoType(videoTypeMessage.getVideoType()); - - Conference conference = endpoint.getConference(); - - if (conference == null || conference.isExpired()) - { - getLogger().warn("Unable to forward VideoTypeMessage, conference is null or expired"); - return null; - } - - videoTypeMessage.setEndpointId(endpoint.getId()); - - /* Forward videoType messages to Relays. */ - conference.sendMessage(videoTypeMessage, Collections.emptyList(), true); - - return null; + videoTypeMessage.getVideoType(), + endpointIdToSourceName(endpoint.getId()), + videoTypeMessage.getEndpointId()) + ); } @Override public BridgeChannelMessage sourceVideoType(SourceVideoTypeMessage sourceVideoTypeMessage) { - if (!MultiStreamConfig.config.getEnabled()) - { - return null; - } - String sourceName = sourceVideoTypeMessage.getSourceName(); if (getLogger().isDebugEnabled()) diff --git a/jvb/src/main/java/org/jitsi/videobridge/cc/allocation/BandwidthAllocator.java b/jvb/src/main/java/org/jitsi/videobridge/cc/allocation/BandwidthAllocator.java index eb41537a28..706670cd8c 100644 --- a/jvb/src/main/java/org/jitsi/videobridge/cc/allocation/BandwidthAllocator.java +++ b/jvb/src/main/java/org/jitsi/videobridge/cc/allocation/BandwidthAllocator.java @@ -22,13 +22,11 @@ import org.jitsi.utils.event.*; import org.jitsi.utils.logging.*; import org.jitsi.utils.logging2.Logger; -import org.jitsi.videobridge.*; import org.jitsi.videobridge.cc.config.*; import org.jitsi.videobridge.util.*; import org.json.simple.*; import java.lang.*; -import java.lang.Deprecated; import java.lang.SuppressWarnings; import java.time.*; import java.util.*; @@ -37,7 +35,6 @@ import java.util.stream.*; import static org.jitsi.videobridge.cc.allocation.PrioritizeKt.prioritize; -import static org.jitsi.videobridge.cc.allocation.PrioritizeKt.prioritize2; import static org.jitsi.videobridge.cc.allocation.VideoConstraintsKt.prettyPrint; /** @@ -252,48 +249,30 @@ synchronized void update() BandwidthAllocation newAllocation; Map oldEffectiveConstraints; - if (MultiStreamConfig.config.getEnabled()) - { - // Order the sources by selection, followed by Endpoint's speech activity. - List sources + // Order the sources by selection, followed by Endpoint's speech activity. 
+ List sources = endpointsSupplier.get() - .stream() - .flatMap(endpoint -> Arrays.stream(endpoint.getMediaSources())) - .collect(Collectors.toList()); - List sortedSources = prioritize2(sources, getSelectedSources()); - - // Extract and update the effective constraints. - oldEffectiveConstraints = effectiveConstraints; - effectiveConstraints = PrioritizeKt.getEffectiveConstraints2(sortedSources, allocationSettings); - logger.trace(() -> + .stream() + .flatMap(endpoint -> Arrays.stream(endpoint.getMediaSources())) + .collect(Collectors.toList()); + List sortedSources = prioritize(sources, getSelectedSources()); + + // Extract and update the effective constraints. + oldEffectiveConstraints = effectiveConstraints; + effectiveConstraints = PrioritizeKt.getEffectiveConstraints(sortedSources, allocationSettings); + logger.trace(() -> "Allocating: sortedSources=" - + sortedSources.stream().map(MediaSourceDesc::getSourceName).collect(Collectors.joining(",")) - + " effectiveConstraints=" + prettyPrint(effectiveConstraints)); + + sortedSources.stream().map(MediaSourceDesc::getSourceName).collect(Collectors.joining(",")) + + " effectiveConstraints=" + prettyPrint(effectiveConstraints)); - // Compute the bandwidth allocation. - newAllocation = allocate2(sortedSources); + // Compute the bandwidth allocation. + newAllocation = allocate(sortedSources); - eventEmitter.fireEvent(handler -> { - handler.sourceListChanged(sortedSources); - return Unit.INSTANCE; - }); - } - else + eventEmitter.fireEvent(handler -> { - // Order the endpoints by selection, followed by speech activity. - List sortedEndpoints = prioritize(endpointsSupplier.get(), getSelectedEndpoints()); - - // Extract and update the effective constraints. - oldEffectiveConstraints = effectiveConstraints; - effectiveConstraints = PrioritizeKt.getEffectiveConstraints(sortedEndpoints, allocationSettings); - logger.trace(() -> - "Allocating: sortedEndpoints=" - + sortedEndpoints.stream().map(T::getId).collect(Collectors.joining(",")) - + " effectiveConstraints=" + prettyPrint(effectiveConstraints)); - - // Compute the bandwidth allocation. - newAllocation = allocate(sortedEndpoints); - } + handler.sourceListChanged(sortedSources); + return Unit.INSTANCE; + }); boolean allocationChanged = !allocation.isTheSameAs(newAllocation); if (allocationChanged) @@ -318,21 +297,6 @@ synchronized void update() } } - @Deprecated - private List getSelectedEndpoints() - { - // On-stage participants are considered selected (with higher prio). - List selectedEndpoints = new ArrayList<>(allocationSettings.getOnStageEndpoints()); - allocationSettings.getSelectedEndpoints().forEach(selectedEndpoint -> - { - if (!selectedEndpoints.contains(selectedEndpoint)) - { - selectedEndpoints.add(selectedEndpoint); - } - }); - return selectedEndpoints; - } - private List getSelectedSources() { // On-stage sources are considered selected (with higher priority). @@ -347,75 +311,6 @@ private List getSelectedSources() return selectedSources; } - /** - * Implements the bandwidth allocation algorithm for the given ordered list of endpoints. - * - * @param conferenceEndpoints the list of endpoints in order of priority to allocate for. - * @return the new {@link BandwidthAllocation}. 
- */ - private synchronized @NotNull BandwidthAllocation allocate(List conferenceEndpoints) - { - List sourceBitrateAllocations = createAllocations(conferenceEndpoints); - - if (sourceBitrateAllocations.isEmpty()) - { - return new BandwidthAllocation(Collections.emptySet()); - } - - long remainingBandwidth = getAvailableBandwidth(); - long oldRemainingBandwidth = -1; - - boolean oversending = false; - while (oldRemainingBandwidth != remainingBandwidth) - { - oldRemainingBandwidth = remainingBandwidth; - - for (int i = 0; i < sourceBitrateAllocations.size(); i++) - { - SingleSourceAllocation sourceBitrateAllocation = sourceBitrateAllocations.get(i); - if (sourceBitrateAllocation.getConstraints().isDisabled()) - { - continue; - } - - // In stage view improve greedily until preferred, in tile view go step-by-step. - remainingBandwidth -= sourceBitrateAllocation.improve(remainingBandwidth, i == 0); - if (remainingBandwidth < 0) - { - oversending = true; - } - - // In stage view, do not allocate bandwidth for thumbnails until the on-stage reaches "preferred". - // This prevents enabling thumbnail only to disable them when bwe slightly increases allowing on-stage - // to take more. - if (sourceBitrateAllocation.isOnStage() && !sourceBitrateAllocation.hasReachedPreferred()) - { - break; - } - } - } - - // The endpoints which are in lastN, and are sending video, but were suspended due to bwe. - List suspendedIds = sourceBitrateAllocations.stream() - .filter(SingleSourceAllocation::isSuspended) - .map(ssa -> ssa.getEndpoint().getId()).collect(Collectors.toList()); - if (!suspendedIds.isEmpty()) - { - logger.info("Endpoints were suspended due to insufficient bandwidth (bwe=" - + getAvailableBandwidth() + " bps): " + String.join(",", suspendedIds)); - } - - Set allocations = new HashSet<>(); - - long targetBps = 0, idealBps = 0; - for (SingleSourceAllocation sourceBitrateAllocation : sourceBitrateAllocations) { - allocations.add(sourceBitrateAllocation.getResult()); - targetBps += sourceBitrateAllocation.getTargetBitrate(); - idealBps += sourceBitrateAllocation.getIdealBitrate(); - } - return new BandwidthAllocation(allocations, oversending, idealBps, targetBps, suspendedIds); - } - /** * Implements the bandwidth allocation algorithm for the given ordered list of media sources. * @@ -424,9 +319,9 @@ private List getSelectedSources() * @param conferenceMediaSources the list of endpoint media sources in order of priority to allocate for. * @return the new {@link BandwidthAllocation}. */ - private synchronized @NotNull BandwidthAllocation allocate2(List conferenceMediaSources) + private synchronized @NotNull BandwidthAllocation allocate(List conferenceMediaSources) { - List sourceBitrateAllocations = createAllocations2(conferenceMediaSources); + List sourceBitrateAllocations = createAllocations(conferenceMediaSources); if (sourceBitrateAllocations.isEmpty()) { @@ -443,7 +338,7 @@ private List getSelectedSources() for (int i = 0; i < sourceBitrateAllocations.size(); i++) { - SingleSourceAllocation2 sourceBitrateAllocation = sourceBitrateAllocations.get(i); + SingleSourceAllocation sourceBitrateAllocation = sourceBitrateAllocations.get(i); if (sourceBitrateAllocation.getConstraints().isDisabled()) { continue; @@ -468,8 +363,8 @@ private List getSelectedSources() // The sources which are in lastN, and are sending video, but were suspended due to bwe. 
List suspendedIds = sourceBitrateAllocations.stream() - .filter(SingleSourceAllocation2::isSuspended) - .map(SingleSourceAllocation2::getMediaSource) + .filter(SingleSourceAllocation::isSuspended) + .map(SingleSourceAllocation::getMediaSource) .map(MediaSourceDesc::getSourceName) .collect(Collectors.toList()); if (!suspendedIds.isEmpty()) @@ -481,7 +376,7 @@ private List getSelectedSources() Set allocations = new HashSet<>(); long targetBps = 0, idealBps = 0; - for (SingleSourceAllocation2 sourceBitrateAllocation : sourceBitrateAllocations) { + for (SingleSourceAllocation sourceBitrateAllocation : sourceBitrateAllocations) { allocations.add(sourceBitrateAllocation.getResult()); targetBps += sourceBitrateAllocation.getTargetBitrate(); idealBps += sourceBitrateAllocation.getIdealBitrate(); @@ -511,49 +406,17 @@ public boolean hasNonZeroEffectiveConstraints(String endpointId) return !constraints.isDisabled(); } - private synchronized @NotNull List createAllocations(List conferenceEndpoints) - { - // Init. - List sourceBitrateAllocations = new ArrayList<>(conferenceEndpoints.size()); - - for (MediaSourceContainer endpoint : conferenceEndpoints) - { - MediaSourceDesc source = endpoint.getMediaSource(); - - if (source != null) - { - sourceBitrateAllocations.add( - new SingleSourceAllocation( - endpoint, - // Note that we use the effective constraints and not the receiver's constraints - // directly. This means we never even try to allocate bitrate to endpoints "outside - // lastN". For example, if LastN=1 and the first endpoint sends a non-scalable - // stream with bitrate higher that the available bandwidth, we will forward no - // video at all instead of going to the second endpoint in the list. - // I think this is not desired behavior. However, it is required for the "effective - // constraints" to work as designed. - effectiveConstraints.get(endpoint.getId()), - allocationSettings.getOnStageEndpoints().contains(endpoint.getId()), - diagnosticContext, - clock, - logger)); - } - } - - return sourceBitrateAllocations; - } - // The new version which works with multiple streams per endpoint. - private synchronized @NotNull List createAllocations2( + private synchronized @NotNull List createAllocations( List conferenceMediaSources) { // Init. 
- List sourceBitrateAllocations = new ArrayList<>(conferenceMediaSources.size()); + List sourceBitrateAllocations = new ArrayList<>(conferenceMediaSources.size()); for (MediaSourceDesc source : conferenceMediaSources) { sourceBitrateAllocations.add( - new SingleSourceAllocation2( + new SingleSourceAllocation( source.getOwner(), source, // Note that we use the effective constraints and not the receiver's constraints diff --git a/jvb/src/main/java/org/jitsi/videobridge/shim/ChannelShim.java b/jvb/src/main/java/org/jitsi/videobridge/shim/ChannelShim.java index 7783ce0200..79953ee895 100644 --- a/jvb/src/main/java/org/jitsi/videobridge/shim/ChannelShim.java +++ b/jvb/src/main/java/org/jitsi/videobridge/shim/ChannelShim.java @@ -280,21 +280,18 @@ public void describe(ColibriConferenceIQ.ChannelCommon commonIq) public void setSources(@NotNull List sources) throws IqProcessingException { - if (MultiStreamConfig.config.getEnabled()) + for (SourcePacketExtension s : sources) { - for (SourcePacketExtension s: sources) - { - if ((s.hasSSRC() || s.hasRid()) // Enforce sourceName only if it's not an empty source [1] + if ((s.hasSSRC() || s.hasRid()) // Enforce sourceName only if it's not an empty source [1] // TODO use Kotlin "isNullOrEmpty" once ported to Kotlin && (s.getName() == null || s.getName().trim().isEmpty())) - { - throw new IqProcessingException( + { + throw new IqProcessingException( StanzaError.Condition.bad_request, "The name attribute is required for " + s); - } - // [1]: An empty source can be used to signal "remove all sources": - // https://github.com/jitsi/jitsi-xmpp-extensions/blob/b208e4bc96de30adae14f86515934293cd203138/src/main/java/org/jitsi/xmpp/extensions/colibri/ColibriBuilder.java#L157 } + // [1]: An empty source can be used to signal "remove all sources": + // https://github.com/jitsi/jitsi-xmpp-extensions/blob/b208e4bc96de30adae14f86515934293cd203138/src/main/java/org/jitsi/xmpp/extensions/colibri/ColibriBuilder.java#L157 } this.sources = sources; diff --git a/jvb/src/main/java/org/jitsi/videobridge/xmpp/MediaSourceFactory.java b/jvb/src/main/java/org/jitsi/videobridge/xmpp/MediaSourceFactory.java index 8bdb3cc4ea..a2a8fe55a6 100644 --- a/jvb/src/main/java/org/jitsi/videobridge/xmpp/MediaSourceFactory.java +++ b/jvb/src/main/java/org/jitsi/videobridge/xmpp/MediaSourceFactory.java @@ -15,11 +15,9 @@ */ package org.jitsi.videobridge.xmpp; -import org.jetbrains.annotations.*; import org.jitsi.nlj.*; import org.jitsi.nlj.rtp.*; import org.jitsi.utils.logging2.*; -import org.jitsi.videobridge.MultiStreamConfig; import org.jitsi.xmpp.extensions.colibri.*; import org.jitsi.xmpp.extensions.jingle.*; import org.jitsi.xmpp.extensions.jitsimeet.*; @@ -812,16 +810,13 @@ private static MediaSourceDesc createSource( // TODO once multi stream, becomes the default, make a change to MediaStreamDesc, so that owner and name are // not optional (there's no good reason for that). Then the error will be thrown automatically by Kotlin. 
- if (MultiStreamConfig.config.getEnabled()) + if (name == null) { - if (name == null) - { - throw new IllegalArgumentException("The 'name' is missing in the source description"); - } - if (owner == null) - { - throw new IllegalArgumentException("The 'owner' is missing in the source description"); - } + throw new IllegalArgumentException("The 'name' is missing in the source description"); + } + if (owner == null) + { + throw new IllegalArgumentException("The 'owner' is missing in the source description"); } MediaSourceDesc source = new MediaSourceDesc(encodings, owner, name); diff --git a/jvb/src/main/kotlin/org/jitsi/videobridge/Endpoint.kt b/jvb/src/main/kotlin/org/jitsi/videobridge/Endpoint.kt index 2cbd20fd31..38ab81f3e9 100644 --- a/jvb/src/main/kotlin/org/jitsi/videobridge/Endpoint.kt +++ b/jvb/src/main/kotlin/org/jitsi/videobridge/Endpoint.kt @@ -347,9 +347,7 @@ class Endpoint @JvmOverloads constructor( override var mediaSources: Array get() = transceiver.getMediaSources() set(value) { - if (MultiStreamConfig.config.enabled) { - applyVideoTypeCache(value) - } + applyVideoTypeCache(value) val wasEmpty = transceiver.getMediaSources().isEmpty() if (transceiver.setMediaSources(value)) { eventEmitter.fireEvent { sourcesChanged() } @@ -531,12 +529,8 @@ class Endpoint @JvmOverloads constructor( } private fun sendAllVideoConstraints() { - if (MultiStreamConfig.config.enabled) { - maxReceiverVideoConstraintsMap.forEach { (sourceName, constraints) -> - sendVideoConstraintsV2(sourceName, constraints) - } - } else { - sendVideoConstraints(maxReceiverVideoConstraints) + maxReceiverVideoConstraintsMap.forEach { (sourceName, constraints) -> + sendVideoConstraintsV2(sourceName, constraints) } } @@ -548,37 +542,9 @@ class Endpoint @JvmOverloads constructor( fun dtlsAppPacketReceived(data: ByteArray, off: Int, len: Int) = sctpHandler.processPacket(PacketInfo(UnparsedPacket(data, off, len))) - fun effectiveVideoConstraintsChanged( - oldEffectiveConstraints: Map, - newEffectiveConstraints: Map - ) { - if (MultiStreamConfig.config.enabled) { - effectiveVideoConstraintsChangedV2(oldEffectiveConstraints, newEffectiveConstraints) - } else { - effectiveVideoConstraintsChangedV1(oldEffectiveConstraints, newEffectiveConstraints) - } - } - @Deprecated("", ReplaceWith("effectiveVideoConstraintsChangedV2"), DeprecationLevel.WARNING) - private fun effectiveVideoConstraintsChangedV1( - oldEffectiveConstraints: Map, - newEffectiveConstraints: Map - ) { - val removedEndpoints = oldEffectiveConstraints.keys.filterNot { it in newEffectiveConstraints.keys } - // Sources that "this" endpoint no longer receives. 
- for (removedEpId in removedEndpoints) { - // Remove ourself as a receiver from that endpoint - conference.getEndpoint(removedEpId)?.removeReceiver(id) - } - - // Added or updated - newEffectiveConstraints.forEach { (epId, effectiveConstraints) -> - conference.getEndpoint(epId)?.addReceiver(id, effectiveConstraints) - } - } - - private fun effectiveVideoConstraintsChangedV2( + fun effectiveVideoConstraintsChanged( oldEffectiveConstraints: Map, newEffectiveConstraints: Map ) { @@ -592,7 +558,7 @@ class Endpoint @JvmOverloads constructor( // Added or updated newEffectiveConstraints.forEach { (sourceName, effectiveConstraints) -> - conference.findSourceOwner(sourceName)?.addReceiverV2(id, sourceName, effectiveConstraints) + conference.findSourceOwner(sourceName)?.addReceiver(id, sourceName, effectiveConstraints) } } @@ -763,7 +729,7 @@ class Endpoint @JvmOverloads constructor( */ @Deprecated("", ReplaceWith("sendForwardedSourcesMessage"), DeprecationLevel.WARNING) fun sendForwardedEndpointsMessage(forwardedEndpoints: Collection) { - if (MultiStreamConfig.config.enabled && isUsingSourceNames) { + if (isUsingSourceNames) { return } @@ -784,7 +750,7 @@ class Endpoint @JvmOverloads constructor( * @param forwardedSources the collection of forwarded media sources (by name). */ fun sendForwardedSourcesMessage(forwardedSources: Collection) { - if (!MultiStreamConfig.config.enabled || !isUsingSourceNames) { + if (!isUsingSourceNames) { return } @@ -1066,19 +1032,14 @@ class Endpoint @JvmOverloads constructor( buildString { append("wantsStatsFrom(${ep.id}): isRecentSpeaker=${conference.speechActivity.isRecentSpeaker(ep)} ") append("isRankedSpeaker=${conference.isRankedSpeaker(ep)} ") - if (MultiStreamConfig.config.enabled) { - if (ep.mediaSources.isEmpty()) { - append("(no media sources)") - } - ep.mediaSources.forEach { source -> - val name = source.sourceName - append("isOnStageOrSelected($name)=${bitrateController.isOnStageOrSelected(source)} ") - append("hasNonZeroEffectiveConstraints($name)=") - append("${bitrateController.hasNonZeroEffectiveConstraints(source)} ") - } - } else { - append("isOnStageOrSelected=${bitrateController.isOnStageOrSelected(ep)} ") - append("hasNonZeroEffectiveConstraints=${bitrateController.hasNonZeroEffectiveConstraints(ep)}") + if (ep.mediaSources.isEmpty()) { + append("(no media sources)") + } + ep.mediaSources.forEach { source -> + val name = source.sourceName + append("isOnStageOrSelected($name)=${bitrateController.isOnStageOrSelected(source)} ") + append("hasNonZeroEffectiveConstraints($name)=") + append("${bitrateController.hasNonZeroEffectiveConstraints(source)} ") } } } @@ -1086,13 +1047,9 @@ class Endpoint @JvmOverloads constructor( if (conference.speechActivity.isRecentSpeaker(ep) || conference.isRankedSpeaker(ep)) { return true } - return if (MultiStreamConfig.config.enabled) { - ep.mediaSources.any { source -> - bitrateController.isOnStageOrSelected(source) || - bitrateController.hasNonZeroEffectiveConstraints(source) - } - } else { - bitrateController.isOnStageOrSelected(ep) || bitrateController.hasNonZeroEffectiveConstraints(ep) + return ep.mediaSources.any { source -> + bitrateController.isOnStageOrSelected(source) || + bitrateController.hasNonZeroEffectiveConstraints(source) } } diff --git a/jvb/src/main/kotlin/org/jitsi/videobridge/MultiStreamConfig.kt b/jvb/src/main/kotlin/org/jitsi/videobridge/MultiStreamConfig.kt deleted file mode 100644 index 3acdbd6a49..0000000000 --- a/jvb/src/main/kotlin/org/jitsi/videobridge/MultiStreamConfig.kt +++ /dev/null @@ 
-1,29 +0,0 @@ -/* - * Copyright @ 2021 - present 8x8, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.jitsi.videobridge - -import org.jitsi.config.JitsiConfig -import org.jitsi.metaconfig.config -import org.jitsi.metaconfig.from - -class MultiStreamConfig private constructor() { - val enabled: Boolean by config("videobridge.multi-stream.enabled".from(JitsiConfig.newConfig)) - - companion object { - @JvmField - val config = MultiStreamConfig() - } -} diff --git a/jvb/src/main/kotlin/org/jitsi/videobridge/cc/allocation/AllocationSettings.kt b/jvb/src/main/kotlin/org/jitsi/videobridge/cc/allocation/AllocationSettings.kt index 020868d874..90e24ceed5 100644 --- a/jvb/src/main/kotlin/org/jitsi/videobridge/cc/allocation/AllocationSettings.kt +++ b/jvb/src/main/kotlin/org/jitsi/videobridge/cc/allocation/AllocationSettings.kt @@ -17,7 +17,6 @@ package org.jitsi.videobridge.cc.allocation import org.jitsi.utils.OrderedJsonObject -import org.jitsi.videobridge.MultiStreamConfig import org.jitsi.videobridge.cc.config.BitrateControllerConfig.Companion.config import org.jitsi.videobridge.message.ReceiverVideoConstraintsMessage import org.jitsi.videobridge.util.endpointIdToSourceName @@ -37,13 +36,8 @@ data class AllocationSettings @JvmOverloads constructor( val defaultConstraints: VideoConstraints ) { fun toJson() = OrderedJsonObject().apply { - if (MultiStreamConfig.config.enabled) { - put("on_stage_sources", onStageSources) - put("selected_sources", selectedSources) - } else { - put("on_stage_endpoints", onStageEndpoints) - put("selected_endpoints", selectedEndpoints) - } + put("on_stage_sources", onStageSources) + put("selected_sources", selectedSources) put("video_constraints", videoConstraints) put("last_n", lastN) put("default_constraints", defaultConstraints) @@ -83,25 +77,13 @@ internal class AllocationSettingsWrapper(private val useSourceNames: Boolean) { private var allocationSettings = create() - private fun create(): AllocationSettings { - if (MultiStreamConfig.config.enabled) { - return AllocationSettings( - onStageSources = onStageSources, - selectedSources = selectedSources, - videoConstraints = videoConstraints, - defaultConstraints = defaultConstraints, - lastN = lastN - ) - } else { - return AllocationSettings( - onStageEndpoints = onStageEndpoints, - selectedEndpoints = selectedEndpoints, - videoConstraints = videoConstraints, - defaultConstraints = defaultConstraints, - lastN = lastN - ) - } - } + private fun create(): AllocationSettings = AllocationSettings( + onStageSources = onStageSources, + selectedSources = selectedSources, + videoConstraints = videoConstraints, + defaultConstraints = defaultConstraints, + lastN = lastN + ) fun get() = allocationSettings @@ -114,46 +96,31 @@ internal class AllocationSettingsWrapper(private val useSourceNames: Boolean) { changed = true } } - if (MultiStreamConfig.config.enabled) { - if (useSourceNames) { - message.selectedSources?.let { - if (selectedSources != it) { - selectedSources = it - changed = true - } - } - 
message.onStageSources?.let { - if (onStageSources != it) { - onStageSources = it - changed = true - } - } - } else { - message.selectedEndpoints?.let { - val newSelectedSources = it.map { endpoint -> endpointIdToSourceName(endpoint) } - if (selectedSources != newSelectedSources) { - selectedSources = newSelectedSources - changed = true - } + if (useSourceNames) { + message.selectedSources?.let { + if (selectedSources != it) { + selectedSources = it + changed = true } - message.onStageEndpoints?.let { - val newOnStageSources = it.map { endpoint -> endpointIdToSourceName(endpoint) } - if (onStageSources != newOnStageSources) { - onStageSources = newOnStageSources - changed = true - } + } + message.onStageSources?.let { + if (onStageSources != it) { + onStageSources = it + changed = true } } } else { message.selectedEndpoints?.let { - if (selectedEndpoints != it) { - selectedEndpoints = it + val newSelectedSources = it.map { endpoint -> endpointIdToSourceName(endpoint) } + if (selectedSources != newSelectedSources) { + selectedSources = newSelectedSources changed = true } } message.onStageEndpoints?.let { - if (onStageEndpoints != it) { - onStageEndpoints = it + val newOnStageSources = it.map { endpoint -> endpointIdToSourceName(endpoint) } + if (onStageSources != newOnStageSources) { + onStageSources = newOnStageSources changed = true } } @@ -168,7 +135,7 @@ internal class AllocationSettingsWrapper(private val useSourceNames: Boolean) { var newConstraints = it // Convert endpoint IDs to source names - if (MultiStreamConfig.config.enabled && !useSourceNames) { + if (!useSourceNames) { newConstraints = HashMap(it.size) it.entries.stream().forEach { entry -> diff --git a/jvb/src/main/kotlin/org/jitsi/videobridge/cc/allocation/BandwidthAllocation.kt b/jvb/src/main/kotlin/org/jitsi/videobridge/cc/allocation/BandwidthAllocation.kt index e282ff407d..431adb0edf 100644 --- a/jvb/src/main/kotlin/org/jitsi/videobridge/cc/allocation/BandwidthAllocation.kt +++ b/jvb/src/main/kotlin/org/jitsi/videobridge/cc/allocation/BandwidthAllocation.kt @@ -17,7 +17,6 @@ package org.jitsi.videobridge.cc.allocation import org.jitsi.nlj.MediaSourceDesc import org.jitsi.nlj.RtpLayerDesc -import org.jitsi.videobridge.MultiStreamConfig import org.json.simple.JSONObject /** @@ -36,10 +35,7 @@ class BandwidthAllocation @JvmOverloads constructor( allocations.filter { it.isForwarded() }.map { it.endpointId }.toSet() val forwardedSources: Set = - if (MultiStreamConfig.config.enabled) - allocations.filter { it.isForwarded() }.map { it.mediaSource?.sourceName!! }.toSet() - else - emptySet() + allocations.filter { it.isForwarded() }.map { it.mediaSource?.sourceName!! }.toSet() /** * Whether the two allocations have the same endpoints and same layers. 
diff --git a/jvb/src/main/kotlin/org/jitsi/videobridge/cc/allocation/BitrateController.kt b/jvb/src/main/kotlin/org/jitsi/videobridge/cc/allocation/BitrateController.kt index c86ea55556..87e47f28d5 100644 --- a/jvb/src/main/kotlin/org/jitsi/videobridge/cc/allocation/BitrateController.kt +++ b/jvb/src/main/kotlin/org/jitsi/videobridge/cc/allocation/BitrateController.kt @@ -26,7 +26,6 @@ import org.jitsi.utils.event.SyncEventEmitter import org.jitsi.utils.logging.DiagnosticContext import org.jitsi.utils.logging.TimeSeriesLogger import org.jitsi.utils.logging2.Logger -import org.jitsi.videobridge.MultiStreamConfig import org.jitsi.videobridge.cc.config.BitrateControllerConfig.Companion.config import org.jitsi.videobridge.message.ReceiverVideoConstraintsMessage import org.jitsi.videobridge.util.BooleanStateTimeTracker @@ -177,14 +176,6 @@ class BitrateController @JvmOverloads constructor( } } - /** - * Query whether this endpoint is on stage or selected, as of the most recent - * video constraints. - */ - fun isOnStageOrSelected(endpoint: T) = - allocationSettings.onStageEndpoints.contains(endpoint.id) || - allocationSettings.selectedEndpoints.contains(endpoint.id) - /** * Query whether this source is on stage or selected, as of the most recent * video constraints @@ -193,25 +184,11 @@ class BitrateController @JvmOverloads constructor( allocationSettings.onStageSources.contains(source.sourceName) || allocationSettings.selectedSources.contains(source.sourceName) - /** - * Query whether this allocator is forwarding a source from a given endpoint, as of its - * most recent allocation decision. - */ - fun isForwarding(endpoint: T) = bandwidthAllocator.isForwarding(endpoint.id) - - /** - * Query whether this allocator has non-zero effective constraints for a given endpoint. - */ - fun hasNonZeroEffectiveConstraints(endpoint: T) = - !MultiStreamConfig.config.enabled && - bandwidthAllocator.hasNonZeroEffectiveConstraints(endpoint.id) - /** * Query whether this allocator has non-zero effective constraints for a given source */ fun hasNonZeroEffectiveConstraints(source: MediaSourceDesc) = - MultiStreamConfig.config.enabled && - bandwidthAllocator.hasNonZeroEffectiveConstraints(source.sourceName) + bandwidthAllocator.hasNonZeroEffectiveConstraints(source.sourceName) /** * Get the target and ideal bitrate of the current [BandwidthAllocation], as well as the list of SSRCs being @@ -301,7 +278,7 @@ class BitrateController @JvmOverloads constructor( // Actually implement the allocation (configure the packet filter to forward the chosen target layers). packetHandler.allocationChanged(allocation) - if (MultiStreamConfig.config.enabled && useSourceNames) { + if (useSourceNames) { // If rewriting SSRCs, all active sources will be notified separately. if (!doSsrcRewriting) { // TODO as per George's comment above: should this message be sent on message transport connect? 
diff --git a/jvb/src/main/kotlin/org/jitsi/videobridge/cc/allocation/Prioritize.kt b/jvb/src/main/kotlin/org/jitsi/videobridge/cc/allocation/Prioritize.kt index 3fe9decb8a..3236d9cfe9 100644 --- a/jvb/src/main/kotlin/org/jitsi/videobridge/cc/allocation/Prioritize.kt +++ b/jvb/src/main/kotlin/org/jitsi/videobridge/cc/allocation/Prioritize.kt @@ -20,28 +20,7 @@ import org.jitsi.videobridge.calculateLastN import org.jitsi.videobridge.jvbLastNSingleton import org.jitsi.videobridge.load_management.ConferenceSizeLastNLimits.Companion.singleton as conferenceSizeLimits -/** - * @param selectedEndpointIds the IDs of the selected endpoints, in order of selection. - * @param conferenceEndpoints the conference endpoints in no particular order. - * - * @return the endpoints from `conferenceEndpoints` ordered by selection first, and then speech activity. - */ -@Deprecated("", ReplaceWith("prioritize2"), DeprecationLevel.WARNING) -fun prioritize( - conferenceEndpoints: MutableList, - selectedEndpointIds: List = emptyList() -): List { - // Bump selected endpoints to the top of the list. - selectedEndpointIds.asReversed().forEach { selectedEndpointId -> - conferenceEndpoints.find { it.id == selectedEndpointId }?.let { selectedEndpoint -> - conferenceEndpoints.remove(selectedEndpoint) - conferenceEndpoints.add(0, selectedEndpoint) - } - } - return conferenceEndpoints -} - -fun prioritize2( +fun prioritize( conferenceSources: MutableList, selectedSourceNames: List = emptyList() ): List { @@ -56,34 +35,10 @@ fun prioritize2( return conferenceSources } -/** - * Return the "effective" constraints for the given endpoints, i.e. the constraints adjusted for LastN. - */ -fun getEffectiveConstraints(endpoints: List, allocationSettings: AllocationSettings): - Map { - - // Add 1 for the receiver endpoint, which is not in the list. - val effectiveLastN = effectiveLastN(allocationSettings.lastN, endpoints.size + 1) - - // Keep track of the number of endpoints with non-zero constraints. Once [effectiveLastN] of them have been - // added, all other endpoints have effectiveConstraints 0, because they would never be forwarded by the - // algorithm. - var endpointsWithNonZeroConstraints = 0 - return endpoints.associate { endpoint -> - endpoint.id to if (endpointsWithNonZeroConstraints >= effectiveLastN) { - VideoConstraints.NOTHING - } else { - allocationSettings.getConstraints(endpoint.id).also { - if (!it.isDisabled()) endpointsWithNonZeroConstraints++ - } - } - } -} - /** * Return the "effective" constraints for the given media sources, i.e. the constraints adjusted for LastN. */ -fun getEffectiveConstraints2(sources: List, allocationSettings: AllocationSettings): +fun getEffectiveConstraints(sources: List, allocationSettings: AllocationSettings): Map { // FIXME figure out before merge - is using source count instead of endpoints diff --git a/jvb/src/main/kotlin/org/jitsi/videobridge/cc/allocation/SingleSourceAllocation.kt b/jvb/src/main/kotlin/org/jitsi/videobridge/cc/allocation/SingleSourceAllocation.kt index 802419c1b8..e9a1fc9243 100644 --- a/jvb/src/main/kotlin/org/jitsi/videobridge/cc/allocation/SingleSourceAllocation.kt +++ b/jvb/src/main/kotlin/org/jitsi/videobridge/cc/allocation/SingleSourceAllocation.kt @@ -1,5 +1,6 @@ /* - * Copyright @ 2020 - present 8x8, Inc. + * Copyright @ 2021 - present 8x8, Inc. + * Copyright @ 2021 - Vowel, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -15,6 +16,7 @@ */ package org.jitsi.videobridge.cc.allocation +import org.jitsi.nlj.MediaSourceDesc import org.jitsi.nlj.RtpLayerDesc import org.jitsi.nlj.RtpLayerDesc.Companion.indexString import org.jitsi.nlj.VideoType @@ -31,12 +33,14 @@ import java.time.Clock * algorithm, as opposed to [SingleAllocation] which is the end result. * * @author George Politis + * @author Pawel Domas */ internal class SingleSourceAllocation( - val endpoint: MediaSourceContainer, - /** The constraints to use while allocating bandwidth to this endpoint. */ + val endpointId: String, + val mediaSource: MediaSourceDesc, + /** The constraints to use while allocating bandwidth to this media source. */ val constraints: VideoConstraints, - /** Whether the endpoint is on stage. */ + /** Whether the source is on stage. */ private val onStage: Boolean, diagnosticContext: DiagnosticContext, clock: Clock, @@ -45,7 +49,7 @@ internal class SingleSourceAllocation( /** * The immutable list of layers to be considered when allocating bandwidth. */ - val layers: Layers = selectLayers(endpoint, onStage, constraints, clock.instant().toEpochMilli()) + val layers: Layers = selectLayers(mediaSource, onStage, constraints, clock.instant().toEpochMilli()) /** * The index (into [layers] of the current target layer). It can be improved in the `improve()` step, if there is @@ -56,7 +60,7 @@ internal class SingleSourceAllocation( init { if (timeSeriesLogger.isTraceEnabled) { val ratesTimeSeriesPoint = diagnosticContext.makeTimeSeriesPoint("layers_considered") - .addField("remote_endpoint_id", endpoint.id) + .addField("remote_endpoint_id", endpointId) for ((l, bitrate) in layers.layers) { ratesTimeSeriesPoint.addField( "${indexString(l.index)}_${l.height}p_${l.frameRate}fps_bps", @@ -182,15 +186,15 @@ internal class SingleSourceAllocation( */ val result: SingleAllocation get() = SingleAllocation( - endpoint.id, - endpoint.mediaSource, + endpointId, + mediaSource, targetLayer?.layer, layers.idealLayer?.layer ) override fun toString(): String { return ( - "[id=" + endpoint.id + + "[id=" + endpointId + " constraints=" + constraints + " ratedPreferredIdx=" + layers.preferredIndex + " ratedTargetIdx=" + targetIdx @@ -258,19 +262,18 @@ internal class SingleSourceAllocation( */ private fun selectLayers( /** The endpoint which is the source of the stream(s). */ - endpoint: MediaSourceContainer, + source: MediaSourceDesc, onStage: Boolean, - /** The constraints that the receiver specified for [endpoint]. */ + /** The constraints that the receiver specified for [source]. */ constraints: VideoConstraints, nowMs: Long ): Layers { - val source = endpoint.mediaSource - if (constraints.maxHeight == 0 || source == null || !source.hasRtpLayers()) { + if (constraints.maxHeight == 0 || !source.hasRtpLayers()) { return Layers.noLayers } val layers = source.rtpLayers.map { LayerSnapshot(it, it.getBitrateBps(nowMs)) } - return when (endpoint.videoType) { + return when (source.videoType) { VideoType.CAMERA -> selectLayersForCamera(layers, constraints) VideoType.DESKTOP, VideoType.DESKTOP_HIGH_FPS -> selectLayersForScreensharing(layers, constraints, onStage) else -> Layers.noLayers @@ -340,7 +343,7 @@ private fun List.lastIndexWhich(predicate: (T) -> Boolean): Int { * consider frame rates at least as high as the preferred. In practice this means we consider * 180p/7.5fps, 180p/15fps, 180p/30fps, 360p/30fps and 720p/30fps. 
*/ -internal fun getPreferred(constraints: VideoConstraints): VideoConstraints { +private fun getPreferred(constraints: VideoConstraints): VideoConstraints { return if (constraints.maxHeight > 180 || !constraints.heightIsLimited()) { VideoConstraints(config.onstagePreferredHeightPx(), config.onstagePreferredFramerate()) } else { diff --git a/jvb/src/main/kotlin/org/jitsi/videobridge/cc/allocation/SingleSourceAllocation2.kt b/jvb/src/main/kotlin/org/jitsi/videobridge/cc/allocation/SingleSourceAllocation2.kt deleted file mode 100644 index d47801c7fa..0000000000 --- a/jvb/src/main/kotlin/org/jitsi/videobridge/cc/allocation/SingleSourceAllocation2.kt +++ /dev/null @@ -1,336 +0,0 @@ -/* - * Copyright @ 2021 - present 8x8, Inc. - * Copyright @ 2021 - Vowel, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.jitsi.videobridge.cc.allocation - -import org.jitsi.nlj.MediaSourceDesc -import org.jitsi.nlj.RtpLayerDesc -import org.jitsi.nlj.RtpLayerDesc.Companion.indexString -import org.jitsi.nlj.VideoType -import org.jitsi.utils.logging.DiagnosticContext -import org.jitsi.utils.logging.TimeSeriesLogger -import org.jitsi.utils.logging2.Logger -import org.jitsi.utils.logging2.LoggerImpl -import org.jitsi.videobridge.cc.config.BitrateControllerConfig.Companion.config -import java.lang.Integer.max -import java.time.Clock - -/** - * A bitrate allocation that pertains to a specific source. This is the internal representation used in the allocation - * algorithm, as opposed to [SingleAllocation] which is the end result. - * - * @author George Politis - * @author Pawel Domas - */ -internal class SingleSourceAllocation2( - val endpointId: String, - val mediaSource: MediaSourceDesc, - /** The constraints to use while allocating bandwidth to this media source. */ - val constraints: VideoConstraints, - /** Whether the source is on stage. */ - private val onStage: Boolean, - diagnosticContext: DiagnosticContext, - clock: Clock, - val logger: Logger = LoggerImpl(SingleSourceAllocation::class.qualifiedName) -) { - /** - * The immutable list of layers to be considered when allocating bandwidth. - */ - val layers: Layers = selectLayers(mediaSource, onStage, constraints, clock.instant().toEpochMilli()) - - /** - * The index (into [layers] of the current target layer). It can be improved in the `improve()` step, if there is - * enough bandwidth. - */ - var targetIdx = -1 - - init { - if (timeSeriesLogger.isTraceEnabled) { - val ratesTimeSeriesPoint = diagnosticContext.makeTimeSeriesPoint("layers_considered") - .addField("remote_endpoint_id", endpointId) - for ((l, bitrate) in layers.layers) { - ratesTimeSeriesPoint.addField( - "${indexString(l.index)}_${l.height}p_${l.frameRate}fps_bps", - bitrate - ) - } - timeSeriesLogger.trace(ratesTimeSeriesPoint) - } - } - - fun isOnStage() = onStage - fun hasReachedPreferred(): Boolean = targetIdx >= layers.preferredIndex - - /** - * Implements an "improve" step, incrementing [.targetIdx] to the next layer if there is sufficient - * bandwidth. 
Note that this works eagerly up until the "preferred" layer (if any), and as a single step from - * then on. - * - * @param remainingBps the additional bandwidth which is available on top of the bitrate of the current target - * layer. - * @return the bandwidth "consumed" by the method, i.e. the difference between the resulting and initial target - * bitrate. E.g. if the target bitrate goes from 100 to 300 as a result if the method call, it will return 200. - */ - fun improve(remainingBps: Long, allowOversending: Boolean): Long { - val initialTargetBitrate = targetBitrate - val maxBps = remainingBps + initialTargetBitrate - if (layers.isEmpty()) { - return 0 - } - if (targetIdx == -1 && layers.preferredIndex > -1 && onStage) { - // Boost on stage participant to preferred, if there's enough bw. - for (i in layers.indices) { - if (i > layers.preferredIndex || maxBps < layers[i].bitrate) { - break - } - targetIdx = i - } - } else { - // Try the next element in the ratedIndices array. - if (targetIdx + 1 < layers.size && layers[targetIdx + 1].bitrate < maxBps) { - targetIdx++ - } - } - if (targetIdx > -1) { - // If there's a higher layer available with a lower bitrate, skip to it. - // - // For example, if 1080p@15fps is configured as a better subjective quality than 720p@30fps (i.e. it sits - // on a higher index in the ratedIndices array) and the bitrate that we measure for the 1080p stream is less - // than the bitrate that we measure for the 720p stream, then we "jump over" the 720p stream and immediately - // select the 1080p stream. - // - // TODO further: Should we just prune the list of layers we consider to not include such layers? - for (i in layers.size - 1 downTo targetIdx + 1) { - if (layers[i].bitrate <= layers[targetIdx].bitrate) { - targetIdx = i - } - } - } - - // If oversending is allowed, look for a better layer which doesn't exceed maxBps by more than - // `maxOversendBitrate`. - if (allowOversending && layers.oversendIndex >= 0 && targetIdx < layers.oversendIndex) { - for (i in layers.oversendIndex downTo targetIdx + 1) { - if (layers[i].bitrate <= maxBps + config.maxOversendBitrateBps()) { - targetIdx = i - } - } - } - // If the stream is non-scalable enable oversending regardless of maxOversendBitrate - if (allowOversending && targetIdx < 0 && layers.oversendIndex >= 0 && layers.hasOnlyOneLayer()) { - logger.warn( - "Oversending above maxOversendBitrate, layer bitrate " + - "${layers.layers[layers.oversendIndex].bitrate} bps" - ) - targetIdx = layers.oversendIndex - } - - val resultingTargetBitrate = targetBitrate - return resultingTargetBitrate - initialTargetBitrate - } - - /** - * The source is suspended if we've not selected a layer AND the source has active layers. - * - * TODO: this is not exactly correct because it only looks at the layers we consider. E.g. if the receiver set - * a maxHeight=0 constraint for an endpoint, it will appear suspended. This is not critical, because this val is - * only used for logging. - */ - val isSuspended: Boolean - get() = targetIdx == -1 && layers.isNotEmpty() && layers[0].bitrate > 0 - - /** - * Gets the target bitrate (in bps) for this endpoint allocation, i.e. the bitrate of the currently chosen layer. - */ - val targetBitrate: Long - get() = targetLayer?.bitrate?.toLong() ?: 0 - - private val targetLayer: LayerSnapshot? - get() = layers.getOrNull(targetIdx) - - /** - * Gets the ideal bitrate (in bps) for this endpoint allocation, i.e. 
the bitrate of the layer the bridge would - * forward if there were no (bandwidth) constraints. - */ - val idealBitrate: Long - get() = layers.idealLayer?.bitrate?.toLong() ?: 0 - - /** - * Exposed for testing only. - */ - val preferredLayer: RtpLayerDesc? - get() = layers.preferredLayer?.layer - - /** - * Exposed for testing only. - */ - val oversendLayer: RtpLayerDesc? - get() = layers.oversendLayer?.layer - - /** - * Creates the final immutable result of this allocation. Should be called once the allocation algorithm has - * completed. - */ - val result: SingleAllocation - get() = SingleAllocation( - endpointId, - mediaSource, - targetLayer?.layer, - layers.idealLayer?.layer - ) - - override fun toString(): String { - return ( - "[id=" + endpointId + - " constraints=" + constraints + - " ratedPreferredIdx=" + layers.preferredIndex + - " ratedTargetIdx=" + targetIdx - ) - } - - /** - * Selects from a list of layers the ones which should be considered when allocating bandwidth, as well as the - * "preferred" and "oversend" layers. Logic specific to screensharing: we prioritize resolution over framerate, - * prioritize the highest layer over other endpoints (by setting the highest layer as "preferred"), and allow - * oversending up to the highest resolution (with low frame rate). - */ - private fun selectLayersForScreensharing( - layers: List, - constraints: VideoConstraints, - onStage: Boolean - ): Layers { - - var activeLayers = layers.filter { it.bitrate > 0 } - // No active layers usually happens when the source has just been signaled and we haven't received - // any packets yet. Add the layers here, so one gets selected and we can start forwarding sooner. - if (activeLayers.isEmpty()) activeLayers = layers - - // We select all layers that satisfy the constraints. - var selectedLayers = - if (!constraints.heightIsLimited()) { - activeLayers - } else { - activeLayers.filter { it.layer.height <= constraints.maxHeight } - } - // If no layers satisfy the constraints, we use the layers with the lowest resolution. - if (selectedLayers.isEmpty()) { - val minHeight = activeLayers.minOfOrNull { it.layer.height } ?: return Layers.noLayers - selectedLayers = activeLayers.filter { it.layer.height == minHeight } - - // This recognizes the structure used with VP9 (multiple encodings with the same resolution and unknown frame - // rate). In this case, we only want the low quality layer. Unless we're on stage, in which case we should - // consider all layers. - if (!onStage && selectedLayers.isNotEmpty() && selectedLayers[0].layer.frameRate < 0) { - selectedLayers = listOf(selectedLayers[0]) - } - } - - val oversendIdx = if (onStage && config.allowOversendOnStage()) { - val maxHeight = selectedLayers.maxOfOrNull { it.layer.height } ?: return Layers.noLayers - // Of all layers with the highest resolution select the one with lowest bitrate. In case of VP9 the layers - // are not necessarily ordered by bitrate. - val lowestBitrateLayer = selectedLayers.filter { it.layer.height == maxHeight }.minByOrNull { it.bitrate } - ?: return Layers.noLayers - selectedLayers.indexOf(lowestBitrateLayer) - } else { - -1 - } - return Layers(selectedLayers, selectedLayers.size - 1, oversendIdx) - } - - /** - * Selects from the layers of a [MediaSourceContainer] the ones which should be considered when allocating bandwidth for - * an endpoint. Also selects the indices of the "preferred" and "oversend" layers. - * - * @param endpoint the [MediaSourceContainer] that describes the available layers. 
- * @param constraints the constraints signaled for the endpoint. - * @return the ordered list of [endpoint]'s layers which should be considered when allocating bandwidth, as well as the - * indices of the "preferred" and "oversend" layers. - */ - private fun selectLayers( - /** The endpoint which is the source of the stream(s). */ - source: MediaSourceDesc, - onStage: Boolean, - /** The constraints that the receiver specified for [source]. */ - constraints: VideoConstraints, - nowMs: Long - ): Layers { - if (constraints.maxHeight == 0 || !source.hasRtpLayers()) { - return Layers.noLayers - } - val layers = source.rtpLayers.map { LayerSnapshot(it, it.getBitrateBps(nowMs)) } - - return when (source.videoType) { - VideoType.CAMERA -> selectLayersForCamera(layers, constraints) - VideoType.DESKTOP, VideoType.DESKTOP_HIGH_FPS -> selectLayersForScreensharing(layers, constraints, onStage) - else -> Layers.noLayers - } - } - - /** - * Selects from a list of layers the ones which should be considered when allocating bandwidth, as well as the - * "preferred" and "oversend" layers. Logic specific to a camera stream: once the "preferred" height is reached we - * require a high frame rate, with preconfigured values for the "preferred" height and frame rate, and we do not allow - * oversending. - */ - private fun selectLayersForCamera( - layers: List, - constraints: VideoConstraints, - ): Layers { - - val minHeight = layers.map { it.layer.height }.minOrNull() ?: return Layers.noLayers - val noActiveLayers = layers.none { (_, bitrate) -> bitrate > 0 } - val (preferredHeight, preferredFps) = getPreferred(constraints) - - val ratesList: MutableList = ArrayList() - // Initialize the list of layers to be considered. These are the layers that satisfy the constraints, with - // a couple of exceptions (see comments below). - for (layerSnapshot in layers) { - val layer = layerSnapshot.layer - val lessThanPreferredHeight = layer.height < preferredHeight - val lessThanOrEqualMaxHeight = layer.height <= constraints.maxHeight || !constraints.heightIsLimited() - // If frame rate is unknown, consider it to be sufficient. - val atLeastPreferredFps = layer.frameRate < 0 || layer.frameRate >= preferredFps - if (lessThanPreferredHeight || - (lessThanOrEqualMaxHeight && atLeastPreferredFps) || - layer.height == minHeight - ) { - // No active layers usually happens when the source has just been signaled and we haven't received - // any packets yet. Add the layers here, so one gets selected and we can start forwarding sooner. - if (noActiveLayers || layerSnapshot.bitrate > 0) { - ratesList.add(layerSnapshot) - } - } - } - - val effectivePreferredHeight = max(preferredHeight, minHeight) - val preferredIndex = ratesList.lastIndexWhich { it.layer.height <= effectivePreferredHeight } - return Layers(ratesList, preferredIndex, -1) - } - - companion object { - private val timeSeriesLogger = TimeSeriesLogger.getTimeSeriesLogger(BandwidthAllocator::class.java) - } -} - -/** - * Returns the index of the last element of this list which satisfies the given predicate, or -1 if no elements do. 
- */ -private fun List.lastIndexWhich(predicate: (T) -> Boolean): Int { - var lastIndex = -1 - forEachIndexed { i, e -> if (predicate(e)) lastIndex = i } - return lastIndex -} diff --git a/jvb/src/main/kotlin/org/jitsi/videobridge/relay/Relay.kt b/jvb/src/main/kotlin/org/jitsi/videobridge/relay/Relay.kt index 877816961e..9dea7bb3b3 100644 --- a/jvb/src/main/kotlin/org/jitsi/videobridge/relay/Relay.kt +++ b/jvb/src/main/kotlin/org/jitsi/videobridge/relay/Relay.kt @@ -68,7 +68,6 @@ import org.jitsi.videobridge.AbstractEndpoint import org.jitsi.videobridge.Conference import org.jitsi.videobridge.EncodingsManager import org.jitsi.videobridge.Endpoint -import org.jitsi.videobridge.MultiStreamConfig import org.jitsi.videobridge.PotentialPacketHandler import org.jitsi.videobridge.TransportConfig import org.jitsi.videobridge.message.BridgeChannelMessage @@ -524,21 +523,19 @@ class Relay @JvmOverloads constructor( fun relayMessageTransportConnected() { relayedEndpoints.values.forEach { e -> e.relayMessageTransportConnected() } - if (MultiStreamConfig.config.enabled) { - conference.endpoints.forEach { e -> - if (e is Endpoint || (e is RelayedEndpoint && e.relay.meshId != meshId)) { - e.mediaSources.forEach { msd: MediaSourceDesc -> - val sourceName = msd.sourceName!! // Source names are mandatory/enforced in multi stream mode - val videoType = msd.videoType - // Do not send the initial value for CAMERA, because it's the default - if (VideoType.CAMERA != videoType) { - val videoTypeMsg = SourceVideoTypeMessage( - videoType, - sourceName, - e.id - ) - sendMessage(videoTypeMsg) - } + conference.endpoints.forEach { e -> + if (e is Endpoint || (e is RelayedEndpoint && e.relay.meshId != meshId)) { + e.mediaSources.forEach { msd: MediaSourceDesc -> + val sourceName = msd.sourceName!! // Source names are mandatory/enforced in multi stream mode + val videoType = msd.videoType + // Do not send the initial value for CAMERA, because it's the default + if (VideoType.CAMERA != videoType) { + val videoTypeMsg = SourceVideoTypeMessage( + videoType, + sourceName, + e.id + ) + sendMessage(videoTypeMsg) } } } diff --git a/jvb/src/main/kotlin/org/jitsi/videobridge/relay/RelayMessageTransport.kt b/jvb/src/main/kotlin/org/jitsi/videobridge/relay/RelayMessageTransport.kt index 412967b1f2..bd537a5da6 100644 --- a/jvb/src/main/kotlin/org/jitsi/videobridge/relay/RelayMessageTransport.kt +++ b/jvb/src/main/kotlin/org/jitsi/videobridge/relay/RelayMessageTransport.kt @@ -21,7 +21,6 @@ import org.eclipse.jetty.websocket.client.WebSocketClient import org.eclipse.jetty.websocket.core.CloseStatus import org.jitsi.utils.logging2.Logger import org.jitsi.videobridge.AbstractEndpointMessageTransport -import org.jitsi.videobridge.MultiStreamConfig import org.jitsi.videobridge.VersionConfig import org.jitsi.videobridge.Videobridge import org.jitsi.videobridge.message.AddReceiverMessage @@ -151,60 +150,25 @@ class RelayMessageTransport( * @return */ override fun addReceiver(message: AddReceiverMessage): BridgeChannelMessage? 
{ - if (MultiStreamConfig.config.enabled) { - val sourceName = message.sourceName ?: run { - logger.error("Received AddReceiverMessage for with sourceName = null") - return null - } - val ep = relay.conference.findSourceOwner(sourceName) ?: run { - logger.warn("Received AddReceiverMessage for unknown or non-local: $sourceName") - return null - } - - ep.addReceiverV2(relay.id, sourceName, message.videoConstraints) - } else { - val epId = message.endpointId - val ep = relay.conference.getLocalEndpoint(epId) ?: run { - logger.warn("Received AddReceiverMessage for unknown or non-local epId $epId") - return null - } - - ep.addReceiver(relay.id, message.videoConstraints) - } - return null - } - - override fun videoType(message: VideoTypeMessage): BridgeChannelMessage? { - val epId = message.endpointId - if (epId == null) { - logger.warn("Received VideoTypeMessage over relay channel with no endpoint ID") + val sourceName = message.sourceName ?: run { + logger.error("Received AddReceiverMessage for with sourceName = null") return null } - - if (MultiStreamConfig.config.enabled) { - logger.error("Relay: unexpected video type message while in the multi-stream mode, eId=$epId") - return null - } - - val ep = relay.getEndpoint(epId) - - if (ep == null) { - logger.warn("Received VideoTypeMessage for unknown epId $epId") + val ep = relay.conference.findSourceOwner(sourceName) ?: run { + logger.warn("Received AddReceiverMessage for unknown or non-local: $sourceName") return null } - ep.setVideoType(message.videoType) - - relay.conference.sendMessageFromRelay(message, false, relay.meshId) + ep.addReceiver(relay.id, sourceName, message.videoConstraints) + return null + } + override fun videoType(message: VideoTypeMessage): BridgeChannelMessage? { + logger.error("Relay: unexpected video type message: ${message.toJson()}") return null } override fun sourceVideoType(message: SourceVideoTypeMessage): BridgeChannelMessage? 
{ - if (!MultiStreamConfig.config.enabled) { - return null - } - val epId = message.endpointId if (epId == null) { logger.warn("Received SourceVideoTypeMessage over relay channel with no endpoint ID") diff --git a/jvb/src/main/kotlin/org/jitsi/videobridge/relay/RelayedEndpoint.kt b/jvb/src/main/kotlin/org/jitsi/videobridge/relay/RelayedEndpoint.kt index cc2ecec3ac..4258585067 100644 --- a/jvb/src/main/kotlin/org/jitsi/videobridge/relay/RelayedEndpoint.kt +++ b/jvb/src/main/kotlin/org/jitsi/videobridge/relay/RelayedEndpoint.kt @@ -41,7 +41,6 @@ import org.jitsi.utils.logging2.Logger import org.jitsi.utils.logging2.cdebug import org.jitsi.videobridge.AbstractEndpoint import org.jitsi.videobridge.Conference -import org.jitsi.videobridge.MultiStreamConfig import org.jitsi.videobridge.cc.allocation.VideoConstraints import org.jitsi.videobridge.message.AddReceiverMessage import org.jitsi.videobridge.util.TaskPools @@ -160,13 +159,8 @@ class RelayedEndpoint( } fun relayMessageTransportConnected() { - if (MultiStreamConfig.config.enabled) { - maxReceiverVideoConstraintsMap.forEach { - (sourceName, constraints) -> - sendVideoConstraintsV2(sourceName, constraints) - } - } else { - sendVideoConstraints(maxReceiverVideoConstraints) + maxReceiverVideoConstraintsMap.forEach { (sourceName, constraints) -> + sendVideoConstraintsV2(sourceName, constraints) } } @@ -177,9 +171,7 @@ class RelayedEndpoint( override var mediaSources: Array get() = _mediaSources.getMediaSources() set(value) { - if (MultiStreamConfig.config.enabled) { - applyVideoTypeCache(value) - } + applyVideoTypeCache(value) val changed = _mediaSources.setMediaSources(value) val mergedMediaSources = _mediaSources.getMediaSources() val signaledMediaSources = if (value === mergedMediaSources) value.copy() else value diff --git a/jvb/src/main/resources/reference.conf b/jvb/src/main/resources/reference.conf index 9e794561f1..390449b22c 100644 --- a/jvb/src/main/resources/reference.conf +++ b/jvb/src/main/resources/reference.conf @@ -292,11 +292,6 @@ videobridge { } } - # The experimental multiple streams per endpoint support - multi-stream { - enabled = true - } - speech-activity { # The number of speakers to include in the list of recent speakers sent with dominant speaker change # notifications. 
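Note: with the multi-stream toggle removed from reference.conf above, the per-source paths are always active, and receiver constraints are keyed by source name rather than by endpoint id (see the RelayedEndpoint change earlier in this patch, where the map of source name -> constraints is replayed when the relay transport connects). The following is a minimal, self-contained Kotlin sketch of that pattern only; the type and function names here are illustrative stand-ins, not the bridge's actual API.

// Illustrative sketch: stand-in types for the bridge's per-source constraint handling.
// All names below (SourceConstraintsSender, updateConstraints, onTransportConnected)
// are hypothetical and exist only for this example.
data class VideoConstraints(val maxHeight: Int, val maxFrameRate: Double = -1.0)

class SourceConstraintsSender(
    private val send: (sourceName: String, constraints: VideoConstraints) -> Unit
) {
    // sourceName -> the maximum constraints any receiver has requested for that source.
    private val maxReceiverVideoConstraints = mutableMapOf<String, VideoConstraints>()

    // Record the latest aggregate constraints for a single named source.
    fun updateConstraints(sourceName: String, constraints: VideoConstraints) {
        maxReceiverVideoConstraints[sourceName] = constraints
    }

    // When the transport (re)connects, replay the current constraints per source,
    // mirroring the shape of RelayedEndpoint.relayMessageTransportConnected() above.
    fun onTransportConnected() {
        maxReceiverVideoConstraints.forEach { (sourceName, constraints) ->
            send(sourceName, constraints)
        }
    }
}

fun main() {
    val sender = SourceConstraintsSender { source, c -> println("AddReceiver $source -> $c") }
    sender.updateConstraints("A-v0", VideoConstraints(720))
    sender.updateConstraints("A-v1", VideoConstraints(180))
    sender.onTransportConnected()
}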
diff --git a/jvb/src/test/kotlin/org/jitsi/videobridge/MediaSourceFactoryTest.kt b/jvb/src/test/kotlin/org/jitsi/videobridge/MediaSourceFactoryTest.kt index 3646cda849..e896e0be8e 100644 --- a/jvb/src/test/kotlin/org/jitsi/videobridge/MediaSourceFactoryTest.kt +++ b/jvb/src/test/kotlin/org/jitsi/videobridge/MediaSourceFactoryTest.kt @@ -20,7 +20,6 @@ import io.kotest.assertions.throwables.shouldThrow import io.kotest.core.spec.IsolationMode import io.kotest.core.spec.style.ShouldSpec import io.kotest.matchers.shouldBe -import org.jitsi.config.setNewConfig import org.jitsi.videobridge.xmpp.MediaSourceFactory import org.jitsi.xmpp.extensions.colibri.SourcePacketExtension @@ -32,27 +31,13 @@ class MediaSourceFactoryTest : ShouldSpec() { init { context("MediaSourceFactory") { - context("when multi-stream support is enabled") { - setNewConfig(configWithMultiStreamEnabled, true) - - context("should throw an exception if there's no source name in the packet extension") { - val videoSource: SourcePacketExtension = createSource(1) - - val exception = shouldThrow { - MediaSourceFactory.createMediaSources(listOf(videoSource), emptyList()) - } - exception.message shouldBe "The 'name' is missing in the source description" - } - } - - context("when multi-stream support is disabled") { - setNewConfig(configWithMultiStreamDisabled, true) - - context("should NOT throw an exception if there's no source name in the packet extension") { - val videoSource: SourcePacketExtension = createSource(1) + context("should throw an exception if there's no source name in the packet extension") { + val videoSource: SourcePacketExtension = createSource(1) + val exception = shouldThrow { MediaSourceFactory.createMediaSources(listOf(videoSource), emptyList()) } + exception.message shouldBe "The 'name' is missing in the source description" } } } diff --git a/jvb/src/test/kotlin/org/jitsi/videobridge/MultiStreamConfigTest.kt b/jvb/src/test/kotlin/org/jitsi/videobridge/MultiStreamConfigTest.kt deleted file mode 100644 index ae8da65d6f..0000000000 --- a/jvb/src/test/kotlin/org/jitsi/videobridge/MultiStreamConfigTest.kt +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright @ 2021 - present 8x8, Inc. - * Copyright @ 2021 - Vowel, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.jitsi.videobridge - -import io.kotest.matchers.shouldBe -import org.jitsi.ConfigTest -import org.jitsi.config.withNewConfig - -class MultiStreamConfigTest : ConfigTest() { - init { - context("multi-stream-config") { - context("when enabled") { - withNewConfig(configWithMultiStreamEnabled) { - MultiStreamConfig.config.enabled shouldBe true - } - } - context("when disabled") { - withNewConfig(configWithMultiStreamDisabled) { - MultiStreamConfig.config.enabled shouldBe false - } - } - } - } -} - -val configWithMultiStreamEnabled = """ - videobridge.multi-stream.enabled = true -""".trimIndent() - -val configWithMultiStreamDisabled = """ - videobridge.multi-stream.enabled = false -""".trimIndent() diff --git a/jvb/src/test/kotlin/org/jitsi/videobridge/cc/allocation/BitrateControllerNewTest.kt b/jvb/src/test/kotlin/org/jitsi/videobridge/cc/allocation/BitrateControllerNewTest.kt deleted file mode 100644 index 66b6c86484..0000000000 --- a/jvb/src/test/kotlin/org/jitsi/videobridge/cc/allocation/BitrateControllerNewTest.kt +++ /dev/null @@ -1,1501 +0,0 @@ -/* - * Copyright @ 2018 - present 8x8, Inc. - * Copyright @ 2021 - Vowel, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.jitsi.videobridge.cc.allocation - -import io.kotest.core.spec.IsolationMode -import io.kotest.core.spec.Spec -import io.kotest.core.spec.style.ShouldSpec -import io.kotest.matchers.collections.shouldContainInOrder -import io.kotest.matchers.longs.shouldBeWithinPercentageOf -import io.kotest.matchers.shouldBe -import io.mockk.CapturingSlot -import io.mockk.every -import io.mockk.mockk -import org.jitsi.config.setNewConfig -import org.jitsi.nlj.MediaSourceDesc -import org.jitsi.nlj.PacketInfo -import org.jitsi.nlj.RtpEncodingDesc -import org.jitsi.nlj.VideoType -import org.jitsi.nlj.format.RtxPayloadType -import org.jitsi.nlj.rtp.VideoRtpPacket -import org.jitsi.nlj.util.bps -import org.jitsi.nlj.util.kbps -import org.jitsi.nlj.util.mbps -import org.jitsi.utils.logging.DiagnosticContext -import org.jitsi.utils.logging2.createLogger -import org.jitsi.utils.ms -import org.jitsi.utils.secs -import org.jitsi.utils.time.FakeClock -import org.jitsi.videobridge.cc.config.BitrateControllerConfig -import org.jitsi.videobridge.message.ReceiverVideoConstraintsMessage -import org.jitsi.videobridge.util.TaskPools -import java.util.concurrent.ScheduledExecutorService -import java.util.concurrent.TimeUnit -import java.util.function.Supplier - -class BitrateControllerNewTest : ShouldSpec() { - override fun isolationMode() = IsolationMode.InstancePerLeaf - - private val logger = createLogger() - private val clock = FakeClock() - private val bc = BitrateControllerWrapper2(createEndpoints2("A", "B", "C", "D"), clock = clock) - private val A = bc.endpoints.find { it.id == "A" }!! as TestEndpoint2 - private val B = bc.endpoints.find { it.id == "B" }!! as TestEndpoint2 - private val C = bc.endpoints.find { it.id == "C" }!! as TestEndpoint2 - private val D = bc.endpoints.find { it.id == "D" }!! 
as TestEndpoint2 - - override suspend fun beforeSpec(spec: Spec) = super.beforeSpec(spec).also { - // We disable the threshold, causing [BandwidthAllocator] to make a new decision every time BWE changes. This is - // because these tests are designed to test the decisions themselves and not necessarily when they are made. - setNewConfig( - """ - videobridge.cc { - bwe-change-threshold = 0 - // Effectively disable periodic updates. - max-time-between-calculations = 1 hour - } - """.trimIndent(), - true - ) - } - - override suspend fun afterSpec(spec: Spec) = super.afterSpec(spec).also { - bc.bc.expire() - setNewConfig("", true) - } - - init { - context("Expire") { - val captureDelay = CapturingSlot() - val captureDelayTimeunit = CapturingSlot() - val captureCancel = CapturingSlot() - val executor: ScheduledExecutorService = mockk { - every { schedule(any(), capture(captureDelay), capture(captureDelayTimeunit)) } returns mockk { - every { cancel(capture(captureCancel)) } returns true - } - } - TaskPools.SCHEDULED_POOL = executor - val bc = BitrateControllerWrapper2(createEndpoints2(), clock = clock) - val delayMs = TimeUnit.MILLISECONDS.convert(captureDelay.captured, captureDelayTimeunit.captured) - - delayMs.shouldBeWithinPercentageOf( - BitrateControllerConfig.config.maxTimeBetweenCalculations().toMillis(), - 10.0 - ) - - captureCancel.isCaptured shouldBe false - bc.bc.expire() - captureCancel.isCaptured shouldBe true - - TaskPools.resetScheduledPool() - } - context("Prioritization") { - context("Without selection") { - val sources = createSources("s6", "s5", "s4", "s3", "s2", "s1") - val ordered = prioritize2(sources) - ordered.map { it.sourceName } shouldBe listOf("s6", "s5", "s4", "s3", "s2", "s1") - } - context("With one selected") { - val sources = createSources("s6", "s5", "s4", "s3", "s2", "s1") - val ordered = prioritize2(sources, listOf("s2")) - ordered.map { it.sourceName } shouldBe listOf("s2", "s6", "s5", "s4", "s3", "s1") - } - context("With multiple selected") { - val sources = createSources("s6", "s5", "s4", "s3", "s2", "s1") - val ordered = prioritize2(sources, listOf("s2", "s1", "s5")) - ordered.map { it.sourceName } shouldBe listOf("s2", "s1", "s5", "s6", "s4", "s3") - } - } - - context("Allocation") { - context("Stage view") { - context("When LastN is not set") { - context("and the dominant speaker is on stage") { - listOf(true, false).forEach { screensharing -> - context("With ${if (screensharing) "screensharing" else "camera"}") { - if (screensharing) { - A.mediaSources[0].videoType = VideoType.DESKTOP - } - bc.setEndpointOrdering(A, B, C, D) - bc.setStageView("A-v0") - - bc.bc.allocationSettings.lastN shouldBe -1 - bc.bc.allocationSettings.selectedSources shouldBe emptyList() - bc.bc.allocationSettings.onStageSources shouldBe listOf("A-v0") - - runBweLoop() - - verifyStageView(screensharing) - } - } - } - context("and a non-dominant speaker is on stage") { - bc.setEndpointOrdering(B, A, C, D) - bc.setStageView("A-v0") - - bc.bc.allocationSettings.lastN shouldBe -1 - bc.bc.allocationSettings.selectedSources shouldBe emptyList() - bc.bc.allocationSettings.onStageSources shouldBe listOf("A-v0") - runBweLoop() - - verifyStageView() - } - } - context("When LastN=0") { - // LastN=0 is used when the client goes in "audio-only" mode. 
- bc.setEndpointOrdering(A, B, C, D) - bc.setStageView("A", lastN = 0) - - bc.bc.allocationSettings.lastN shouldBe 0 - bc.bc.allocationSettings.selectedSources shouldBe emptyList() - bc.bc.allocationSettings.onStageSources shouldBe listOf("A") - - runBweLoop() - - verifyLastN0() - } - context("When LastN=1") { - // LastN=1 is used when the client goes in "audio-only" mode, but someone starts a screenshare. - context("and the dominant speaker is on-stage") { - bc.setEndpointOrdering(A, B, C, D) - bc.setStageView("A-v0", lastN = 1) - - bc.bc.allocationSettings.lastN shouldBe 1 - bc.bc.allocationSettings.selectedSources shouldBe emptyList() - bc.bc.allocationSettings.onStageSources shouldBe listOf("A-v0") - - runBweLoop() - - verifyStageViewLastN1() - } - context("and a non-dominant speaker is on stage") { - bc.setEndpointOrdering(B, A, C, D) - bc.setStageView("A-v0", lastN = 1) - - bc.bc.allocationSettings.lastN shouldBe 1 - bc.bc.allocationSettings.selectedSources shouldBe emptyList() - bc.bc.allocationSettings.onStageSources shouldBe listOf("A-v0") - - runBweLoop() - - verifyStageViewLastN1() - } - } - } - context("Tile view") { - bc.setEndpointOrdering(A, B, C, D) - bc.setTileView("A-v0", "B-v0", "C-v0", "D-v0") - - bc.bc.allocationSettings.lastN shouldBe -1 - bc.bc.allocationSettings.selectedSources shouldBe - listOf("A-v0", "B-v0", "C-v0", "D-v0") - - context("When LastN is not set") { - runBweLoop() - - verifyTileView() - } - context("When LastN=0") { - bc.setTileView("A-v0", "B-v0", "C-v0", "D-v0", lastN = 0) - runBweLoop() - - verifyLastN0() - } - context("When LastN=1") { - bc.setTileView("A-v0", "B-v0", "C-v0", "D-v0", lastN = 1) - runBweLoop() - - verifyTileViewLastN1() - } - } - context("Tile view 360p") { - bc.setEndpointOrdering(A, B, C, D) - bc.setTileView("A-v0", "B-v0", "C-v0", "D-v0", maxFrameHeight = 360) - - bc.bc.allocationSettings.lastN shouldBe -1 - // The legacy API (currently used by jitsi-meet) uses "selected count > 0" to infer TileView, - // and in tile view we do not use selected endpoints. - bc.bc.allocationSettings.selectedSources shouldBe - listOf("A-v0", "B-v0", "C-v0", "D-v0") - - context("When LastN is not set") { - runBweLoop() - - verifyTileView360p() - } - context("When LastN=0") { - bc.setTileView("A-v0", "B-v0", "C-v0", "D-v0", lastN = 0, maxFrameHeight = 360) - runBweLoop() - - verifyLastN0() - } - context("When LastN=1") { - bc.setTileView("A-v0", "B-v0", "C-v0", "D-v0", lastN = 1, maxFrameHeight = 360) - runBweLoop() - - verifyTileViewLastN1(360) - } - } - context("Selected sources should override the dominant speaker (with new signaling)") { - // A is dominant speaker, A and B are selected. With LastN=2 we should always forward the selected - // sources regardless of who is speaking. - // The exact flow of this scenario was taken from a (non-jitsi-meet) client. 
- bc.setEndpointOrdering(A, B, C, D) - bc.bc.setBandwidthAllocationSettings( - ReceiverVideoConstraintsMessage( - selectedSources = listOf("A-v0", "B-v0"), - constraints = mapOf("A-v0" to VideoConstraints(720), "B-v0" to VideoConstraints(720)) - ) - ) - - bc.effectiveConstraintsHistory.last().event shouldBe mapOf( - "A-v0" to VideoConstraints(720), - "B-v0" to VideoConstraints(720), - "C-v0" to VideoConstraints(180), - "D-v0" to VideoConstraints(180) - ) - - bc.bc.setBandwidthAllocationSettings(ReceiverVideoConstraintsMessage(lastN = 2)) - bc.effectiveConstraintsHistory.last().event shouldBe mapOf( - "A-v0" to VideoConstraints(720), - "B-v0" to VideoConstraints(720), - "C-v0" to VideoConstraints(0), - "D-v0" to VideoConstraints(0) - ) - - bc.bc.allocationSettings.lastN shouldBe 2 - bc.bc.allocationSettings.selectedSources shouldBe listOf("A-v0", "B-v0") - - clock.elapse(20.secs) - bc.bwe = 10.mbps - bc.forwardedSourcesHistory.last().event.shouldBe(setOf("A-v0", "B-v0")) - - clock.elapse(2.secs) - // B becomes dominant speaker. - bc.setEndpointOrdering(B, A, C, D) - bc.forwardedSourcesHistory.last().event.shouldBe(setOf("A-v0", "B-v0")) - - clock.elapse(2.secs) - bc.bc.setBandwidthAllocationSettings( - ReceiverVideoConstraintsMessage( - constraints = mapOf("A-v0" to VideoConstraints(360), "B-v0" to VideoConstraints(360)) - ) - ) - bc.effectiveConstraintsHistory.last().event shouldBe mapOf( - "A-v0" to VideoConstraints(360), - "B-v0" to VideoConstraints(360), - "C-v0" to VideoConstraints(0), - "D-v0" to VideoConstraints(0) - ) - - clock.elapse(2.secs) - // This should change nothing, the selection didn't change. - bc.bc.setBandwidthAllocationSettings( - ReceiverVideoConstraintsMessage(selectedSources = listOf("A-v0", "B-v0")) - ) - bc.forwardedSourcesHistory.last().event.shouldBe(setOf("A-v0", "B-v0")) - - clock.elapse(2.secs) - bc.bc.setBandwidthAllocationSettings(ReceiverVideoConstraintsMessage(lastN = -1)) - bc.effectiveConstraintsHistory.last().event shouldBe mapOf( - "A-v0" to VideoConstraints(360), - "B-v0" to VideoConstraints(360), - "C-v0" to VideoConstraints(180), - "D-v0" to VideoConstraints(180) - ) - bc.forwardedSourcesHistory.last().event.shouldBe(setOf("A-v0", "B-v0", "C-v0", "D-v0")) - - bc.bc.setBandwidthAllocationSettings(ReceiverVideoConstraintsMessage(lastN = 2)) - bc.effectiveConstraintsHistory.last().event shouldBe mapOf( - "A-v0" to VideoConstraints(360), - "B-v0" to VideoConstraints(360), - "C-v0" to VideoConstraints(0), - "D-v0" to VideoConstraints(0) - ) - clock.elapse(2.secs) - bc.forwardedSourcesHistory.last().event.shouldBe(setOf("A-v0", "B-v0")) - - clock.elapse(2.secs) - // D is now dominant speaker, but it should not override the selected endpoints. - bc.setEndpointOrdering(D, B, A, C) - bc.forwardedSourcesHistory.last().event.shouldBe(setOf("A-v0", "B-v0")) - - bc.bwe = 10.mbps - bc.forwardedSourcesHistory.last().event.shouldBe(setOf("A-v0", "B-v0")) - - clock.elapse(2.secs) - bc.bwe = 0.mbps - clock.elapse(2.secs) - bc.bwe = 10.mbps - bc.forwardedSourcesHistory.last().event.shouldBe(setOf("A-v0", "B-v0")) - - clock.elapse(2.secs) - // C is now dominant speaker, but it should not override the selected endpoints. 
- bc.setEndpointOrdering(C, D, A, B) - bc.forwardedSourcesHistory.last().event.shouldBe(setOf("A-v0", "B-v0")) - } - } - } - - private fun runBweLoop() { - for (bwe in 0..5_000_000 step 10_000) { - bc.bwe = bwe.bps - clock.elapse(100.ms) - } - logger.info("Forwarded sources history: ${bc.forwardedSourcesHistory}") - logger.info("Effective constraints history: ${bc.effectiveConstraintsHistory}") - logger.info("Allocation history: ${bc.allocationHistory}") - } - - private fun verifyStageViewScreensharing() { - // At this stage the purpose of this is just to document current behavior. - // TODO: The results with bwe==-1 are wrong. - bc.forwardedSourcesHistory.map { it.event }.shouldContainInOrder( - setOf("A-v0"), - setOf("A-v0", "B-v0"), - setOf("A-v0", "B-v0", "C-v0"), - setOf("A-v0", "B-v0", "C-v0", "D-v0") - ) - - bc.effectiveConstraintsHistory.last().event shouldBe mapOf( - "A-v0" to VideoConstraints(720), - "B-v0" to VideoConstraints(180), - "C-v0" to VideoConstraints(180), - "D-v0" to VideoConstraints(180) - ) - - // At this stage the purpose of this is just to document current behavior. - // TODO: the allocations for bwe=-1 are wrong. - bc.allocationHistory.removeIf { it.bwe < 0.bps } - - bc.allocationHistory.shouldMatchInOrder( - // We expect to be oversending when screensharing is used. - Event( - 0.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = sd30), - SingleAllocation(B, targetLayer = noVideo), - SingleAllocation(C, targetLayer = noVideo), - SingleAllocation(D, targetLayer = noVideo) - ), - oversending = true - ) - ), - Event( - 160.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = hd7_5), - SingleAllocation(B, targetLayer = noVideo), - SingleAllocation(C, targetLayer = noVideo), - SingleAllocation(D, targetLayer = noVideo) - ), - oversending = true - ) - ), - Event( - 660.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = hd7_5), - SingleAllocation(B, targetLayer = noVideo), - SingleAllocation(C, targetLayer = noVideo), - SingleAllocation(D, targetLayer = noVideo) - ), - oversending = false - ) - ), - Event( - 1320.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = hd15), - SingleAllocation(B, targetLayer = noVideo), - SingleAllocation(C, targetLayer = noVideo), - SingleAllocation(D, targetLayer = noVideo) - ) - ) - ), - Event( - 2000.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = hd30), - SingleAllocation(B, targetLayer = noVideo), - SingleAllocation(C, targetLayer = noVideo), - SingleAllocation(D, targetLayer = noVideo) - ) - ) - ), - Event( - 2050.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = hd30), - SingleAllocation(B, targetLayer = ld7_5), - SingleAllocation(C, targetLayer = noVideo), - SingleAllocation(D, targetLayer = noVideo) - ) - ) - ), - Event( - 2100.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = hd30), - SingleAllocation(B, targetLayer = ld7_5), - SingleAllocation(C, targetLayer = ld7_5), - SingleAllocation(D, targetLayer = noVideo) - ) - ) - ), - Event( - 2150.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = hd30), - SingleAllocation(B, targetLayer = ld7_5), - SingleAllocation(C, targetLayer = ld7_5), - SingleAllocation(D, targetLayer = ld7_5) - ) - ) - ), - Event( - 2200.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = hd30), - SingleAllocation(B, targetLayer = ld15), - SingleAllocation(C, targetLayer = ld7_5), - SingleAllocation(D, 
targetLayer = ld7_5) - ) - ) - ), - Event( - 2250.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = hd30), - SingleAllocation(B, targetLayer = ld15), - SingleAllocation(C, targetLayer = ld15), - SingleAllocation(D, targetLayer = ld7_5) - ) - ) - ), - Event( - 2300.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = hd30), - SingleAllocation(B, targetLayer = ld15), - SingleAllocation(C, targetLayer = ld15), - SingleAllocation(D, targetLayer = ld15) - ) - ) - ), - Event( - 2350.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = hd30), - SingleAllocation(B, targetLayer = ld30), - SingleAllocation(C, targetLayer = ld15), - SingleAllocation(D, targetLayer = ld15) - ) - ) - ), - Event( - 2400.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = hd30), - SingleAllocation(B, targetLayer = ld30), - SingleAllocation(C, targetLayer = ld30), - SingleAllocation(D, targetLayer = ld15) - ) - ) - ), - Event( - 2460.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = hd30), - SingleAllocation(B, targetLayer = ld30), - SingleAllocation(C, targetLayer = ld30), - SingleAllocation(D, targetLayer = ld30) - ) - ) - ) - ) - } - - private fun verifyStageView(screensharing: Boolean = false) { - when (screensharing) { - true -> verifyStageViewScreensharing() - false -> verifyStageViewCamera() - } - } - - private fun verifyStageViewCamera() { - // At this stage the purpose of this is just to document current behavior. - // TODO: The results with bwe==-1 are wrong. - bc.forwardedSourcesHistory.removeIf { it.bwe < 0.bps } - bc.forwardedSourcesHistory.map { it.event }.shouldContainInOrder( - setOf("A-v0"), - setOf("A-v0", "B-v0"), - setOf("A-v0", "B-v0", "C-v0"), - setOf("A-v0", "B-v0", "C-v0", "D-v0") - ) - // TODO add forwarded sources history here - - bc.effectiveConstraintsHistory.last().event shouldBe mapOf( - "A-v0" to VideoConstraints(720), - "B-v0" to VideoConstraints(180), - "C-v0" to VideoConstraints(180), - "D-v0" to VideoConstraints(180) - ) - - // At this stage the purpose of this is just to document current behavior. - // TODO: the allocations for bwe=-1 are wrong. 
- bc.allocationHistory.removeIf { it.bwe < 0.bps } - - bc.allocationHistory.shouldMatchInOrder( - Event( - 50.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = ld7_5), - SingleAllocation(B, targetLayer = noVideo), - SingleAllocation(C, targetLayer = noVideo), - SingleAllocation(D, targetLayer = noVideo) - ), - oversending = false - ) - ), - Event( - 100.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = ld15), - SingleAllocation(B, targetLayer = noVideo), - SingleAllocation(C, targetLayer = noVideo), - SingleAllocation(D, targetLayer = noVideo) - ) - ) - ), - Event( - 150.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = ld30), - SingleAllocation(B, targetLayer = noVideo), - SingleAllocation(C, targetLayer = noVideo), - SingleAllocation(D, targetLayer = noVideo) - ) - ) - ), - Event( - 500.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = sd30), - SingleAllocation(B, targetLayer = noVideo), - SingleAllocation(C, targetLayer = noVideo), - SingleAllocation(D, targetLayer = noVideo) - ) - ) - ), - Event( - 550.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = sd30), - SingleAllocation(B, targetLayer = ld7_5), - SingleAllocation(C, targetLayer = noVideo), - SingleAllocation(D, targetLayer = noVideo) - ) - ) - ), - Event( - 600.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = sd30), - SingleAllocation(B, targetLayer = ld7_5), - SingleAllocation(C, targetLayer = ld7_5), - SingleAllocation(D, targetLayer = noVideo) - ) - ) - ), - Event( - 650.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = sd30), - SingleAllocation(B, targetLayer = ld7_5), - SingleAllocation(C, targetLayer = ld7_5), - SingleAllocation(D, targetLayer = ld7_5) - ) - ) - ), - Event( - 700.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = sd30), - SingleAllocation(B, targetLayer = ld15), - SingleAllocation(C, targetLayer = ld7_5), - SingleAllocation(D, targetLayer = ld7_5) - ) - ) - ), - Event( - 750.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = sd30), - SingleAllocation(B, targetLayer = ld15), - SingleAllocation(C, targetLayer = ld15), - SingleAllocation(D, targetLayer = ld7_5) - ) - ) - ), - Event( - 800.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = sd30), - SingleAllocation(B, targetLayer = ld15), - SingleAllocation(C, targetLayer = ld15), - SingleAllocation(D, targetLayer = ld15) - ) - ) - ), - Event( - 850.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = sd30), - SingleAllocation(B, targetLayer = ld30), - SingleAllocation(C, targetLayer = ld15), - SingleAllocation(D, targetLayer = ld15) - ) - ) - ), - Event( - 900.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = sd30), - SingleAllocation(B, targetLayer = ld30), - SingleAllocation(C, targetLayer = ld30), - SingleAllocation(D, targetLayer = ld15) - ) - ) - ), - Event( - 960.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = sd30), - SingleAllocation(B, targetLayer = ld30), - SingleAllocation(C, targetLayer = ld30), - SingleAllocation(D, targetLayer = ld30) - ) - ) - ), - Event( - 2150.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = hd30), - SingleAllocation(B, targetLayer = ld7_5), - SingleAllocation(C, targetLayer = ld7_5), - SingleAllocation(D, targetLayer = ld7_5) - ) - ) - ), - Event( - 2200.kbps, - BandwidthAllocation( - setOf( - 
SingleAllocation(A, targetLayer = hd30), - SingleAllocation(B, targetLayer = ld15), - SingleAllocation(C, targetLayer = ld7_5), - SingleAllocation(D, targetLayer = ld7_5) - ) - ) - ), - Event( - 2250.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = hd30), - SingleAllocation(B, targetLayer = ld15), - SingleAllocation(C, targetLayer = ld15), - SingleAllocation(D, targetLayer = ld7_5) - ) - ) - ), - Event( - 2300.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = hd30), - SingleAllocation(B, targetLayer = ld15), - SingleAllocation(C, targetLayer = ld15), - SingleAllocation(D, targetLayer = ld15) - ) - ) - ), - Event( - 2350.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = hd30), - SingleAllocation(B, targetLayer = ld30), - SingleAllocation(C, targetLayer = ld15), - SingleAllocation(D, targetLayer = ld15) - ) - ) - ), - Event( - 2400.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = hd30), - SingleAllocation(B, targetLayer = ld30), - SingleAllocation(C, targetLayer = ld30), - SingleAllocation(D, targetLayer = ld15) - ) - ) - ), - Event( - 2460.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = hd30), - SingleAllocation(B, targetLayer = ld30), - SingleAllocation(C, targetLayer = ld30), - SingleAllocation(D, targetLayer = ld30) - ) - ) - ) - ) - } - - private fun verifyLastN0() { - // No video forwarded even with high BWE. - bc.forwardedSourcesHistory.size shouldBe 0 - - bc.effectiveConstraintsHistory.last().event shouldBe mapOf( - "A-v0" to VideoConstraints(0), - "B-v0" to VideoConstraints(0), - "C-v0" to VideoConstraints(0), - "D-v0" to VideoConstraints(0) - ) - - // TODO: The history contains 3 identical elements, which is probably a bug. - bc.allocationHistory.last().event.shouldMatch( - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = noVideo), - SingleAllocation(B, targetLayer = noVideo), - SingleAllocation(C, targetLayer = noVideo), - SingleAllocation(D, targetLayer = noVideo) - ) - ) - ) - } - - private fun verifyStageViewLastN1() { - // At this stage the purpose of this is just to document current behavior. - // TODO: The results with bwe==-1 are wrong. - bc.forwardedSourcesHistory.removeIf { it.bwe < 0.bps } - - bc.forwardedSourcesHistory.map { it.event }.shouldContainInOrder( - setOf("A-v0") - ) - - bc.effectiveConstraintsHistory.last().event shouldBe mapOf( - "A-v0" to VideoConstraints(720), - "B-v0" to VideoConstraints(0), - "C-v0" to VideoConstraints(0), - "D-v0" to VideoConstraints(0) - ) - - // At this stage the purpose of this is just to document current behavior. - // TODO: the allocations for bwe=-1 are wrong. 
- bc.allocationHistory.removeIf { it.bwe < 0.bps } - - bc.allocationHistory.shouldMatchInOrder( - Event( - 50.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = ld7_5), - SingleAllocation(B, targetLayer = noVideo), - SingleAllocation(C, targetLayer = noVideo), - SingleAllocation(D, targetLayer = noVideo) - ) - ) - ), - Event( - 100.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = ld15), - SingleAllocation(B, targetLayer = noVideo), - SingleAllocation(C, targetLayer = noVideo), - SingleAllocation(D, targetLayer = noVideo) - ) - ) - ), - Event( - 150.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = ld30), - SingleAllocation(B, targetLayer = noVideo), - SingleAllocation(C, targetLayer = noVideo), - SingleAllocation(D, targetLayer = noVideo) - ) - ) - ), - Event( - 500.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = sd30), - SingleAllocation(B, targetLayer = noVideo), - SingleAllocation(C, targetLayer = noVideo), - SingleAllocation(D, targetLayer = noVideo) - ) - ) - ), - Event( - 2010.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = hd30), - SingleAllocation(B, targetLayer = noVideo), - SingleAllocation(C, targetLayer = noVideo), - SingleAllocation(D, targetLayer = noVideo) - ) - ) - ) - ) - } - - private fun verifyTileView() { - // At this stage the purpose of this is just to document current behavior. - // TODO: The results with bwe==-1 are wrong. - bc.forwardedSourcesHistory.removeIf { it.bwe < 0.bps } - bc.forwardedSourcesHistory.map { it.event }.shouldContainInOrder( - setOf("A-v0"), - setOf("A-v0", "B-v0"), - setOf("A-v0", "B-v0", "C-v0"), - setOf("A-v0", "B-v0", "C-v0", "D-v0") - ) - - bc.allocationHistory.shouldMatchInOrder( - Event( - (-1).bps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = noVideo), - SingleAllocation(B, targetLayer = noVideo), - SingleAllocation(C, targetLayer = noVideo), - SingleAllocation(D, targetLayer = noVideo) - ) - ) - ), - Event( - 50.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = ld7_5), - SingleAllocation(B, targetLayer = noVideo), - SingleAllocation(C, targetLayer = noVideo), - SingleAllocation(D, targetLayer = noVideo) - ) - ) - ), - Event( - 100.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = ld7_5), - SingleAllocation(B, targetLayer = ld7_5), - SingleAllocation(C, targetLayer = noVideo), - SingleAllocation(D, targetLayer = noVideo) - ) - ) - ), - Event( - 150.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = ld7_5), - SingleAllocation(B, targetLayer = ld7_5), - SingleAllocation(C, targetLayer = ld7_5), - SingleAllocation(D, targetLayer = noVideo) - ) - ) - ), - Event( - 200.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = ld7_5), - SingleAllocation(B, targetLayer = ld7_5), - SingleAllocation(C, targetLayer = ld7_5), - SingleAllocation(D, targetLayer = ld7_5) - ) - ) - ), - Event( - 250.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = ld15), - SingleAllocation(B, targetLayer = ld7_5), - SingleAllocation(C, targetLayer = ld7_5), - SingleAllocation(D, targetLayer = ld7_5) - ) - ) - ), - Event( - 300.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = ld15), - SingleAllocation(B, targetLayer = ld15), - SingleAllocation(C, targetLayer = ld7_5), - SingleAllocation(D, targetLayer = ld7_5) - ) - ) - ), - Event( - 350.kbps, - BandwidthAllocation( - setOf( - 
SingleAllocation(A, targetLayer = ld15), - SingleAllocation(B, targetLayer = ld15), - SingleAllocation(C, targetLayer = ld15), - SingleAllocation(D, targetLayer = ld7_5) - ) - ) - ), - Event( - 400.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = ld15), - SingleAllocation(B, targetLayer = ld15), - SingleAllocation(C, targetLayer = ld15), - SingleAllocation(D, targetLayer = ld15) - ) - ) - ), - Event( - 450.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = ld30), - SingleAllocation(B, targetLayer = ld15), - SingleAllocation(C, targetLayer = ld15), - SingleAllocation(D, targetLayer = ld15) - ) - ) - ), - Event( - 500.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = ld30), - SingleAllocation(B, targetLayer = ld30), - SingleAllocation(C, targetLayer = ld15), - SingleAllocation(D, targetLayer = ld15) - ) - ) - ), - Event( - 550.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = ld30), - SingleAllocation(B, targetLayer = ld30), - SingleAllocation(C, targetLayer = ld30), - SingleAllocation(D, targetLayer = ld15) - ) - ) - ), - Event( - 610.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = ld30), - SingleAllocation(B, targetLayer = ld30), - SingleAllocation(C, targetLayer = ld30), - SingleAllocation(D, targetLayer = ld30) - ) - ) - ) - ) - } - - private fun verifyTileView360p() { - // At this stage the purpose of this is just to document current behavior. - // TODO: The results with bwe==-1 are wrong. - bc.forwardedSourcesHistory.removeIf { it.bwe < 0.bps } - bc.forwardedSourcesHistory.map { it.event }.shouldContainInOrder( - setOf("A-v0"), - setOf("A-v0", "B-v0"), - setOf("A-v0", "B-v0", "C-v0"), - setOf("A-v0", "B-v0", "C-v0", "D-v0") - ) - - bc.allocationHistory.shouldMatchInOrder( - Event( - (-1).bps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = noVideo), - SingleAllocation(B, targetLayer = noVideo), - SingleAllocation(C, targetLayer = noVideo), - SingleAllocation(D, targetLayer = noVideo) - ) - ) - ), - Event( - 50.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = ld7_5), - SingleAllocation(B, targetLayer = noVideo), - SingleAllocation(C, targetLayer = noVideo), - SingleAllocation(D, targetLayer = noVideo) - ), - oversending = false - ) - ), - Event( - 100.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = ld7_5), - SingleAllocation(B, targetLayer = ld7_5), - SingleAllocation(C, targetLayer = noVideo), - SingleAllocation(D, targetLayer = noVideo) - ) - ) - ), - Event( - 150.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = ld7_5), - SingleAllocation(B, targetLayer = ld7_5), - SingleAllocation(C, targetLayer = ld7_5), - SingleAllocation(D, targetLayer = noVideo) - ) - ) - ), - Event( - 200.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = ld7_5), - SingleAllocation(B, targetLayer = ld7_5), - SingleAllocation(C, targetLayer = ld7_5), - SingleAllocation(D, targetLayer = ld7_5) - ) - ) - ), - Event( - 250.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = ld15), - SingleAllocation(B, targetLayer = ld7_5), - SingleAllocation(C, targetLayer = ld7_5), - SingleAllocation(D, targetLayer = ld7_5) - ) - ) - ), - Event( - 300.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = ld15), - SingleAllocation(B, targetLayer = ld15), - SingleAllocation(C, targetLayer = ld7_5), - SingleAllocation(D, targetLayer = ld7_5) - ) - ) - ), - 
Event( - 350.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = ld15), - SingleAllocation(B, targetLayer = ld15), - SingleAllocation(C, targetLayer = ld15), - SingleAllocation(D, targetLayer = ld7_5) - ) - ) - ), - Event( - 400.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = ld15), - SingleAllocation(B, targetLayer = ld15), - SingleAllocation(C, targetLayer = ld15), - SingleAllocation(D, targetLayer = ld15) - ) - ) - ), - Event( - 450.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = ld30), - SingleAllocation(B, targetLayer = ld15), - SingleAllocation(C, targetLayer = ld15), - SingleAllocation(D, targetLayer = ld15) - ) - ) - ), - Event( - 500.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = ld30), - SingleAllocation(B, targetLayer = ld30), - SingleAllocation(C, targetLayer = ld15), - SingleAllocation(D, targetLayer = ld15) - ) - ) - ), - Event( - 550.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = ld30), - SingleAllocation(B, targetLayer = ld30), - SingleAllocation(C, targetLayer = ld30), - SingleAllocation(D, targetLayer = ld15) - ) - ) - ), - Event( - 610.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = ld30), - SingleAllocation(B, targetLayer = ld30), - SingleAllocation(C, targetLayer = ld30), - SingleAllocation(D, targetLayer = ld30) - ) - ) - ), - Event( - 960.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = sd30), - SingleAllocation(B, targetLayer = ld30), - SingleAllocation(C, targetLayer = ld30), - SingleAllocation(D, targetLayer = ld30) - ) - ) - ), - Event( - 1310.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = sd30), - SingleAllocation(B, targetLayer = sd30), - SingleAllocation(C, targetLayer = ld30), - SingleAllocation(D, targetLayer = ld30) - ) - ) - ), - Event( - 1660.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = sd30), - SingleAllocation(B, targetLayer = sd30), - SingleAllocation(C, targetLayer = sd30), - SingleAllocation(D, targetLayer = ld30) - ) - ) - ), - Event( - 2010.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = sd30), - SingleAllocation(B, targetLayer = sd30), - SingleAllocation(C, targetLayer = sd30), - SingleAllocation(D, targetLayer = sd30) - ) - ) - ) - ) - } - - private fun verifyTileViewLastN1(maxFrameHeight: Int = 180) { - // At this stage the purpose of this is just to document current behavior. - // TODO: The results with bwe==-1 are wrong. - bc.forwardedSourcesHistory.removeIf { it.bwe < 0.bps } - bc.forwardedSourcesHistory.map { it.event }.shouldContainInOrder( - setOf("A-v0") - ) - - bc.effectiveConstraintsHistory.last().event shouldBe mapOf( - "A-v0" to VideoConstraints(maxFrameHeight), - "B-v0" to VideoConstraints(0), - "C-v0" to VideoConstraints(0), - "D-v0" to VideoConstraints(0) - ) - - val expectedAllocationHistory = mutableListOf( - // TODO: do we want to oversend in tile view? 
- Event( - (-1).bps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = noVideo), - SingleAllocation(B, targetLayer = noVideo), - SingleAllocation(C, targetLayer = noVideo), - SingleAllocation(D, targetLayer = noVideo) - ), - oversending = true - ) - ), - Event( - 50.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = ld7_5), - SingleAllocation(B, targetLayer = noVideo), - SingleAllocation(C, targetLayer = noVideo), - SingleAllocation(D, targetLayer = noVideo) - ) - ) - ), - Event( - 100.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = ld15), - SingleAllocation(B, targetLayer = noVideo), - SingleAllocation(C, targetLayer = noVideo), - SingleAllocation(D, targetLayer = noVideo) - ) - ) - ), - Event( - 160.kbps, // TODO: why 160 instead of 150? weird. - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = ld30), - SingleAllocation(B, targetLayer = noVideo), - SingleAllocation(C, targetLayer = noVideo), - SingleAllocation(D, targetLayer = noVideo) - ) - ) - ) - ) - if (maxFrameHeight > 180) { - expectedAllocationHistory.addAll( - listOf( - Event( - 510.kbps, - BandwidthAllocation( - setOf( - SingleAllocation(A, targetLayer = sd30), - SingleAllocation(B, targetLayer = noVideo), - SingleAllocation(C, targetLayer = noVideo), - SingleAllocation(D, targetLayer = noVideo) - ) - ) - ) - ) - ) - } - bc.allocationHistory.shouldMatchInOrder(*expectedAllocationHistory.toTypedArray()) - } -} - -class BitrateControllerWrapper2(initialEndpoints: List, val clock: FakeClock = FakeClock()) { - var endpoints: List = initialEndpoints - val logger = createLogger() - - var bwe = (-1).bps - set(value) { - logger.debug("Setting bwe=$value") - field = value - bc.bandwidthChanged(value.bps.toLong()) - } - - // Save the output. - val effectiveConstraintsHistory: History> = mutableListOf() - val forwardedSourcesHistory: History> = mutableListOf() - val allocationHistory: History = mutableListOf() - - val bc = BitrateController( - object : BitrateController.EventHandler { - override fun forwardedEndpointsChanged(forwardedEndpoints: Set) { } - - override fun forwardedSourcesChanged(forwardedSources: Set) { - Event(bwe, forwardedSources, clock.instant()).apply { - logger.info("Forwarded sources changed: $this") - forwardedSourcesHistory.add(this) - } - } - - override fun sourceListChanged(sourceList: List) { - } - - override fun effectiveVideoConstraintsChanged( - oldEffectiveConstraints: Map, - newEffectiveConstraints: Map - ) { - Event(bwe, newEffectiveConstraints, clock.instant()).apply { - logger.info("Effective constraints changed: $this") - effectiveConstraintsHistory.add(this) - } - } - - override fun keyframeNeeded(endpointId: String?, ssrc: Long) {} - - override fun allocationChanged(allocation: BandwidthAllocation) { - Event( - bwe, - allocation, - clock.instant() - ).apply { - logger.info("Allocation changed: $this") - allocationHistory.add(this) - } - } - }, - Supplier { endpoints }, - DiagnosticContext(), - logger, - true, // TODO merge BitrateControllerNewTest with old and use this flag - false, - clock - ) - - fun setEndpointOrdering(vararg endpoints: TestEndpoint2) { - logger.info("Set endpoints ${endpoints.map{ it.id }.joinToString(",")}") - this.endpoints = endpoints.toList() - bc.endpointOrderingChanged() - } - - fun setStageView(onStageSource: String, lastN: Int? 
= null) { - bc.setBandwidthAllocationSettings( - ReceiverVideoConstraintsMessage( - lastN = lastN, - onStageSources = listOf(onStageSource), - constraints = mapOf(onStageSource to VideoConstraints(720)) - ) - ) - } - - fun setTileView( - vararg selectedSources: String, - maxFrameHeight: Int = 180, - lastN: Int? = null - ) { - bc.setBandwidthAllocationSettings( - ReceiverVideoConstraintsMessage( - lastN = lastN, - selectedSources = listOf(*selectedSources), - constraints = selectedSources.map { it to VideoConstraints(maxFrameHeight) }.toMap() - ) - ) - } - - init { - // The BC only starts working 10 seconds after it first received media, so fake that. - bc.transformRtp(PacketInfo(VideoRtpPacket(ByteArray(100), 0, 100))) - clock.elapse(15.secs) - - // Adaptivity is disabled when RTX support is not signalled. - bc.addPayloadType(RtxPayloadType(123, mapOf("apt" to "124"))) - } -} - -class TestEndpoint2( - override val id: String, - override val mediaSources: Array = emptyArray(), - override val videoType: VideoType = VideoType.CAMERA, - override val mediaSource: MediaSourceDesc? = null, -) : MediaSourceContainer - -fun createEndpoints2(vararg ids: String): MutableList { - return MutableList(ids.size) { i -> - TestEndpoint2( - ids[i], - arrayOf( - createSourceDesc( - 3 * i + 1, - 3 * i + 2, - 3 * i + 3, - ids[i] + "-v0", - ids[i] - ) - ) - ) - } -} - -fun createSources(vararg ids: String): MutableList { - return MutableList(ids.size) { i -> - createSourceDesc( - 3 * i + 1, - 3 * i + 2, - 3 * i + 3, - ids[i], - null - ) - } -} - -fun createSourceDesc( - ssrc1: Int, - ssrc2: Int, - ssrc3: Int, - sourceName: String, - owner: String? -): MediaSourceDesc = MediaSourceDesc( - arrayOf( - RtpEncodingDesc(ssrc1.toLong(), arrayOf(ld7_5, ld15, ld30)), - RtpEncodingDesc(ssrc2.toLong(), arrayOf(sd7_5, sd15, sd30)), - RtpEncodingDesc(ssrc3.toLong(), arrayOf(hd7_5, hd15, hd30)) - ), - sourceName = sourceName, - owner = owner -) diff --git a/jvb/src/test/kotlin/org/jitsi/videobridge/cc/allocation/BitrateControllerPerfTest.kt b/jvb/src/test/kotlin/org/jitsi/videobridge/cc/allocation/BitrateControllerPerfTest.kt index e12579d104..18b8082e9a 100644 --- a/jvb/src/test/kotlin/org/jitsi/videobridge/cc/allocation/BitrateControllerPerfTest.kt +++ b/jvb/src/test/kotlin/org/jitsi/videobridge/cc/allocation/BitrateControllerPerfTest.kt @@ -15,7 +15,10 @@ */ package org.jitsi.videobridge.cc.allocation +import io.kotest.assertions.withClue import io.kotest.core.spec.style.StringSpec +import io.kotest.matchers.nulls.shouldNotBeNull +import io.kotest.matchers.shouldBe import org.jitsi.nlj.MediaSourceDesc import org.jitsi.nlj.PacketInfo import org.jitsi.nlj.format.RtxPayloadType @@ -149,3 +152,26 @@ class BitrateControllerPerfTest : StringSpec() { } const val NUM_SPEAKER_CHANGES = 1_000_000 + +fun BandwidthAllocation.shouldMatch(other: BandwidthAllocation) { + allocations.size shouldBe other.allocations.size + allocations.forEach { thisSingleAllocation -> + withClue("Allocation for ${thisSingleAllocation.endpointId}") { + val otherSingleAllocation = other.allocations.find { it.endpointId == thisSingleAllocation.endpointId } + otherSingleAllocation.shouldNotBeNull() + thisSingleAllocation.targetLayer?.height shouldBe otherSingleAllocation.targetLayer?.height + thisSingleAllocation.targetLayer?.frameRate shouldBe otherSingleAllocation.targetLayer?.frameRate + } + } +} + +fun List>.shouldMatchInOrder(vararg events: Event) { + size shouldBe events.size + events.forEachIndexed { i, it -> + this[i].bwe shouldBe it.bwe + 
withClue("bwe=${it.bwe}") { + this[i].event.shouldMatch(it.event) + } + // Ignore this.time + } +} diff --git a/jvb/src/test/kotlin/org/jitsi/videobridge/cc/allocation/BitrateControllerTest.kt b/jvb/src/test/kotlin/org/jitsi/videobridge/cc/allocation/BitrateControllerTest.kt index 6762255094..af42e00021 100644 --- a/jvb/src/test/kotlin/org/jitsi/videobridge/cc/allocation/BitrateControllerTest.kt +++ b/jvb/src/test/kotlin/org/jitsi/videobridge/cc/allocation/BitrateControllerTest.kt @@ -1,5 +1,6 @@ /* * Copyright @ 2018 - present 8x8, Inc. + * Copyright @ 2021 - Vowel, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -13,16 +14,17 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package org.jitsi.videobridge.cc.allocation -import io.kotest.assertions.withClue import io.kotest.core.spec.IsolationMode import io.kotest.core.spec.Spec import io.kotest.core.spec.style.ShouldSpec import io.kotest.matchers.collections.shouldContainInOrder -import io.kotest.matchers.nulls.shouldNotBeNull +import io.kotest.matchers.longs.shouldBeWithinPercentageOf import io.kotest.matchers.shouldBe +import io.mockk.CapturingSlot +import io.mockk.every +import io.mockk.mockk import org.jitsi.config.setNewConfig import org.jitsi.nlj.MediaSourceDesc import org.jitsi.nlj.PacketInfo @@ -40,29 +42,28 @@ import org.jitsi.utils.logging2.createLogger import org.jitsi.utils.ms import org.jitsi.utils.secs import org.jitsi.utils.time.FakeClock -import org.jitsi.videobridge.configWithMultiStreamDisabled +import org.jitsi.videobridge.cc.config.BitrateControllerConfig import org.jitsi.videobridge.message.ReceiverVideoConstraintsMessage +import org.jitsi.videobridge.util.TaskPools import java.time.Instant +import java.util.concurrent.ScheduledExecutorService +import java.util.concurrent.TimeUnit import java.util.function.Supplier -// This tests the old flow which runs with multi stream flag disabled. -// TODO remove the old flow and unify BitrateControllerTest and BitrateControllerNewTest class BitrateControllerTest : ShouldSpec() { override fun isolationMode() = IsolationMode.InstancePerLeaf private val logger = createLogger() private val clock = FakeClock() private val bc = BitrateControllerWrapper(createEndpoints("A", "B", "C", "D"), clock = clock) - private val A: TestEndpoint = bc.endpoints.find { it.id == "A" }!! as TestEndpoint - private val B: TestEndpoint = bc.endpoints.find { it.id == "B" }!! as TestEndpoint - private val C: TestEndpoint = bc.endpoints.find { it.id == "C" }!! as TestEndpoint - private val D: TestEndpoint = bc.endpoints.find { it.id == "D" }!! as TestEndpoint + private val A = bc.endpoints.find { it.id == "A" }!! as TestEndpoint + private val B = bc.endpoints.find { it.id == "B" }!! as TestEndpoint + private val C = bc.endpoints.find { it.id == "C" }!! as TestEndpoint + private val D = bc.endpoints.find { it.id == "D" }!! as TestEndpoint - /** - * We disable the threshold, causing [BandwidthAllocator] to make a new decision every time BWE changes. This is - * because these tests are designed to test the decisions themselves and not necessarily when they are made. - */ override suspend fun beforeSpec(spec: Spec) = super.beforeSpec(spec).also { + // We disable the threshold, causing [BandwidthAllocator] to make a new decision every time BWE changes. 
This is + // because these tests are designed to test the decisions themselves and not necessarily when they are made. setNewConfig( """ videobridge.cc { @@ -70,8 +71,6 @@ class BitrateControllerTest : ShouldSpec() { // Effectively disable periodic updates. max-time-between-calculations = 1 hour } - // Also disable multi stream support, - $configWithMultiStreamDisabled """.trimIndent(), true ) @@ -83,21 +82,45 @@ class BitrateControllerTest : ShouldSpec() { } init { + context("Expire") { + val captureDelay = CapturingSlot() + val captureDelayTimeunit = CapturingSlot() + val captureCancel = CapturingSlot() + val executor: ScheduledExecutorService = mockk { + every { schedule(any(), capture(captureDelay), capture(captureDelayTimeunit)) } returns mockk { + every { cancel(capture(captureCancel)) } returns true + } + } + TaskPools.SCHEDULED_POOL = executor + val bc = BitrateControllerWrapper(createEndpoints(), clock = clock) + val delayMs = TimeUnit.MILLISECONDS.convert(captureDelay.captured, captureDelayTimeunit.captured) + + delayMs.shouldBeWithinPercentageOf( + BitrateControllerConfig.config.maxTimeBetweenCalculations().toMillis(), + 10.0 + ) + + captureCancel.isCaptured shouldBe false + bc.bc.expire() + captureCancel.isCaptured shouldBe true + + TaskPools.resetScheduledPool() + } context("Prioritization") { context("Without selection") { - val endpoints = createEndpoints("F", "E", "D", "C", "B", "A") - val ordered = prioritize(endpoints) - ordered.map { it.id } shouldBe listOf("F", "E", "D", "C", "B", "A") + val sources = createSources("s6", "s5", "s4", "s3", "s2", "s1") + val ordered = prioritize(sources) + ordered.map { it.sourceName } shouldBe listOf("s6", "s5", "s4", "s3", "s2", "s1") } context("With one selected") { - val endpoints = createEndpoints("F", "E", "D", "C", "B", "A") - val ordered = prioritize(endpoints, listOf("B")) - ordered.map { it.id } shouldBe listOf("B", "F", "E", "D", "C", "A") + val sources = createSources("s6", "s5", "s4", "s3", "s2", "s1") + val ordered = prioritize(sources, listOf("s2")) + ordered.map { it.sourceName } shouldBe listOf("s2", "s6", "s5", "s4", "s3", "s1") } context("With multiple selected") { - val endpoints = createEndpoints("F", "E", "D", "C", "B", "A") - val ordered = prioritize(endpoints, listOf("B", "A", "E")) - ordered.map { it.id } shouldBe listOf("B", "A", "E", "F", "D", "C") + val sources = createSources("s6", "s5", "s4", "s3", "s2", "s1") + val ordered = prioritize(sources, listOf("s2", "s1", "s5")) + ordered.map { it.sourceName } shouldBe listOf("s2", "s1", "s5", "s6", "s4", "s3") } } @@ -108,14 +131,14 @@ class BitrateControllerTest : ShouldSpec() { listOf(true, false).forEach { screensharing -> context("With ${if (screensharing) "screensharing" else "camera"}") { if (screensharing) { - A.videoType = VideoType.DESKTOP + A.mediaSources[0].videoType = VideoType.DESKTOP } bc.setEndpointOrdering(A, B, C, D) - bc.setStageView("A") + bc.setStageView("A-v0") bc.bc.allocationSettings.lastN shouldBe -1 - bc.bc.allocationSettings.selectedEndpoints shouldBe emptyList() - bc.bc.allocationSettings.onStageEndpoints shouldBe listOf("A") + bc.bc.allocationSettings.selectedSources shouldBe emptyList() + bc.bc.allocationSettings.onStageSources shouldBe listOf("A-v0") runBweLoop() @@ -125,11 +148,11 @@ class BitrateControllerTest : ShouldSpec() { } context("and a non-dominant speaker is on stage") { bc.setEndpointOrdering(B, A, C, D) - bc.setStageView("A") + bc.setStageView("A-v0") bc.bc.allocationSettings.lastN shouldBe -1 - 
bc.bc.allocationSettings.selectedEndpoints shouldBe emptyList() - bc.bc.allocationSettings.onStageEndpoints shouldBe listOf("A") + bc.bc.allocationSettings.selectedSources shouldBe emptyList() + bc.bc.allocationSettings.onStageSources shouldBe listOf("A-v0") runBweLoop() verifyStageView() @@ -141,8 +164,8 @@ class BitrateControllerTest : ShouldSpec() { bc.setStageView("A", lastN = 0) bc.bc.allocationSettings.lastN shouldBe 0 - bc.bc.allocationSettings.selectedEndpoints shouldBe emptyList() - bc.bc.allocationSettings.onStageEndpoints shouldBe listOf("A") + bc.bc.allocationSettings.selectedSources shouldBe emptyList() + bc.bc.allocationSettings.onStageSources shouldBe listOf("A") runBweLoop() @@ -152,11 +175,11 @@ class BitrateControllerTest : ShouldSpec() { // LastN=1 is used when the client goes in "audio-only" mode, but someone starts a screenshare. context("and the dominant speaker is on-stage") { bc.setEndpointOrdering(A, B, C, D) - bc.setStageView("A", lastN = 1) + bc.setStageView("A-v0", lastN = 1) bc.bc.allocationSettings.lastN shouldBe 1 - bc.bc.allocationSettings.selectedEndpoints shouldBe emptyList() - bc.bc.allocationSettings.onStageEndpoints shouldBe listOf("A") + bc.bc.allocationSettings.selectedSources shouldBe emptyList() + bc.bc.allocationSettings.onStageSources shouldBe listOf("A-v0") runBweLoop() @@ -164,11 +187,11 @@ class BitrateControllerTest : ShouldSpec() { } context("and a non-dominant speaker is on stage") { bc.setEndpointOrdering(B, A, C, D) - bc.setStageView("A", lastN = 1) + bc.setStageView("A-v0", lastN = 1) bc.bc.allocationSettings.lastN shouldBe 1 - bc.bc.allocationSettings.selectedEndpoints shouldBe emptyList() - bc.bc.allocationSettings.onStageEndpoints shouldBe listOf("A") + bc.bc.allocationSettings.selectedSources shouldBe emptyList() + bc.bc.allocationSettings.onStageSources shouldBe listOf("A-v0") runBweLoop() @@ -178,12 +201,11 @@ class BitrateControllerTest : ShouldSpec() { } context("Tile view") { bc.setEndpointOrdering(A, B, C, D) - bc.setTileView("A", "B", "C", "D") + bc.setTileView("A-v0", "B-v0", "C-v0", "D-v0") bc.bc.allocationSettings.lastN shouldBe -1 - // The legacy API (currently used by jitsi-meet) uses "selected count > 0" to infer TileView, - // and in tile view we do not use selected endpoints. - bc.bc.allocationSettings.selectedEndpoints shouldBe listOf("A", "B", "C", "D") + bc.bc.allocationSettings.selectedSources shouldBe + listOf("A-v0", "B-v0", "C-v0", "D-v0") context("When LastN is not set") { runBweLoop() @@ -191,13 +213,13 @@ class BitrateControllerTest : ShouldSpec() { verifyTileView() } context("When LastN=0") { - bc.setTileView("A", "B", "C", "D", lastN = 0) + bc.setTileView("A-v0", "B-v0", "C-v0", "D-v0", lastN = 0) runBweLoop() verifyLastN0() } context("When LastN=1") { - bc.setTileView("A", "B", "C", "D", lastN = 1) + bc.setTileView("A-v0", "B-v0", "C-v0", "D-v0", lastN = 1) runBweLoop() verifyTileViewLastN1() @@ -205,12 +227,13 @@ class BitrateControllerTest : ShouldSpec() { } context("Tile view 360p") { bc.setEndpointOrdering(A, B, C, D) - bc.setTileView("A", "B", "C", "D", maxFrameHeight = 360) + bc.setTileView("A-v0", "B-v0", "C-v0", "D-v0", maxFrameHeight = 360) bc.bc.allocationSettings.lastN shouldBe -1 // The legacy API (currently used by jitsi-meet) uses "selected count > 0" to infer TileView, // and in tile view we do not use selected endpoints. 
- bc.bc.allocationSettings.selectedEndpoints shouldBe listOf("A", "B", "C", "D") + bc.bc.allocationSettings.selectedSources shouldBe + listOf("A-v0", "B-v0", "C-v0", "D-v0") context("When LastN is not set") { runBweLoop() @@ -218,115 +241,115 @@ class BitrateControllerTest : ShouldSpec() { verifyTileView360p() } context("When LastN=0") { - bc.setTileView("A", "B", "C", "D", lastN = 0, maxFrameHeight = 360) + bc.setTileView("A-v0", "B-v0", "C-v0", "D-v0", lastN = 0, maxFrameHeight = 360) runBweLoop() verifyLastN0() } context("When LastN=1") { - bc.setTileView("A", "B", "C", "D", lastN = 1, maxFrameHeight = 360) + bc.setTileView("A-v0", "B-v0", "C-v0", "D-v0", lastN = 1, maxFrameHeight = 360) runBweLoop() verifyTileViewLastN1(360) } } - context("Selected endpoints should override the dominant speaker (with new signaling)") { + context("Selected sources should override the dominant speaker (with new signaling)") { // A is dominant speaker, A and B are selected. With LastN=2 we should always forward the selected - // endpoints regardless of who is speaking. + // sources regardless of who is speaking. // The exact flow of this scenario was taken from a (non-jitsi-meet) client. bc.setEndpointOrdering(A, B, C, D) bc.bc.setBandwidthAllocationSettings( ReceiverVideoConstraintsMessage( - selectedEndpoints = listOf("A", "B"), - constraints = mapOf("A" to VideoConstraints(720), "B" to VideoConstraints(720)) + selectedSources = listOf("A-v0", "B-v0"), + constraints = mapOf("A-v0" to VideoConstraints(720), "B-v0" to VideoConstraints(720)) ) ) bc.effectiveConstraintsHistory.last().event shouldBe mapOf( - "A" to VideoConstraints(720), - "B" to VideoConstraints(720), - "C" to VideoConstraints(180), - "D" to VideoConstraints(180) + "A-v0" to VideoConstraints(720), + "B-v0" to VideoConstraints(720), + "C-v0" to VideoConstraints(180), + "D-v0" to VideoConstraints(180) ) bc.bc.setBandwidthAllocationSettings(ReceiverVideoConstraintsMessage(lastN = 2)) bc.effectiveConstraintsHistory.last().event shouldBe mapOf( - "A" to VideoConstraints(720), - "B" to VideoConstraints(720), - "C" to VideoConstraints(0), - "D" to VideoConstraints(0) + "A-v0" to VideoConstraints(720), + "B-v0" to VideoConstraints(720), + "C-v0" to VideoConstraints(0), + "D-v0" to VideoConstraints(0) ) bc.bc.allocationSettings.lastN shouldBe 2 - bc.bc.allocationSettings.selectedEndpoints shouldBe listOf("A", "B") + bc.bc.allocationSettings.selectedSources shouldBe listOf("A-v0", "B-v0") clock.elapse(20.secs) bc.bwe = 10.mbps - bc.forwardedEndpointsHistory.last().event.shouldBe(setOf("A", "B")) + bc.forwardedSourcesHistory.last().event.shouldBe(setOf("A-v0", "B-v0")) clock.elapse(2.secs) // B becomes dominant speaker. bc.setEndpointOrdering(B, A, C, D) - bc.forwardedEndpointsHistory.last().event.shouldBe(setOf("A", "B")) + bc.forwardedSourcesHistory.last().event.shouldBe(setOf("A-v0", "B-v0")) clock.elapse(2.secs) bc.bc.setBandwidthAllocationSettings( ReceiverVideoConstraintsMessage( - constraints = mapOf("A" to VideoConstraints(360), "B" to VideoConstraints(360)) + constraints = mapOf("A-v0" to VideoConstraints(360), "B-v0" to VideoConstraints(360)) ) ) bc.effectiveConstraintsHistory.last().event shouldBe mapOf( - "A" to VideoConstraints(360), - "B" to VideoConstraints(360), - "C" to VideoConstraints(0), - "D" to VideoConstraints(0) + "A-v0" to VideoConstraints(360), + "B-v0" to VideoConstraints(360), + "C-v0" to VideoConstraints(0), + "D-v0" to VideoConstraints(0) ) clock.elapse(2.secs) // This should change nothing, the selection didn't change. 
bc.bc.setBandwidthAllocationSettings( - ReceiverVideoConstraintsMessage(selectedEndpoints = listOf("A", "B")) + ReceiverVideoConstraintsMessage(selectedSources = listOf("A-v0", "B-v0")) ) - bc.forwardedEndpointsHistory.last().event.shouldBe(setOf("A", "B")) + bc.forwardedSourcesHistory.last().event.shouldBe(setOf("A-v0", "B-v0")) clock.elapse(2.secs) bc.bc.setBandwidthAllocationSettings(ReceiverVideoConstraintsMessage(lastN = -1)) bc.effectiveConstraintsHistory.last().event shouldBe mapOf( - "A" to VideoConstraints(360), - "B" to VideoConstraints(360), - "C" to VideoConstraints(180), - "D" to VideoConstraints(180) + "A-v0" to VideoConstraints(360), + "B-v0" to VideoConstraints(360), + "C-v0" to VideoConstraints(180), + "D-v0" to VideoConstraints(180) ) - bc.forwardedEndpointsHistory.last().event.shouldBe(setOf("A", "B", "C", "D")) + bc.forwardedSourcesHistory.last().event.shouldBe(setOf("A-v0", "B-v0", "C-v0", "D-v0")) bc.bc.setBandwidthAllocationSettings(ReceiverVideoConstraintsMessage(lastN = 2)) bc.effectiveConstraintsHistory.last().event shouldBe mapOf( - "A" to VideoConstraints(360), - "B" to VideoConstraints(360), - "C" to VideoConstraints(0), - "D" to VideoConstraints(0) + "A-v0" to VideoConstraints(360), + "B-v0" to VideoConstraints(360), + "C-v0" to VideoConstraints(0), + "D-v0" to VideoConstraints(0) ) clock.elapse(2.secs) - bc.forwardedEndpointsHistory.last().event.shouldBe(setOf("A", "B")) + bc.forwardedSourcesHistory.last().event.shouldBe(setOf("A-v0", "B-v0")) clock.elapse(2.secs) // D is now dominant speaker, but it should not override the selected endpoints. bc.setEndpointOrdering(D, B, A, C) - bc.forwardedEndpointsHistory.last().event.shouldBe(setOf("A", "B")) + bc.forwardedSourcesHistory.last().event.shouldBe(setOf("A-v0", "B-v0")) bc.bwe = 10.mbps - bc.forwardedEndpointsHistory.last().event.shouldBe(setOf("A", "B")) + bc.forwardedSourcesHistory.last().event.shouldBe(setOf("A-v0", "B-v0")) clock.elapse(2.secs) bc.bwe = 0.mbps clock.elapse(2.secs) bc.bwe = 10.mbps - bc.forwardedEndpointsHistory.last().event.shouldBe(setOf("A", "B")) + bc.forwardedSourcesHistory.last().event.shouldBe(setOf("A-v0", "B-v0")) clock.elapse(2.secs) // C is now dominant speaker, but it should not override the selected endpoints. bc.setEndpointOrdering(C, D, A, B) - bc.forwardedEndpointsHistory.last().event.shouldBe(setOf("A", "B")) + bc.forwardedSourcesHistory.last().event.shouldBe(setOf("A-v0", "B-v0")) } } } @@ -336,7 +359,7 @@ class BitrateControllerTest : ShouldSpec() { bc.bwe = bwe.bps clock.elapse(100.ms) } - logger.info("Forwarded endpoints history: ${bc.forwardedEndpointsHistory}") + logger.info("Forwarded sources history: ${bc.forwardedSourcesHistory}") logger.info("Effective constraints history: ${bc.effectiveConstraintsHistory}") logger.info("Allocation history: ${bc.allocationHistory}") } @@ -344,19 +367,18 @@ class BitrateControllerTest : ShouldSpec() { private fun verifyStageViewScreensharing() { // At this stage the purpose of this is just to document current behavior. // TODO: The results with bwe==-1 are wrong. 
- bc.forwardedEndpointsHistory.removeIf { it.bwe < 0.bps } - bc.forwardedEndpointsHistory.map { it.event }.shouldContainInOrder( - setOf("A"), - setOf("A", "B"), - setOf("A", "B", "C"), - setOf("A", "B", "C", "D") + bc.forwardedSourcesHistory.map { it.event }.shouldContainInOrder( + setOf("A-v0"), + setOf("A-v0", "B-v0"), + setOf("A-v0", "B-v0", "C-v0"), + setOf("A-v0", "B-v0", "C-v0", "D-v0") ) bc.effectiveConstraintsHistory.last().event shouldBe mapOf( - "A" to VideoConstraints(720), - "B" to VideoConstraints(180), - "C" to VideoConstraints(180), - "D" to VideoConstraints(180) + "A-v0" to VideoConstraints(720), + "B-v0" to VideoConstraints(180), + "C-v0" to VideoConstraints(180), + "D-v0" to VideoConstraints(180) ) // At this stage the purpose of this is just to document current behavior. @@ -535,19 +557,20 @@ class BitrateControllerTest : ShouldSpec() { private fun verifyStageViewCamera() { // At this stage the purpose of this is just to document current behavior. // TODO: The results with bwe==-1 are wrong. - bc.forwardedEndpointsHistory.removeIf { it.bwe < 0.bps } - bc.forwardedEndpointsHistory.map { it.event }.shouldContainInOrder( - setOf("A"), - setOf("A", "B"), - setOf("A", "B", "C"), - setOf("A", "B", "C", "D") + bc.forwardedSourcesHistory.removeIf { it.bwe < 0.bps } + bc.forwardedSourcesHistory.map { it.event }.shouldContainInOrder( + setOf("A-v0"), + setOf("A-v0", "B-v0"), + setOf("A-v0", "B-v0", "C-v0"), + setOf("A-v0", "B-v0", "C-v0", "D-v0") ) + // TODO add forwarded sources history here bc.effectiveConstraintsHistory.last().event shouldBe mapOf( - "A" to VideoConstraints(720), - "B" to VideoConstraints(180), - "C" to VideoConstraints(180), - "D" to VideoConstraints(180) + "A-v0" to VideoConstraints(720), + "B-v0" to VideoConstraints(180), + "C-v0" to VideoConstraints(180), + "D-v0" to VideoConstraints(180) ) // At this stage the purpose of this is just to document current behavior. @@ -781,13 +804,13 @@ class BitrateControllerTest : ShouldSpec() { private fun verifyLastN0() { // No video forwarded even with high BWE. - bc.forwardedEndpointsHistory.size shouldBe 0 + bc.forwardedSourcesHistory.size shouldBe 0 bc.effectiveConstraintsHistory.last().event shouldBe mapOf( - "A" to VideoConstraints(0), - "B" to VideoConstraints(0), - "C" to VideoConstraints(0), - "D" to VideoConstraints(0) + "A-v0" to VideoConstraints(0), + "B-v0" to VideoConstraints(0), + "C-v0" to VideoConstraints(0), + "D-v0" to VideoConstraints(0) ) // TODO: The history contains 3 identical elements, which is probably a bug. @@ -806,17 +829,17 @@ class BitrateControllerTest : ShouldSpec() { private fun verifyStageViewLastN1() { // At this stage the purpose of this is just to document current behavior. // TODO: The results with bwe==-1 are wrong. - bc.forwardedEndpointsHistory.removeIf { it.bwe < 0.bps } + bc.forwardedSourcesHistory.removeIf { it.bwe < 0.bps } - bc.forwardedEndpointsHistory.map { it.event }.shouldContainInOrder( - setOf("A") + bc.forwardedSourcesHistory.map { it.event }.shouldContainInOrder( + setOf("A-v0") ) bc.effectiveConstraintsHistory.last().event shouldBe mapOf( - "A" to VideoConstraints(720), - "B" to VideoConstraints(0), - "C" to VideoConstraints(0), - "D" to VideoConstraints(0) + "A-v0" to VideoConstraints(720), + "B-v0" to VideoConstraints(0), + "C-v0" to VideoConstraints(0), + "D-v0" to VideoConstraints(0) ) // At this stage the purpose of this is just to document current behavior. 
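The verify* helpers in this file assert against the recorded histories using the Event and shouldMatchInOrder helpers this patch moves into BitrateControllerPerfTest.kt, and both deliberately ignore the capture time of each event. The following stand-alone Kotlin sketch is not part of the patch (SimpleEvent and its fields are illustrative names); it only mirrors that comparison semantics for readers skimming the hunks:

    // Minimal sketch of "compare by bwe and payload, ignore time", mirroring the Event data
    // class and shouldMatchInOrder helper in this patch. All names here are illustrative.
    data class SimpleEvent<T>(val bwe: Long, val event: T, val timeMs: Long = Long.MIN_VALUE) {
        override fun equals(other: Any?): Boolean {
            if (other !is SimpleEvent<*>) return false
            return bwe == other.bwe && event == other.event // timeMs is deliberately ignored
        }
        override fun hashCode(): Int = 31 * bwe.hashCode() + (event?.hashCode() ?: 0)
    }

    fun main() {
        val expected = SimpleEvent(1_000_000L, setOf("A-v0"))
        val observed = SimpleEvent(1_000_000L, setOf("A-v0"), timeMs = 12_345L)
        println(expected == observed) // true: same bwe and payload match even with different times
    }

This is why histories captured with the FakeClock can be checked against literal expectations without pinning exact timestamps.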
@@ -885,12 +908,12 @@ class BitrateControllerTest : ShouldSpec() { private fun verifyTileView() { // At this stage the purpose of this is just to document current behavior. // TODO: The results with bwe==-1 are wrong. - bc.forwardedEndpointsHistory.removeIf { it.bwe < 0.bps } - bc.forwardedEndpointsHistory.map { it.event }.shouldContainInOrder( - setOf("A"), - setOf("A", "B"), - setOf("A", "B", "C"), - setOf("A", "B", "C", "D") + bc.forwardedSourcesHistory.removeIf { it.bwe < 0.bps } + bc.forwardedSourcesHistory.map { it.event }.shouldContainInOrder( + setOf("A-v0"), + setOf("A-v0", "B-v0"), + setOf("A-v0", "B-v0", "C-v0"), + setOf("A-v0", "B-v0", "C-v0", "D-v0") ) bc.allocationHistory.shouldMatchInOrder( @@ -1043,12 +1066,12 @@ class BitrateControllerTest : ShouldSpec() { private fun verifyTileView360p() { // At this stage the purpose of this is just to document current behavior. // TODO: The results with bwe==-1 are wrong. - bc.forwardedEndpointsHistory.removeIf { it.bwe < 0.bps } - bc.forwardedEndpointsHistory.map { it.event }.shouldContainInOrder( - setOf("A"), - setOf("A", "B"), - setOf("A", "B", "C"), - setOf("A", "B", "C", "D") + bc.forwardedSourcesHistory.removeIf { it.bwe < 0.bps } + bc.forwardedSourcesHistory.map { it.event }.shouldContainInOrder( + setOf("A-v0"), + setOf("A-v0", "B-v0"), + setOf("A-v0", "B-v0", "C-v0"), + setOf("A-v0", "B-v0", "C-v0", "D-v0") ) bc.allocationHistory.shouldMatchInOrder( @@ -1246,16 +1269,16 @@ class BitrateControllerTest : ShouldSpec() { private fun verifyTileViewLastN1(maxFrameHeight: Int = 180) { // At this stage the purpose of this is just to document current behavior. // TODO: The results with bwe==-1 are wrong. - bc.forwardedEndpointsHistory.removeIf { it.bwe < 0.bps } - bc.forwardedEndpointsHistory.map { it.event }.shouldContainInOrder( - setOf("A") + bc.forwardedSourcesHistory.removeIf { it.bwe < 0.bps } + bc.forwardedSourcesHistory.map { it.event }.shouldContainInOrder( + setOf("A-v0") ) bc.effectiveConstraintsHistory.last().event shouldBe mapOf( - "A" to VideoConstraints(maxFrameHeight), - "B" to VideoConstraints(0), - "C" to VideoConstraints(0), - "D" to VideoConstraints(0) + "A-v0" to VideoConstraints(maxFrameHeight), + "B-v0" to VideoConstraints(0), + "C-v0" to VideoConstraints(0), + "D-v0" to VideoConstraints(0) ) val expectedAllocationHistory = mutableListOf( @@ -1327,29 +1350,6 @@ class BitrateControllerTest : ShouldSpec() { } } -fun List>.shouldMatchInOrder(vararg events: Event) { - size shouldBe events.size - events.forEachIndexed { i, it -> - this[i].bwe shouldBe it.bwe - withClue("bwe=${it.bwe}") { - this[i].event.shouldMatch(it.event) - } - // Ignore this.time - } -} - -fun BandwidthAllocation.shouldMatch(other: BandwidthAllocation) { - allocations.size shouldBe other.allocations.size - allocations.forEach { thisSingleAllocation -> - withClue("Allocation for ${thisSingleAllocation.endpointId}") { - val otherSingleAllocation = other.allocations.find { it.endpointId == thisSingleAllocation.endpointId } - otherSingleAllocation.shouldNotBeNull() - thisSingleAllocation.targetLayer?.height shouldBe otherSingleAllocation.targetLayer?.height - thisSingleAllocation.targetLayer?.frameRate shouldBe otherSingleAllocation.targetLayer?.frameRate - } - } -} - class BitrateControllerWrapper(initialEndpoints: List, val clock: FakeClock = FakeClock()) { var endpoints: List = initialEndpoints val logger = createLogger() @@ -1363,21 +1363,22 @@ class BitrateControllerWrapper(initialEndpoints: List, val // Save the output. 
val effectiveConstraintsHistory: History> = mutableListOf() - val forwardedEndpointsHistory: History> = mutableListOf() + val forwardedSourcesHistory: History> = mutableListOf() val allocationHistory: History = mutableListOf() val bc = BitrateController( object : BitrateController.EventHandler { - override fun forwardedEndpointsChanged(forwardedEndpoints: Set) { - Event(bwe, forwardedEndpoints, clock.instant()).apply { - logger.info("Forwarded endpoints changed: $this") - forwardedEndpointsHistory.add(this) + override fun forwardedEndpointsChanged(forwardedEndpoints: Set) { } + + override fun forwardedSourcesChanged(forwardedSources: Set) { + Event(bwe, forwardedSources, clock.instant()).apply { + logger.info("Forwarded sources changed: $this") + forwardedSourcesHistory.add(this) } } - override fun forwardedSourcesChanged(forwardedSources: Set) { } - - override fun sourceListChanged(sourceList: List) { } + override fun sourceListChanged(sourceList: List) { + } override fun effectiveVideoConstraintsChanged( oldEffectiveConstraints: Map, @@ -1405,7 +1406,7 @@ class BitrateControllerWrapper(initialEndpoints: List, val Supplier { endpoints }, DiagnosticContext(), logger, - false, // TODO merge this test with BitrateControllerNewTest and use this flag + true, // TODO merge BitrateControllerNewTest with old and use this flag false, clock ) @@ -1416,35 +1417,26 @@ class BitrateControllerWrapper(initialEndpoints: List, val bc.endpointOrderingChanged() } - fun setStageView(onStageEndpoint: String, maxFrameHeight: Int = 720, lastN: Int? = null) { + fun setStageView(onStageSource: String, lastN: Int? = null) { bc.setBandwidthAllocationSettings( ReceiverVideoConstraintsMessage( lastN = lastN, - onStageEndpoints = listOf(onStageEndpoint), - constraints = mapOf(onStageEndpoint to VideoConstraints(720)) - ) - ) - } - - fun setSelectedEndpoints(vararg selectedEndpoints: String, maxFrameHeight: Int? = null) { - bc.setBandwidthAllocationSettings( - ReceiverVideoConstraintsMessage( - selectedEndpoints = listOf(*selectedEndpoints), - defaultConstraints = maxFrameHeight?.let { VideoConstraints(it) } + onStageSources = listOf(onStageSource), + constraints = mapOf(onStageSource to VideoConstraints(720)) ) ) } fun setTileView( - vararg selectedEndpoints: String, + vararg selectedSources: String, maxFrameHeight: Int = 180, lastN: Int? = null ) { bc.setBandwidthAllocationSettings( ReceiverVideoConstraintsMessage( lastN = lastN, - selectedEndpoints = listOf(*selectedEndpoints), - constraints = selectedEndpoints.map { it to VideoConstraints(maxFrameHeight) }.toMap() + selectedSources = listOf(*selectedSources), + constraints = selectedSources.map { it to VideoConstraints(maxFrameHeight) }.toMap() ) ) } @@ -1459,46 +1451,56 @@ class BitrateControllerWrapper(initialEndpoints: List, val } } -typealias History = MutableList> -data class Event( - val bwe: Bandwidth, - val event: T, - val time: Instant = Instant.MIN -) { - override fun toString(): String = "\n[time=${time.toEpochMilli()} bwe=$bwe] $event" - override fun equals(other: Any?): Boolean { - if (other !is Event<*>) return false - // Ignore this.time - return bwe == other.bwe && event == other.event - } -} - class TestEndpoint( override val id: String, - override val mediaSource: MediaSourceDesc? = null, - override var videoType: VideoType = VideoType.CAMERA, override val mediaSources: Array = emptyArray(), + override val videoType: VideoType = VideoType.CAMERA, + override val mediaSource: MediaSourceDesc? 
= null, ) : MediaSourceContainer fun createEndpoints(vararg ids: String): MutableList { return MutableList(ids.size) { i -> TestEndpoint( ids[i], - createSource( - 3 * i + 1, - 3 * i + 2, - 3 * i + 3 + arrayOf( + createSourceDesc( + 3 * i + 1, + 3 * i + 2, + 3 * i + 3, + ids[i] + "-v0", + ids[i] + ) ) ) } } -fun createSource(ssrc1: Int, ssrc2: Int, ssrc3: Int): MediaSourceDesc = MediaSourceDesc( +fun createSources(vararg ids: String): MutableList { + return MutableList(ids.size) { i -> + createSourceDesc( + 3 * i + 1, + 3 * i + 2, + 3 * i + 3, + ids[i], + null + ) + } +} + +fun createSourceDesc( + ssrc1: Int, + ssrc2: Int, + ssrc3: Int, + sourceName: String, + owner: String? +): MediaSourceDesc = MediaSourceDesc( arrayOf( RtpEncodingDesc(ssrc1.toLong(), arrayOf(ld7_5, ld15, ld30)), RtpEncodingDesc(ssrc2.toLong(), arrayOf(sd7_5, sd15, sd30)), RtpEncodingDesc(ssrc3.toLong(), arrayOf(hd7_5, hd15, hd30)) - ) + ), + sourceName = sourceName, + owner = owner ) val bitrateLd = 150.kbps @@ -1547,3 +1549,17 @@ class MockRtpLayerDesc( override fun getBitrate(nowMs: Long): Bandwidth = bitrate override fun hasZeroBitrate(nowMs: Long): Boolean = bitrate == 0.bps } + +typealias History = MutableList> +data class Event( + val bwe: Bandwidth, + val event: T, + val time: Instant = Instant.MIN +) { + override fun toString(): String = "\n[time=${time.toEpochMilli()} bwe=$bwe] $event" + override fun equals(other: Any?): Boolean { + if (other !is Event<*>) return false + // Ignore this.time + return bwe == other.bwe && event == other.event + } +} diff --git a/jvb/src/test/kotlin/org/jitsi/videobridge/cc/allocation/EffectiveConstraintsNewTest.kt b/jvb/src/test/kotlin/org/jitsi/videobridge/cc/allocation/EffectiveConstraintsNewTest.kt deleted file mode 100644 index 5dca035d9c..0000000000 --- a/jvb/src/test/kotlin/org/jitsi/videobridge/cc/allocation/EffectiveConstraintsNewTest.kt +++ /dev/null @@ -1,191 +0,0 @@ -/* - * Copyright @ 2021 - present 8x8, Inc. - * Copyright @ 2021 - Vowel, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.jitsi.videobridge.cc.allocation - -import io.kotest.core.spec.IsolationMode -import io.kotest.core.spec.style.ShouldSpec -import io.kotest.matchers.shouldBe -import org.jitsi.nlj.MediaSourceDesc -import org.jitsi.nlj.VideoType -import org.jitsi.videobridge.cc.config.BitrateControllerConfig - -fun testSource( - endpointId: String, - sourceName: String, - videoType: VideoType = VideoType.CAMERA -): MediaSourceDesc { - return MediaSourceDesc( - emptyArray(), - endpointId, - sourceName, - videoType - ) -} - -@Suppress("NAME_SHADOWING") -class EffectiveConstraintsNewTest : ShouldSpec() { - override fun isolationMode() = IsolationMode.InstancePerLeaf - - init { - val s1 = testSource("e1", "s1") - val s2 = testSource("e1", "s2") - val s3 = testSource("e1", "s3") - val s4 = testSource("e1", "s4", videoType = VideoType.DISABLED) - val s5 = testSource("e1", "s5", videoType = VideoType.DISABLED) - val s6 = testSource("e1", "s6", videoType = VideoType.DISABLED) - - val defaultConstraints = VideoConstraints(BitrateControllerConfig.config.thumbnailMaxHeightPx()) - - val sources = listOf(s1, s2, s3, s4, s5, s6) - val zeroEffectiveConstraints = mutableMapOf( - "s1" to VideoConstraints.NOTHING, - "s2" to VideoConstraints.NOTHING, - "s3" to VideoConstraints.NOTHING, - "s4" to VideoConstraints.NOTHING, - "s5" to VideoConstraints.NOTHING, - "s6" to VideoConstraints.NOTHING - ) - - context("With lastN=0") { - val allocationSettings = AllocationSettings(lastN = 0, defaultConstraints = defaultConstraints) - getEffectiveConstraints2(sources, allocationSettings) shouldBe zeroEffectiveConstraints - } - context("With lastN=1") { - context("And no other constraints") { - val allocationSettings = AllocationSettings(lastN = 1, defaultConstraints = defaultConstraints) - getEffectiveConstraints2(sources, allocationSettings) shouldBe zeroEffectiveConstraints.apply { - // The default defaultConstraints are 180 - put("s1", VideoConstraints(180)) - } - } - context("And different defaultConstraints") { - val allocationSettings = AllocationSettings(lastN = 1, defaultConstraints = VideoConstraints(360)) - getEffectiveConstraints2(sources, allocationSettings) shouldBe zeroEffectiveConstraints.apply { - put("s1", VideoConstraints(360)) - } - } - context("And all constraints 0") { - val allocationSettings = AllocationSettings( - lastN = 1, - defaultConstraints = VideoConstraints.NOTHING, - ) - getEffectiveConstraints2(sources, allocationSettings) shouldBe zeroEffectiveConstraints - } - context("And non-zero constraints for a source with video enabled") { - val allocationSettings = AllocationSettings( - lastN = 1, - defaultConstraints = VideoConstraints.NOTHING, - videoConstraints = mapOf("s1" to VideoConstraints(720)) - - ) - getEffectiveConstraints2(sources, allocationSettings) shouldBe zeroEffectiveConstraints.apply { - put("s1", VideoConstraints(720)) - } - } - context("And non-zero constraints for a source with video DISABLED") { - val allocationSettings = AllocationSettings( - lastN = 1, - defaultConstraints = VideoConstraints.NOTHING, - videoConstraints = mapOf("s4" to VideoConstraints(720)) - ) - getEffectiveConstraints2(sources, allocationSettings) shouldBe zeroEffectiveConstraints.apply { - put("s4", VideoConstraints(720)) - } - } - context("When the top sources have the video DISABLED") { - // The top sources in speaker order have videoType = DISABLED - val sources = listOf(s4, s5, s6, s1, s2, s3) - - context("With default settings") { - val allocationSettings = AllocationSettings(lastN = 1, 
defaultConstraints = defaultConstraints) - getEffectiveConstraints2(sources, allocationSettings) shouldBe zeroEffectiveConstraints.apply { - put("s4", VideoConstraints(180)) - } - } - context("With default constraints 0 and non-zero constraints for a source with video DISABLED") { - val allocationSettings = AllocationSettings( - lastN = 1, - defaultConstraints = VideoConstraints.NOTHING, - videoConstraints = mapOf("s5" to VideoConstraints(180)) - ) - getEffectiveConstraints2(sources, allocationSettings) shouldBe zeroEffectiveConstraints.apply { - put("s5", VideoConstraints(180)) - } - } - context("With default constraints 0 and non-zero constraints for a source with video enabled") { - val allocationSettings = AllocationSettings( - lastN = 1, - defaultConstraints = VideoConstraints.NOTHING, - videoConstraints = mapOf("s2" to VideoConstraints(180)) - ) - getEffectiveConstraints2(sources, allocationSettings) shouldBe zeroEffectiveConstraints.apply { - put("s2", VideoConstraints(180)) - } - } - context("With default constraints 0 and non-zero constraints for sources low on the list") { - val allocationSettings = AllocationSettings( - lastN = 1, - defaultConstraints = VideoConstraints.NOTHING, - videoConstraints = mapOf("s2" to VideoConstraints(180), "s3" to VideoConstraints(180)) - ) - getEffectiveConstraints2(sources, allocationSettings) shouldBe zeroEffectiveConstraints.apply { - put("s2", VideoConstraints(180)) - } - } - } - } - context("With lastN=3") { - context("And default settings") { - val allocationSettings = AllocationSettings(lastN = 3, defaultConstraints = defaultConstraints) - getEffectiveConstraints2(sources, allocationSettings) shouldBe zeroEffectiveConstraints.apply { - put("s1", VideoConstraints(180)) - put("s2", VideoConstraints(180)) - put("s3", VideoConstraints(180)) - } - } - context("When the top sources have video DISABLED") { - // The top sources in speaker order have videoType = DISABLED - val endpoints = listOf(s4, s5, s6, s1, s2, s3) - - context("And default settings") { - val allocationSettings = AllocationSettings(lastN = 3, defaultConstraints = defaultConstraints) - getEffectiveConstraints2(endpoints, allocationSettings) shouldBe zeroEffectiveConstraints.apply { - put("s4", VideoConstraints(180)) - put("s5", VideoConstraints(180)) - put("s6", VideoConstraints(180)) - } - } - context("And non-zero constraints for sources down the list") { - val allocationSettings = AllocationSettings( - lastN = 3, - defaultConstraints = VideoConstraints.NOTHING, - videoConstraints = mapOf( - "s6" to VideoConstraints(180), - "s2" to VideoConstraints(180), - "s3" to VideoConstraints(180) - ) - ) - getEffectiveConstraints2(endpoints, allocationSettings) shouldBe zeroEffectiveConstraints.apply { - put("s6", VideoConstraints(180)) - put("s2", VideoConstraints(180)) - put("s3", VideoConstraints(180)) - } - } - } - } - } -} diff --git a/jvb/src/test/kotlin/org/jitsi/videobridge/cc/allocation/EffectiveConstraintsTest.kt b/jvb/src/test/kotlin/org/jitsi/videobridge/cc/allocation/EffectiveConstraintsTest.kt index feb8a3fa14..8d763d4954 100644 --- a/jvb/src/test/kotlin/org/jitsi/videobridge/cc/allocation/EffectiveConstraintsTest.kt +++ b/jvb/src/test/kotlin/org/jitsi/videobridge/cc/allocation/EffectiveConstraintsTest.kt @@ -1,5 +1,6 @@ /* * Copyright @ 2021 - present 8x8, Inc. + * Copyright @ 2021 - Vowel, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
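The rewritten EffectiveConstraintsTest below exercises getEffectiveConstraints over a speaker-ordered list of sources with various lastN, default and per-source constraints. As a reading aid, here is a self-contained approximation of the rule the cases below encode; it is inferred from the expectations in this test, not taken from the bridge's implementation, and all names in it (Constraints, effectiveConstraintsSketch, perSource) are illustrative:

    // Sketch only: the first lastN sources (in speaker order) whose requested constraints are
    // non-zero keep them (the explicit per-source value, else the default); every other source
    // is clamped to 0. lastN >= 0 is assumed; -1 ("unlimited") is not modeled here.
    data class Constraints(val maxHeight: Int)

    fun effectiveConstraintsSketch(
        sourcesInSpeakerOrder: List<String>,
        lastN: Int,
        defaultConstraints: Constraints,
        perSource: Map<String, Constraints> = emptyMap()
    ): Map<String, Constraints> {
        var slots = lastN
        return sourcesInSpeakerOrder.associateWith { name ->
            val requested = perSource[name] ?: defaultConstraints
            if (requested.maxHeight > 0 && slots > 0) {
                slots--
                requested
            } else {
                Constraints(0)
            }
        }
    }

    fun main() {
        // Mirrors the "lastN=1, default constraints 0, explicit 720 for s4" case below:
        // only s4 ends up with a non-zero effective constraint.
        val result = effectiveConstraintsSketch(
            listOf("s1", "s2", "s3", "s4", "s5", "s6"),
            lastN = 1,
            defaultConstraints = Constraints(0),
            perSource = mapOf("s4" to Constraints(720))
        )
        println(result) // s4 -> Constraints(maxHeight=720), all other sources -> Constraints(maxHeight=0)
    }

Note in particular that sources whose requested constraints resolve to 0 do not consume a lastN slot, which is why explicitly constrained sources further down the speaker list still get forwarded in the lastN=1 cases.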
@@ -18,49 +19,63 @@ package org.jitsi.videobridge.cc.allocation import io.kotest.core.spec.IsolationMode import io.kotest.core.spec.style.ShouldSpec import io.kotest.matchers.shouldBe +import org.jitsi.nlj.MediaSourceDesc import org.jitsi.nlj.VideoType import org.jitsi.videobridge.cc.config.BitrateControllerConfig +fun testSource( + endpointId: String, + sourceName: String, + videoType: VideoType = VideoType.CAMERA +): MediaSourceDesc { + return MediaSourceDesc( + emptyArray(), + endpointId, + sourceName, + videoType + ) +} + @Suppress("NAME_SHADOWING") class EffectiveConstraintsTest : ShouldSpec() { override fun isolationMode() = IsolationMode.InstancePerLeaf init { - val e1 = TestEndpoint("e1") - val e2 = TestEndpoint("e2") - val e3 = TestEndpoint("e3") - val e4 = TestEndpoint("e4", videoType = VideoType.NONE) - val e5 = TestEndpoint("e5", videoType = VideoType.NONE) - val e6 = TestEndpoint("e6", videoType = VideoType.NONE) + val s1 = testSource("e1", "s1") + val s2 = testSource("e1", "s2") + val s3 = testSource("e1", "s3") + val s4 = testSource("e1", "s4", videoType = VideoType.DISABLED) + val s5 = testSource("e1", "s5", videoType = VideoType.DISABLED) + val s6 = testSource("e1", "s6", videoType = VideoType.DISABLED) val defaultConstraints = VideoConstraints(BitrateControllerConfig.config.thumbnailMaxHeightPx()) - val endpoints = listOf(e1, e2, e3, e4, e5, e6) + val sources = listOf(s1, s2, s3, s4, s5, s6) val zeroEffectiveConstraints = mutableMapOf( - "e1" to VideoConstraints.NOTHING, - "e2" to VideoConstraints.NOTHING, - "e3" to VideoConstraints.NOTHING, - "e4" to VideoConstraints.NOTHING, - "e5" to VideoConstraints.NOTHING, - "e6" to VideoConstraints.NOTHING + "s1" to VideoConstraints.NOTHING, + "s2" to VideoConstraints.NOTHING, + "s3" to VideoConstraints.NOTHING, + "s4" to VideoConstraints.NOTHING, + "s5" to VideoConstraints.NOTHING, + "s6" to VideoConstraints.NOTHING ) context("With lastN=0") { val allocationSettings = AllocationSettings(lastN = 0, defaultConstraints = defaultConstraints) - getEffectiveConstraints(endpoints, allocationSettings) shouldBe zeroEffectiveConstraints + getEffectiveConstraints(sources, allocationSettings) shouldBe zeroEffectiveConstraints } context("With lastN=1") { context("And no other constraints") { val allocationSettings = AllocationSettings(lastN = 1, defaultConstraints = defaultConstraints) - getEffectiveConstraints(endpoints, allocationSettings) shouldBe zeroEffectiveConstraints.apply { + getEffectiveConstraints(sources, allocationSettings) shouldBe zeroEffectiveConstraints.apply { // The default defaultConstraints are 180 - put("e1", VideoConstraints(180)) + put("s1", VideoConstraints(180)) } } context("And different defaultConstraints") { val allocationSettings = AllocationSettings(lastN = 1, defaultConstraints = VideoConstraints(360)) - getEffectiveConstraints(endpoints, allocationSettings) shouldBe zeroEffectiveConstraints.apply { - put("e1", VideoConstraints(360)) + getEffectiveConstraints(sources, allocationSettings) shouldBe zeroEffectiveConstraints.apply { + put("s1", VideoConstraints(360)) } } context("And all constraints 0") { @@ -68,67 +83,67 @@ class EffectiveConstraintsTest : ShouldSpec() { lastN = 1, defaultConstraints = VideoConstraints.NOTHING, ) - getEffectiveConstraints(endpoints, allocationSettings) shouldBe zeroEffectiveConstraints + getEffectiveConstraints(sources, allocationSettings) shouldBe zeroEffectiveConstraints } - context("And non-zero constraints for an endpoint with video") { + context("And non-zero constraints for a 
source with video enabled") { val allocationSettings = AllocationSettings( lastN = 1, defaultConstraints = VideoConstraints.NOTHING, - videoConstraints = mapOf("e1" to VideoConstraints(720)) + videoConstraints = mapOf("s1" to VideoConstraints(720)) ) - getEffectiveConstraints(endpoints, allocationSettings) shouldBe zeroEffectiveConstraints.apply { - put("e1", VideoConstraints(720)) + getEffectiveConstraints(sources, allocationSettings) shouldBe zeroEffectiveConstraints.apply { + put("s1", VideoConstraints(720)) } } - context("And non-zero constraints for and endpoint without video") { + context("And non-zero constraints for a source with video DISABLED") { val allocationSettings = AllocationSettings( lastN = 1, defaultConstraints = VideoConstraints.NOTHING, - videoConstraints = mapOf("e4" to VideoConstraints(720)) + videoConstraints = mapOf("s4" to VideoConstraints(720)) ) - getEffectiveConstraints(endpoints, allocationSettings) shouldBe zeroEffectiveConstraints.apply { - put("e4", VideoConstraints(720)) + getEffectiveConstraints(sources, allocationSettings) shouldBe zeroEffectiveConstraints.apply { + put("s4", VideoConstraints(720)) } } - context("When the top endpoints do not have video") { - // The top endpoints in speaker order have no camera - val endpoints = listOf(e4, e5, e6, e1, e2, e3) + context("When the top sources have the video DISABLED") { + // The top sources in speaker order have videoType = DISABLED + val sources = listOf(s4, s5, s6, s1, s2, s3) context("With default settings") { val allocationSettings = AllocationSettings(lastN = 1, defaultConstraints = defaultConstraints) - getEffectiveConstraints(endpoints, allocationSettings) shouldBe zeroEffectiveConstraints.apply { - put("e4", VideoConstraints(180)) + getEffectiveConstraints(sources, allocationSettings) shouldBe zeroEffectiveConstraints.apply { + put("s4", VideoConstraints(180)) } } - context("With default constraints 0 and non-zero constraints for an endpoint without video") { + context("With default constraints 0 and non-zero constraints for a source with video DISABLED") { val allocationSettings = AllocationSettings( lastN = 1, defaultConstraints = VideoConstraints.NOTHING, - videoConstraints = mapOf("e5" to VideoConstraints(180)) + videoConstraints = mapOf("s5" to VideoConstraints(180)) ) - getEffectiveConstraints(endpoints, allocationSettings) shouldBe zeroEffectiveConstraints.apply { - put("e5", VideoConstraints(180)) + getEffectiveConstraints(sources, allocationSettings) shouldBe zeroEffectiveConstraints.apply { + put("s5", VideoConstraints(180)) } } - context("With default constraints 0 and non-zero constraints for an endpoint with video") { + context("With default constraints 0 and non-zero constraints for a source with video enabled") { val allocationSettings = AllocationSettings( lastN = 1, defaultConstraints = VideoConstraints.NOTHING, - videoConstraints = mapOf("e2" to VideoConstraints(180)) + videoConstraints = mapOf("s2" to VideoConstraints(180)) ) - getEffectiveConstraints(endpoints, allocationSettings) shouldBe zeroEffectiveConstraints.apply { - put("e2", VideoConstraints(180)) + getEffectiveConstraints(sources, allocationSettings) shouldBe zeroEffectiveConstraints.apply { + put("s2", VideoConstraints(180)) } } - context("With default constraints 0 and non-zero constraints for endpoints low on the list") { + context("With default constraints 0 and non-zero constraints for sources low on the list") { val allocationSettings = AllocationSettings( lastN = 1, defaultConstraints = VideoConstraints.NOTHING, 
- videoConstraints = mapOf("e2" to VideoConstraints(180), "e3" to VideoConstraints(180)) + videoConstraints = mapOf("s2" to VideoConstraints(180), "s3" to VideoConstraints(180)) ) - getEffectiveConstraints(endpoints, allocationSettings) shouldBe zeroEffectiveConstraints.apply { - put("e2", VideoConstraints(180)) + getEffectiveConstraints(sources, allocationSettings) shouldBe zeroEffectiveConstraints.apply { + put("s2", VideoConstraints(180)) } } } @@ -136,38 +151,38 @@ class EffectiveConstraintsTest : ShouldSpec() { context("With lastN=3") { context("And default settings") { val allocationSettings = AllocationSettings(lastN = 3, defaultConstraints = defaultConstraints) - getEffectiveConstraints(endpoints, allocationSettings) shouldBe zeroEffectiveConstraints.apply { - put("e1", VideoConstraints(180)) - put("e2", VideoConstraints(180)) - put("e3", VideoConstraints(180)) + getEffectiveConstraints(sources, allocationSettings) shouldBe zeroEffectiveConstraints.apply { + put("s1", VideoConstraints(180)) + put("s2", VideoConstraints(180)) + put("s3", VideoConstraints(180)) } } - context("When the top endpoints do not have video") { - // The top endpoints in speaker order have no camera - val endpoints = listOf(e4, e5, e6, e1, e2, e3) + context("When the top sources have video DISABLED") { + // The top sources in speaker order have videoType = DISABLED + val endpoints = listOf(s4, s5, s6, s1, s2, s3) context("And default settings") { val allocationSettings = AllocationSettings(lastN = 3, defaultConstraints = defaultConstraints) getEffectiveConstraints(endpoints, allocationSettings) shouldBe zeroEffectiveConstraints.apply { - put("e4", VideoConstraints(180)) - put("e5", VideoConstraints(180)) - put("e6", VideoConstraints(180)) + put("s4", VideoConstraints(180)) + put("s5", VideoConstraints(180)) + put("s6", VideoConstraints(180)) } } - context("And non-zero constraints for endpoints down the list") { + context("And non-zero constraints for sources down the list") { val allocationSettings = AllocationSettings( lastN = 3, defaultConstraints = VideoConstraints.NOTHING, videoConstraints = mapOf( - "e6" to VideoConstraints(180), - "e2" to VideoConstraints(180), - "e3" to VideoConstraints(180) + "s6" to VideoConstraints(180), + "s2" to VideoConstraints(180), + "s3" to VideoConstraints(180) ) ) getEffectiveConstraints(endpoints, allocationSettings) shouldBe zeroEffectiveConstraints.apply { - put("e6", VideoConstraints(180)) - put("e2", VideoConstraints(180)) - put("e3", VideoConstraints(180)) + put("s6", VideoConstraints(180)) + put("s2", VideoConstraints(180)) + put("s3", VideoConstraints(180)) } } } diff --git a/jvb/src/test/kotlin/org/jitsi/videobridge/cc/allocation/SingleSourceAllocation2Test.kt b/jvb/src/test/kotlin/org/jitsi/videobridge/cc/allocation/SingleSourceAllocation2Test.kt deleted file mode 100644 index 185e48f71f..0000000000 --- a/jvb/src/test/kotlin/org/jitsi/videobridge/cc/allocation/SingleSourceAllocation2Test.kt +++ /dev/null @@ -1,303 +0,0 @@ -/* - * Copyright @ 2021 - present 8x8, Inc. - * Copyright @ 2021 - Vowel, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -@file:Suppress("NAME_SHADOWING") - -package org.jitsi.videobridge.cc.allocation - -import io.kotest.core.spec.style.ShouldSpec -import io.kotest.matchers.shouldBe -import org.jitsi.nlj.MediaSourceDesc -import org.jitsi.nlj.RtpEncodingDesc -import org.jitsi.nlj.VideoType -import org.jitsi.nlj.util.bps -import org.jitsi.nlj.util.kbps -import org.jitsi.utils.logging.DiagnosticContext -import org.jitsi.utils.time.FakeClock - -/** - * Test the logic for selecting the layers to be considered for a source and the "preferred" layer. - */ -class SingleSourceAllocation2Test : ShouldSpec() { - private val clock = FakeClock() - private val diagnosticContext = DiagnosticContext() - - private val ld7_5 = MockRtpLayerDesc(tid = 0, eid = 0, height = 180, frameRate = 7.5, bitrate = bitrateLd * 0.33) - private val ld15 = MockRtpLayerDesc(tid = 1, eid = 0, height = 180, frameRate = 15.0, bitrate = bitrateLd * 0.66) - private val ld30 = MockRtpLayerDesc(tid = 2, eid = 0, height = 180, frameRate = 30.0, bitrate = bitrateLd) - - private val sd7_5 = MockRtpLayerDesc(tid = 0, eid = 1, height = 360, frameRate = 7.5, bitrate = bitrateSd * 0.33) - private val sd15 = MockRtpLayerDesc(tid = 1, eid = 1, height = 360, frameRate = 15.0, bitrate = bitrateSd * 0.66) - private val sd30 = MockRtpLayerDesc(tid = 2, eid = 1, height = 360, frameRate = 30.0, bitrate = bitrateSd) - - private val hd7_5 = MockRtpLayerDesc(tid = 0, eid = 2, height = 720, frameRate = 7.5, bitrate = bitrateHd * 0.33) - private val hd15 = MockRtpLayerDesc(tid = 1, eid = 2, height = 720, frameRate = 15.0, bitrate = bitrateHd * 0.66) - private val hd30 = MockRtpLayerDesc(tid = 2, eid = 2, height = 720, frameRate = 30.0, bitrate = bitrateHd) - - init { - context("Camera") { - context("When all layers are active") { - val endpointId = "A" - val mediaSource = MediaSourceDesc( - arrayOf( - RtpEncodingDesc(1L, arrayOf(ld7_5, ld15, ld30)), - RtpEncodingDesc(1L, arrayOf(sd7_5, sd15, sd30)), - RtpEncodingDesc(1L, arrayOf(hd7_5, hd15, hd30)) - ), - videoType = VideoType.CAMERA - ) - - context("Without constraints") { - val allocation = - SingleSourceAllocation2( - endpointId, mediaSource, VideoConstraints(720), false, diagnosticContext, clock - ) - - // We include all resolutions up to the preferred resolution, and only high-FPS (at least - // "preferred FPS") layers for higher resolutions. - allocation.preferredLayer shouldBe sd30 - allocation.oversendLayer shouldBe null - allocation.layers.map { it.layer } shouldBe listOf(ld7_5, ld15, ld30, sd30, hd30) - } - context("With constraints") { - val allocation = - SingleSourceAllocation2( - endpointId, mediaSource, VideoConstraints(360), false, diagnosticContext, clock - ) - - // We include all resolutions up to the preferred resolution, and only high-FPS (at least - // "preferred FPS") layers for higher resolutions. - allocation.preferredLayer shouldBe sd30 - allocation.oversendLayer shouldBe null - allocation.layers.map { it.layer } shouldBe listOf(ld7_5, ld15, ld30, sd30) - } - context("With constraints unmet by any layer") { - // Single high-res stream with 3 temporal layers. - val endpointId = "A" - val mediaSource = MediaSourceDesc( - // No simulcast. 
- arrayOf(RtpEncodingDesc(1L, arrayOf(hd7_5, hd15, hd30))), - videoType = VideoType.CAMERA - ) - - context("Non-zero constraints") { - val allocation = - SingleSourceAllocation2( - endpointId, mediaSource, VideoConstraints(360), false, diagnosticContext, clock - ) - - // The receiver set 360p constraints, but we only have a 720p stream. - allocation.preferredLayer shouldBe hd30 - allocation.oversendLayer shouldBe null - allocation.layers.map { it.layer } shouldBe listOf(hd7_5, hd15, hd30) - } - context("Zero constraints") { - val allocation = - SingleSourceAllocation2( - endpointId, mediaSource, VideoConstraints(0), false, diagnosticContext, clock - ) - - // The receiver set a maxHeight=0 constraint. - allocation.preferredLayer shouldBe null - allocation.oversendLayer shouldBe null - allocation.layers.map { it.layer } shouldBe emptyList() - } - } - } - context("When some layers are inactive") { - // Override layers with bitrate=0. Simulate only up to 360p/15 being active. - val sd30 = MockRtpLayerDesc(tid = 2, eid = 1, height = 360, frameRate = 30.0, bitrate = 0.bps) - val hd7_5 = MockRtpLayerDesc(tid = 0, eid = 2, height = 720, frameRate = 7.5, bitrate = 0.bps) - val hd15 = MockRtpLayerDesc(tid = 1, eid = 2, height = 720, frameRate = 15.0, bitrate = 0.bps) - val hd30 = MockRtpLayerDesc(tid = 2, eid = 2, height = 720, frameRate = 30.0, bitrate = 0.bps) - val endpointId = "A" - val mediaSource = MediaSourceDesc( - arrayOf( - RtpEncodingDesc(1L, arrayOf(ld7_5, ld15, ld30)), - RtpEncodingDesc(1L, arrayOf(sd7_5, sd15, sd30)), - RtpEncodingDesc(1L, arrayOf(hd7_5, hd15, hd30)) - ), - videoType = VideoType.CAMERA - ) - - val allocation = - SingleSourceAllocation2( - endpointId, mediaSource, VideoConstraints(720), false, diagnosticContext, clock - ) - - // We include all resolutions up to the preferred resolution, and only high-FPS (at least - // "preferred FPS") layers for higher resolutions. - allocation.preferredLayer shouldBe ld30 - allocation.oversendLayer shouldBe null - allocation.layers.map { it.layer } shouldBe listOf(ld7_5, ld15, ld30) - } - } - context("Screensharing") { - context("When all layers are active") { - val endpointId = "A" - val mediaSource = MediaSourceDesc( - arrayOf( - RtpEncodingDesc(1L, arrayOf(ld7_5, ld15, ld30)), - RtpEncodingDesc(1L, arrayOf(sd7_5, sd15, sd30)), - RtpEncodingDesc(1L, arrayOf(hd7_5, hd15, hd30)) - ), - videoType = VideoType.DESKTOP - ) - - context("With no constraints") { - val allocation = - SingleSourceAllocation2( - endpointId, mediaSource, VideoConstraints(720), true, diagnosticContext, clock - ) - - // For screensharing the "preferred" layer should be the highest -- always prioritized over other - // endpoints. - allocation.preferredLayer shouldBe hd30 - allocation.oversendLayer shouldBe hd7_5 - allocation.layers.map { it.layer } shouldBe - listOf(ld7_5, ld15, ld30, sd7_5, sd15, sd30, hd7_5, hd15, hd30) - } - context("With 360p constraints") { - val allocation = - SingleSourceAllocation2( - endpointId, mediaSource, VideoConstraints(360), true, diagnosticContext, clock - ) - - allocation.preferredLayer shouldBe sd30 - allocation.oversendLayer shouldBe sd7_5 - allocation.layers.map { it.layer } shouldBe listOf(ld7_5, ld15, ld30, sd7_5, sd15, sd30) - } - } - context("The high layers are inactive (send-side bwe restrictions)") { - // Override layers with bitrate=0. Simulate only up to 360p/30 being active. 
- val hd7_5 = MockRtpLayerDesc(tid = 0, eid = 2, height = 720, frameRate = 7.5, bitrate = 0.bps) - val hd15 = MockRtpLayerDesc(tid = 1, eid = 2, height = 720, frameRate = 15.0, bitrate = 0.bps) - val hd30 = MockRtpLayerDesc(tid = 2, eid = 2, height = 720, frameRate = 30.0, bitrate = 0.bps) - val mediaSource = MediaSourceDesc( - arrayOf( - RtpEncodingDesc(1L, arrayOf(ld7_5, ld15, ld30)), - RtpEncodingDesc(1L, arrayOf(sd7_5, sd15, sd30)), - RtpEncodingDesc(1L, arrayOf(hd7_5, hd15, hd30)) - ), - videoType = VideoType.DESKTOP - ) - - val allocation = - SingleSourceAllocation2( - "A", mediaSource, VideoConstraints(720), true, diagnosticContext, clock - ) - - // For screensharing the "preferred" layer should be the highest -- always prioritized over other - // endpoints. - allocation.preferredLayer shouldBe sd30 - allocation.oversendLayer shouldBe sd7_5 - allocation.layers.map { it.layer } shouldBe listOf(ld7_5, ld15, ld30, sd7_5, sd15, sd30) - } - context("The low layers are inactive (simulcast signaled but not used)") { - // Override layers with bitrate=0. Simulate simulcast being signaled but effectively disabled. - val ld7_5 = MockRtpLayerDesc(tid = 0, eid = 2, height = 720, frameRate = 7.5, bitrate = 0.bps) - val ld15 = MockRtpLayerDesc(tid = 1, eid = 2, height = 720, frameRate = 15.0, bitrate = 0.bps) - val ld30 = MockRtpLayerDesc(tid = 2, eid = 2, height = 720, frameRate = 30.0, bitrate = 0.bps) - val sd7_5 = MockRtpLayerDesc(tid = 0, eid = 1, height = 360, frameRate = 7.5, bitrate = 0.bps) - val sd15 = MockRtpLayerDesc(tid = 1, eid = 1, height = 360, frameRate = 15.0, bitrate = 0.bps) - val sd30 = MockRtpLayerDesc(tid = 2, eid = 1, height = 360, frameRate = 30.0, bitrate = 0.bps) - val mediaSource = MediaSourceDesc( - arrayOf( - RtpEncodingDesc(1L, arrayOf(ld7_5, ld15, ld30)), - RtpEncodingDesc(1L, arrayOf(sd7_5, sd15, sd30)), - RtpEncodingDesc(1L, arrayOf(hd7_5, hd15, hd30)) - ), - videoType = VideoType.DESKTOP - ) - - context("With no constraints") { - val allocation = - SingleSourceAllocation2( - "A", mediaSource, VideoConstraints(720), true, diagnosticContext, clock - ) - - // For screensharing the "preferred" layer should be the highest -- always prioritized over other - // endpoints. - allocation.preferredLayer shouldBe hd30 - allocation.oversendLayer shouldBe hd7_5 - allocation.layers.map { it.layer } shouldBe listOf(hd7_5, hd15, hd30) - } - context("With 180p constraints") { - val allocation = - SingleSourceAllocation2( - "A", mediaSource, VideoConstraints(180), true, diagnosticContext, clock - ) - - // For screensharing the "preferred" layer should be the highest -- always prioritized over other - // endpoints. Since no layers satisfy the resolution constraints, we consider layers from the - // lowest available resolution (which is high). 
- allocation.preferredLayer shouldBe hd30 - allocation.oversendLayer shouldBe hd7_5 - allocation.layers.map { it.layer } shouldBe listOf(hd7_5, hd15, hd30) - } - } - context("VP9") { - val l1 = MockRtpLayerDesc(tid = 0, eid = 0, sid = 0, height = 720, frameRate = -1.0, bitrate = 150.kbps) - val l2 = MockRtpLayerDesc(tid = 0, eid = 0, sid = 1, height = 720, frameRate = -1.0, bitrate = 370.kbps) - val l3 = MockRtpLayerDesc(tid = 0, eid = 0, sid = 2, height = 720, frameRate = -1.0, bitrate = 750.kbps) - - val mediaSource = MediaSourceDesc( - arrayOf( - RtpEncodingDesc(1L, arrayOf(l1)), - RtpEncodingDesc(1L, arrayOf(l2)), - RtpEncodingDesc(1L, arrayOf(l3)) - ), - videoType = VideoType.DESKTOP - ) - - context("With no constraints") { - val allocation = - SingleSourceAllocation2( - "A", mediaSource, VideoConstraints(720), true, diagnosticContext, clock - ) - - allocation.preferredLayer shouldBe l3 - allocation.oversendLayer shouldBe l1 - allocation.layers.map { it.layer } shouldBe listOf(l1, l2, l3) - } - context("With 180p constraints") { - // For screensharing the "preferred" layer should be the highest -- always prioritized over other - // endpoints. Since no layers satisfy the resolution constraints, we consider layers from the - // lowest available resolution (which is high). If we are off-stage we only consider the first of - // these layers. - context("On stage") { - val allocation = SingleSourceAllocation2( - "A", mediaSource, VideoConstraints(180), true, diagnosticContext, clock - ) - - allocation.preferredLayer shouldBe l3 - allocation.oversendLayer shouldBe l1 - allocation.layers.map { it.layer } shouldBe listOf(l1, l2, l3) - } - context("Off stage") { - val allocation = SingleSourceAllocation2( - "A", mediaSource, VideoConstraints(180), false, diagnosticContext, clock - ) - - allocation.preferredLayer shouldBe l1 - allocation.oversendLayer shouldBe null - allocation.layers.map { it.layer } shouldBe listOf(l1) - } - } - } - } - } -} diff --git a/jvb/src/test/kotlin/org/jitsi/videobridge/cc/allocation/SingleSourceAllocationTest.kt b/jvb/src/test/kotlin/org/jitsi/videobridge/cc/allocation/SingleSourceAllocationTest.kt index 088f9e4757..874d341536 100644 --- a/jvb/src/test/kotlin/org/jitsi/videobridge/cc/allocation/SingleSourceAllocationTest.kt +++ b/jvb/src/test/kotlin/org/jitsi/videobridge/cc/allocation/SingleSourceAllocationTest.kt @@ -1,5 +1,6 @@ /* * Copyright @ 2021 - present 8x8, Inc. + * Copyright @ 2021 - Vowel, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -28,7 +29,7 @@ import org.jitsi.utils.logging.DiagnosticContext import org.jitsi.utils.time.FakeClock /** - * Test the logic for selecting the layers to be considered for an endpoint and the "preferred" layer. + * Test the logic for selecting the layers to be considered for a source and the "preferred" layer. 
*/ class SingleSourceAllocationTest : ShouldSpec() { private val clock = FakeClock() @@ -49,21 +50,21 @@ class SingleSourceAllocationTest : ShouldSpec() { init { context("Camera") { context("When all layers are active") { - val endpoint = TestEndpoint( - "id", - MediaSourceDesc( - arrayOf( - RtpEncodingDesc(1L, arrayOf(ld7_5, ld15, ld30)), - RtpEncodingDesc(1L, arrayOf(sd7_5, sd15, sd30)), - RtpEncodingDesc(1L, arrayOf(hd7_5, hd15, hd30)) - ) + val endpointId = "A" + val mediaSource = MediaSourceDesc( + arrayOf( + RtpEncodingDesc(1L, arrayOf(ld7_5, ld15, ld30)), + RtpEncodingDesc(1L, arrayOf(sd7_5, sd15, sd30)), + RtpEncodingDesc(1L, arrayOf(hd7_5, hd15, hd30)) ), videoType = VideoType.CAMERA ) context("Without constraints") { val allocation = - SingleSourceAllocation(endpoint, VideoConstraints(720), false, diagnosticContext, clock) + SingleSourceAllocation( + endpointId, mediaSource, VideoConstraints(720), false, diagnosticContext, clock + ) // We include all resolutions up to the preferred resolution, and only high-FPS (at least // "preferred FPS") layers for higher resolutions. @@ -73,7 +74,9 @@ class SingleSourceAllocationTest : ShouldSpec() { } context("With constraints") { val allocation = - SingleSourceAllocation(endpoint, VideoConstraints(360), false, diagnosticContext, clock) + SingleSourceAllocation( + endpointId, mediaSource, VideoConstraints(360), false, diagnosticContext, clock + ) // We include all resolutions up to the preferred resolution, and only high-FPS (at least // "preferred FPS") layers for higher resolutions. @@ -83,17 +86,18 @@ class SingleSourceAllocationTest : ShouldSpec() { } context("With constraints unmet by any layer") { // Single high-res stream with 3 temporal layers. - val endpoint = TestEndpoint( - "id", - MediaSourceDesc( - // No simulcast. - arrayOf(RtpEncodingDesc(1L, arrayOf(hd7_5, hd15, hd30))) - ) + val endpointId = "A" + val mediaSource = MediaSourceDesc( + // No simulcast. + arrayOf(RtpEncodingDesc(1L, arrayOf(hd7_5, hd15, hd30))), + videoType = VideoType.CAMERA ) context("Non-zero constraints") { val allocation = - SingleSourceAllocation(endpoint, VideoConstraints(360), false, diagnosticContext, clock) + SingleSourceAllocation( + endpointId, mediaSource, VideoConstraints(360), false, diagnosticContext, clock + ) // The receiver set 360p constraints, but we only have a 720p stream. allocation.preferredLayer shouldBe hd30 @@ -102,7 +106,9 @@ class SingleSourceAllocationTest : ShouldSpec() { } context("Zero constraints") { val allocation = - SingleSourceAllocation(endpoint, VideoConstraints(0), false, diagnosticContext, clock) + SingleSourceAllocation( + endpointId, mediaSource, VideoConstraints(0), false, diagnosticContext, clock + ) // The receiver set a maxHeight=0 constraint. 
allocation.preferredLayer shouldBe null @@ -117,20 +123,20 @@ class SingleSourceAllocationTest : ShouldSpec() { val hd7_5 = MockRtpLayerDesc(tid = 0, eid = 2, height = 720, frameRate = 7.5, bitrate = 0.bps) val hd15 = MockRtpLayerDesc(tid = 1, eid = 2, height = 720, frameRate = 15.0, bitrate = 0.bps) val hd30 = MockRtpLayerDesc(tid = 2, eid = 2, height = 720, frameRate = 30.0, bitrate = 0.bps) - val endpoint = TestEndpoint( - "id", - MediaSourceDesc( - arrayOf( - RtpEncodingDesc(1L, arrayOf(ld7_5, ld15, ld30)), - RtpEncodingDesc(1L, arrayOf(sd7_5, sd15, sd30)), - RtpEncodingDesc(1L, arrayOf(hd7_5, hd15, hd30)) - ) + val endpointId = "A" + val mediaSource = MediaSourceDesc( + arrayOf( + RtpEncodingDesc(1L, arrayOf(ld7_5, ld15, ld30)), + RtpEncodingDesc(1L, arrayOf(sd7_5, sd15, sd30)), + RtpEncodingDesc(1L, arrayOf(hd7_5, hd15, hd30)) ), videoType = VideoType.CAMERA ) val allocation = - SingleSourceAllocation(endpoint, VideoConstraints(720), false, diagnosticContext, clock) + SingleSourceAllocation( + endpointId, mediaSource, VideoConstraints(720), false, diagnosticContext, clock + ) // We include all resolutions up to the preferred resolution, and only high-FPS (at least // "preferred FPS") layers for higher resolutions. @@ -141,21 +147,21 @@ class SingleSourceAllocationTest : ShouldSpec() { } context("Screensharing") { context("When all layers are active") { - val endpoint = TestEndpoint( - "id", - MediaSourceDesc( - arrayOf( - RtpEncodingDesc(1L, arrayOf(ld7_5, ld15, ld30)), - RtpEncodingDesc(1L, arrayOf(sd7_5, sd15, sd30)), - RtpEncodingDesc(1L, arrayOf(hd7_5, hd15, hd30)) - ) + val endpointId = "A" + val mediaSource = MediaSourceDesc( + arrayOf( + RtpEncodingDesc(1L, arrayOf(ld7_5, ld15, ld30)), + RtpEncodingDesc(1L, arrayOf(sd7_5, sd15, sd30)), + RtpEncodingDesc(1L, arrayOf(hd7_5, hd15, hd30)) ), videoType = VideoType.DESKTOP ) context("With no constraints") { val allocation = - SingleSourceAllocation(endpoint, VideoConstraints(720), true, diagnosticContext, clock) + SingleSourceAllocation( + endpointId, mediaSource, VideoConstraints(720), true, diagnosticContext, clock + ) // For screensharing the "preferred" layer should be the highest -- always prioritized over other // endpoints. 
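Note on the call-site change repeated throughout this file (a condensed sketch for review, not part of the patch): the tests no longer wrap the source in a TestEndpoint; SingleSourceAllocation is constructed from the endpoint id and the MediaSourceDesc directly. The fixture names (ld7_5/sd30/hd30, diagnosticContext, clock) are the ones already defined in this test class, and the "before" line is abbreviated from the removed code above.

    // Before (removed): the test passed a TestEndpoint("id", ...) wrapping the MediaSourceDesc:
    //   SingleSourceAllocation(endpoint, VideoConstraints(720), false, diagnosticContext, clock)

    // After: the endpoint id and the MediaSourceDesc are passed separately.
    val mediaSource = MediaSourceDesc(
        arrayOf(
            RtpEncodingDesc(1L, arrayOf(ld7_5, ld15, ld30)),
            RtpEncodingDesc(1L, arrayOf(sd7_5, sd15, sd30)),
            RtpEncodingDesc(1L, arrayOf(hd7_5, hd15, hd30))
        ),
        videoType = VideoType.CAMERA
    )
    val allocation = SingleSourceAllocation(
        "A", mediaSource, VideoConstraints(720), false, diagnosticContext, clock
    )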
@@ -166,7 +172,9 @@ class SingleSourceAllocationTest : ShouldSpec() { } context("With 360p constraints") { val allocation = - SingleSourceAllocation(endpoint, VideoConstraints(360), true, diagnosticContext, clock) + SingleSourceAllocation( + endpointId, mediaSource, VideoConstraints(360), true, diagnosticContext, clock + ) allocation.preferredLayer shouldBe sd30 allocation.oversendLayer shouldBe sd7_5 @@ -178,20 +186,19 @@ class SingleSourceAllocationTest : ShouldSpec() { val hd7_5 = MockRtpLayerDesc(tid = 0, eid = 2, height = 720, frameRate = 7.5, bitrate = 0.bps) val hd15 = MockRtpLayerDesc(tid = 1, eid = 2, height = 720, frameRate = 15.0, bitrate = 0.bps) val hd30 = MockRtpLayerDesc(tid = 2, eid = 2, height = 720, frameRate = 30.0, bitrate = 0.bps) - val endpoint = TestEndpoint( - "id", - MediaSourceDesc( - arrayOf( - RtpEncodingDesc(1L, arrayOf(ld7_5, ld15, ld30)), - RtpEncodingDesc(1L, arrayOf(sd7_5, sd15, sd30)), - RtpEncodingDesc(1L, arrayOf(hd7_5, hd15, hd30)) - ) + val mediaSource = MediaSourceDesc( + arrayOf( + RtpEncodingDesc(1L, arrayOf(ld7_5, ld15, ld30)), + RtpEncodingDesc(1L, arrayOf(sd7_5, sd15, sd30)), + RtpEncodingDesc(1L, arrayOf(hd7_5, hd15, hd30)) ), videoType = VideoType.DESKTOP ) val allocation = - SingleSourceAllocation(endpoint, VideoConstraints(720), true, diagnosticContext, clock) + SingleSourceAllocation( + "A", mediaSource, VideoConstraints(720), true, diagnosticContext, clock + ) // For screensharing the "preferred" layer should be the highest -- always prioritized over other // endpoints. @@ -207,21 +214,20 @@ class SingleSourceAllocationTest : ShouldSpec() { val sd7_5 = MockRtpLayerDesc(tid = 0, eid = 1, height = 360, frameRate = 7.5, bitrate = 0.bps) val sd15 = MockRtpLayerDesc(tid = 1, eid = 1, height = 360, frameRate = 15.0, bitrate = 0.bps) val sd30 = MockRtpLayerDesc(tid = 2, eid = 1, height = 360, frameRate = 30.0, bitrate = 0.bps) - val endpoint = TestEndpoint( - "id", - MediaSourceDesc( - arrayOf( - RtpEncodingDesc(1L, arrayOf(ld7_5, ld15, ld30)), - RtpEncodingDesc(1L, arrayOf(sd7_5, sd15, sd30)), - RtpEncodingDesc(1L, arrayOf(hd7_5, hd15, hd30)) - ) + val mediaSource = MediaSourceDesc( + arrayOf( + RtpEncodingDesc(1L, arrayOf(ld7_5, ld15, ld30)), + RtpEncodingDesc(1L, arrayOf(sd7_5, sd15, sd30)), + RtpEncodingDesc(1L, arrayOf(hd7_5, hd15, hd30)) ), videoType = VideoType.DESKTOP ) context("With no constraints") { val allocation = - SingleSourceAllocation(endpoint, VideoConstraints(720), true, diagnosticContext, clock) + SingleSourceAllocation( + "A", mediaSource, VideoConstraints(720), true, diagnosticContext, clock + ) // For screensharing the "preferred" layer should be the highest -- always prioritized over other // endpoints. @@ -231,7 +237,9 @@ class SingleSourceAllocationTest : ShouldSpec() { } context("With 180p constraints") { val allocation = - SingleSourceAllocation(endpoint, VideoConstraints(180), true, diagnosticContext, clock) + SingleSourceAllocation( + "A", mediaSource, VideoConstraints(180), true, diagnosticContext, clock + ) // For screensharing the "preferred" layer should be the highest -- always prioritized over other // endpoints. 
Since no layers satisfy the resolution constraints, we consider layers from the @@ -246,21 +254,20 @@ class SingleSourceAllocationTest : ShouldSpec() { val l2 = MockRtpLayerDesc(tid = 0, eid = 0, sid = 1, height = 720, frameRate = -1.0, bitrate = 370.kbps) val l3 = MockRtpLayerDesc(tid = 0, eid = 0, sid = 2, height = 720, frameRate = -1.0, bitrate = 750.kbps) - val endpoint = TestEndpoint( - "id", - MediaSourceDesc( - arrayOf( - RtpEncodingDesc(1L, arrayOf(l1)), - RtpEncodingDesc(1L, arrayOf(l2)), - RtpEncodingDesc(1L, arrayOf(l3)) - ) + val mediaSource = MediaSourceDesc( + arrayOf( + RtpEncodingDesc(1L, arrayOf(l1)), + RtpEncodingDesc(1L, arrayOf(l2)), + RtpEncodingDesc(1L, arrayOf(l3)) ), videoType = VideoType.DESKTOP ) context("With no constraints") { val allocation = - SingleSourceAllocation(endpoint, VideoConstraints(720), true, diagnosticContext, clock) + SingleSourceAllocation( + "A", mediaSource, VideoConstraints(720), true, diagnosticContext, clock + ) allocation.preferredLayer shouldBe l3 allocation.oversendLayer shouldBe l1 @@ -272,16 +279,18 @@ class SingleSourceAllocationTest : ShouldSpec() { // lowest available resolution (which is high). If we are off-stage we only consider the first of // these layers. context("On stage") { - val allocation = - SingleSourceAllocation(endpoint, VideoConstraints(180), true, diagnosticContext, clock) + val allocation = SingleSourceAllocation( + "A", mediaSource, VideoConstraints(180), true, diagnosticContext, clock + ) allocation.preferredLayer shouldBe l3 allocation.oversendLayer shouldBe l1 allocation.layers.map { it.layer } shouldBe listOf(l1, l2, l3) } context("Off stage") { - val allocation = - SingleSourceAllocation(endpoint, VideoConstraints(180), false, diagnosticContext, clock) + val allocation = SingleSourceAllocation( + "A", mediaSource, VideoConstraints(180), false, diagnosticContext, clock + ) allocation.preferredLayer shouldBe l1 allocation.oversendLayer shouldBe null
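The VP9 K-SVC screenshare cases above all encode the same rule for when no spatial layer meets the receiver's resolution constraint: on stage, every spatial layer (lowest first) stays eligible, the highest is "preferred" and the lowest becomes the oversend layer; off stage, only the lowest spatial layer is kept and there is no oversend layer. A rough, generic restatement of that rule follows -- an illustration of what the expectations assert, not the implementation in SingleSourceAllocation; the function and type names are invented for this sketch.

    data class ScreenshareSelection<T>(val layers: List<T>, val preferred: T?, val oversend: T?)

    // Assumes `spatialLayers` is ordered from the lowest to the highest spatial layer and that
    // none of them satisfies the receiver's constraints (the situation tested above).
    fun <T> selectScreenshareLayers(spatialLayers: List<T>, onStage: Boolean): ScreenshareSelection<T> {
        val considered = if (onStage) spatialLayers else spatialLayers.take(1)
        return ScreenshareSelection(
            layers = considered,
            preferred = considered.lastOrNull(),                        // highest considered layer
            oversend = if (onStage) considered.firstOrNull() else null  // oversend only when on stage
        )
    }

    // With the l1/l2/l3 layers above: on stage -> layers [l1, l2, l3], preferred l3, oversend l1;
    // off stage -> layers [l1], preferred l1, no oversend -- matching the expectations in these tests.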