diff --git a/index.d.ts b/index.d.ts index 7ec95e8678..bc4e795bdc 100644 --- a/index.d.ts +++ b/index.d.ts @@ -984,6 +984,8 @@ export interface Representation { bitsPerPixel: number; codecPrivateData: string | null; codecs: string | null; + dependencyId: string | null; + dependentRepresentation: object | null; fragmentDuration: number | null; frameRate: number; height: number; @@ -2868,6 +2870,7 @@ export interface conformanceViolationConstants { export interface Constants { STREAM: 'stream', VIDEO: 'video', + ENHANCEMENT: 'enhancement', AUDIO: 'audio', TEXT: 'text', MUXED: 'muxed', @@ -5907,6 +5910,8 @@ export interface StreamProcessor { selectMediaInfo(selectionInput: object): Promise; + setEnhancementStreamProcessor(value: StreamProcessor): void; + setExplicitBufferingTime(value: number): void; setMediaSource(mediaSource: MediaSource): void; @@ -5914,6 +5919,34 @@ export interface StreamProcessor { updateStreamInfo(newStreamInfo: StreamInfo): Promise; } +export interface ExternalMediaSource { + duration: number | null; + + readyState: string; + + addSourceBuffer(mimeType: string): ExternalSourceBuffer; + + close(): void; + + endOfStream(): void; + + open(): void; + + removeSourceBuffer(sourceBuffer: ExternalSourceBuffer): void; + + reset(): void; +} + +export interface ExternalSourceBuffer { + buffered: TimeRanges; + + abort(): void; + + appendBuffer(segmentData: ArrayBuffer, segmentStartTime: number, segmentEndTime: number): void; + + remove(start: number, end: number): void; +} + export interface XlinkLoader { load(url: string, element: any, resolveObject: object): void; diff --git a/samples/dash-if-reference-player/app/contributors.json b/samples/dash-if-reference-player/app/contributors.json index 67d7122853..fa0141008c 100644 --- a/samples/dash-if-reference-player/app/contributors.json +++ b/samples/dash-if-reference-player/app/contributors.json @@ -68,6 +68,11 @@ "name": "Broadpeak", "logo": "app/img/broadpeak.png", "link": "https://broadpeak.tv/" + }, + { + "name": "V-Nova", + "logo": "app/img/v-nova.png", + "link": "https://v-nova.com/" } ] } diff --git a/samples/dash-if-reference-player/app/css/main.css b/samples/dash-if-reference-player/app/css/main.css index 73d2e431e1..ae12795de5 100644 --- a/samples/dash-if-reference-player/app/css/main.css +++ b/samples/dash-if-reference-player/app/css/main.css @@ -153,6 +153,7 @@ a:hover { .dash-video-player { background-color: #000000; + position: relative; } .col-md-9 video { @@ -179,6 +180,10 @@ a:hover { margin-top: -5px !important; } +.element-hidden { + display: none !important; +} + .btn-play-pause, .control-icon-layout { padding: 4px 10px !important; diff --git a/samples/dash-if-reference-player/app/img/v-nova.png b/samples/dash-if-reference-player/app/img/v-nova.png new file mode 100644 index 0000000000..bcda91cbbc Binary files /dev/null and b/samples/dash-if-reference-player/app/img/v-nova.png differ diff --git a/samples/dash-if-reference-player/app/main.js b/samples/dash-if-reference-player/app/main.js index 6273cf8608..b69a2bda86 100644 --- a/samples/dash-if-reference-player/app/main.js +++ b/samples/dash-if-reference-player/app/main.js @@ -238,6 +238,8 @@ app.controller('DashController', ['$scope', '$window', 'sources', 'contributors' $scope.conformanceViolations = []; + $scope.enhancementDecoder = null; + var defaultExternalSettings = { mpd: encodeURIComponent('https://dash.akamaized.net/akamai/bbb_30fps/bbb_30fps.mpd'), loop: true, @@ -1035,6 +1037,126 @@ app.controller('DashController', ['$scope', '$window', 'sources', 
'contributors' }); }; + $scope.toggleEnhancementEnabled = function () { + const video = document.querySelector('video'); + const canvas = document.querySelector('canvas'); + + if ($scope.enhancementEnabled) { + canvas.classList.remove('element-hidden'); + video.classList.add('element-hidden'); + } else { + canvas.classList.add('element-hidden'); + video.classList.remove('element-hidden'); + } + }; + + $scope.setupEnhancementDecoder = function () { + /** + * MPEG-5 LCEVC Integration for Dash.js Player. + * + * These are the changes needed for passing the correct + * data to lcevc_dec.js and trigger the correct methods + * at the correct time. + */ + + /** + * Let the LCEVC Decoder Library make the decision as to when to switch, based on the currently + * rendered frame. If disabled, the player needs to signal LCEVC when there is a render change + * after an ABR switch happens. + * + * @readonly + * @enum {number} + * @public + */ + const AutoRenderMode = { + DISABLED: 0, + ENABLED: 1 + }; + + dashjs.Extensions = { + ...dashjs.Extensions, + /** + * Attaches LCEVC functionality and methods to the provided Dash.js player instance. + * + * @param {object} player the Dash.js player instance to attach LCEVC to + */ + useLcevc: function useLcevc(player) { + if (!player) { + throw new TypeError('The provided Dash.js player instance was null or undefined.'); + } + const { LCEVCdec } = window; + if (!LCEVCdec) { + throw new TypeError('LCEVC Decoder Libraries could not be loaded.'); + } + + let abrIndex = -1; + + player.attachLcevc = function attachLcevc(media, canvas, LCEVCdecConfig) { + player.LCEVCdec = new LCEVCdec.LCEVCdec( + media, + canvas, + LCEVCdecConfig + ); + + /* Signal profile information and switches to LCEVCdecJS */ + player.on(dashjs.MediaPlayer.events.QUALITY_CHANGE_REQUESTED, handleQualityChange); + player.on(dashjs.MediaPlayer.events.FRAGMENT_LOADING_COMPLETED, handleFragmentLoadingCompleted); + player.on(dashjs.MediaPlayer.events.REPRESENTATION_SWITCH, handleRepresentationSwitch); + player.on('externalSourceBufferUpdateStart', handleBufferUpdates); + }; + + function handleFragmentLoadingCompleted(event) { + if (event.mediaType === 'enhancement') { + abrIndex = event.request.representation.absoluteIndex; + } + } + + function handleQualityChange(event) { + if (event.mediaType === 'video' || event.mediaType === 'enhancement') { + const index = event.newRepresentation.absoluteIndex; + console.log('>>> requested:', event.mediaType, index); + player.LCEVCdec.setLevelSwitching(index, AutoRenderMode.ENABLED); + } + } + + function handleRepresentationSwitch(event) { + if (event.mediaType === 'video' || event.mediaType === 'enhancement') { + const rep = event.currentRepresentation; + const index = rep.absoluteIndex; + // Workaround for very first representation played for which no QUALITY_CHANGE_REQUESTED arrives + if (rep && rep.dependentRepresentation) { + console.log('>>> rep switch:', event.mediaType, index); + player.LCEVCdec.setLevelSwitching(index, AutoRenderMode.ENABLED); + } + } + } + + function handleBufferUpdates(event) { + if (event.request === 'appendBuffer') { + player.LCEVCdec.appendBuffer(event.data, 'video', abrIndex, 0, /* isMuxed */ false); + } + else if (event.request === 'remove') { + player.LCEVCdec.flushBuffer(event.start, event.end); + } + } + } + }; + + const video = document.querySelector('video'); + const canvas = document.querySelector('canvas'); + const LCEVCdecConfig = { + dynamicPerformanceScaling: false + }; + + window.LCEVCdec.ready.then(() => { + /* Attach 
LCEVC to the Dash.js player instance */ + const player = $scope.player; + dashjs.Extensions.useLcevc(player); + player.attachLcevc(video, canvas, LCEVCdecConfig); + $scope.enhancementDecoder = player.LCEVCdec; + }); + }; + $scope.toggleCmsdApplyMb = function () { $scope.player.updateSettings({ streaming: { @@ -1104,7 +1226,8 @@ app.controller('DashController', ['$scope', '$window', 'sources', 'contributors' liveDelay: $scope.defaultLiveDelay }, abr: {}, - cmcd: {} + cmcd: {}, + enhancement: {} } }; @@ -1170,6 +1293,21 @@ app.controller('DashController', ['$scope', '$window', 'sources', 'contributors' config.streaming.cmcd.rtpSafetyFactor = $scope.cmcdRtpSafetyFactor ? $scope.cmcdRtpSafetyFactor : null; config.streaming.cmcd.enabledKeys = $scope.cmcdEnabledKeys ? $scope._getFormatedCmcdEnabledKeys() : []; + // Cleanup enhancement decoder if it exists from previous playback + if ($scope.enhancementDecoder) { + $scope.enhancementDecoder.close(); + $scope.enhancementDecoder = null; + } + + // Setup enhancement decoder if checkbox is checked or if stream is from V-Nova + if ($scope.enhancementEnabled || $scope.selectedItem.provider === 'v-nova') { + config.streaming.enhancement.enabled = true; + $scope.enhancementEnabled = true; + $scope.setupEnhancementDecoder(); + } + + $scope.toggleEnhancementEnabled(); + $scope.player.updateSettings(config); $scope.controlbar.reset(); diff --git a/samples/dash-if-reference-player/app/sources.json b/samples/dash-if-reference-player/app/sources.json index b7729ac63d..58030fe57b 100644 --- a/samples/dash-if-reference-player/app/sources.json +++ b/samples/dash-if-reference-player/app/sources.json @@ -93,6 +93,11 @@ "acronym": "ARTE", "name": "ARTE", "url": "https://www.arte.tv/en/" + }, + "v-nova": { + "acronym": "V-Nova", + "name": "V-Nova", + "url": "https://v-nova.com/" } }, "items": [ @@ -1303,6 +1308,21 @@ "provider": "microsoft" } ] + }, + { + "name": "MPEG-5 Part 2 - LCEVC", + "submenu": [ + { + "url": "https://s3.eu-west-1.amazonaws.com/origin-prod-lon-v-nova.com/lcevcDualTrack/1080p30_4Mbps_no_dR/master.mpd", + "name": "Scalable Carriage", + "provider": "v-nova" + }, + { + "url": "https://s3.eu-west-1.amazonaws.com/origin-prod-lon-v-nova.com/lcevcDualTrack/1080p30_4Mbps_with_dR/master.mpd", + "name": "Scalable with Debug Residuals", + "provider": "v-nova" + } + ] } ] } diff --git a/samples/dash-if-reference-player/index.html b/samples/dash-if-reference-player/index.html index 8f6f2bf294..6dd38d1753 100644 --- a/samples/dash-if-reference-player/index.html +++ b/samples/dash-if-reference-player/index.html @@ -34,6 +34,9 @@ + + + @@ -1003,6 +1006,17 @@ ng-change="updateCmsdEtpWeightRatio()"> +
+
Enhancement Layer
+
+ +
+
@@ -1011,6 +1025,8 @@
+ +
{{ castPlayerState === 'IDLE' ? 'Ready to cast stream' : castPlayerState }} diff --git a/samples/lcevc/lcevc-dual-track-debug.html b/samples/lcevc/lcevc-dual-track-debug.html new file mode 100644 index 0000000000..8022871353 --- /dev/null +++ b/samples/lcevc/lcevc-dual-track-debug.html @@ -0,0 +1,254 @@ + + + + + MPEG-5 LCEVC example + + + + + + + + + + + + + + + + + + + +
+
+
+ +
+
+
+
+

MPEG-5 Part 2 LCEVC Dual Track Implementation

+

This example shows how content with LCEVC Enhancements in a separate track can be played + back by the dash.js + player. dash.js allows a switch between Enhanced and Base profiles.

+
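A minimal sketch of how a page like this might initialize dash.js with the enhancement layer enabled; the element ids, manifest URL and LCEVC config values are illustrative assumptions, and the full decoder wiring follows the pattern shown in the reference player's setupEnhancementDecoder() earlier in this diff.

```js
// Illustrative sketch only: URL and config values are assumptions.
const video = document.querySelector('video');
const canvas = document.querySelector('canvas');
const mpdUrl = 'https://example.com/lcevc-dual-track/master.mpd'; // placeholder manifest

const player = dashjs.MediaPlayer().create();
player.updateSettings({
    streaming: {
        // Route AdaptationSets with an LCEVC codec to the enhancement pipeline
        enhancement: { enabled: true, codecs: ['lvc1'] }
    }
});
player.initialize(video, mpdUrl, /* autoPlay */ true);

// Once the LCEVC library is ready, construct the decoder against the video and canvas
// elements, mirroring attachLcevc() in the reference player.
window.LCEVCdec.ready.then(() => {
    const lcevcDec = new window.LCEVCdec.LCEVCdec(video, canvas, { dynamicPerformanceScaling: false });
    // Segment data and ABR switches still have to be forwarded to lcevcDec,
    // as done in setupEnhancementDecoder() above.
});
```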
+
+
+
+
+ + +
+
+ +
+ 00:00:00 +
+ +
+
+ +
+ +
+ +
+
+ +
+
+ +
+ 00:00:00 +
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ © DASH-IF +
+
+
+ + + diff --git a/samples/lcevc/lcevc-dual-track.html b/samples/lcevc/lcevc-dual-track.html new file mode 100644 index 0000000000..3ae25fdf59 --- /dev/null +++ b/samples/lcevc/lcevc-dual-track.html @@ -0,0 +1,254 @@ + + + + + MPEG-5 LCEVC example + + + + + + + + + + + + + + + + + + + +
+
+
+ +
+
+
+
+

MPEG-5 Part 2 LCEVC Dual Track Implementation

+

This example shows how content with LCEVC Enhancements in a separate track can be played + back by the dash.js + player. dash.js allows a switch between Enhanced and Base profiles.

+
+
+
+
+
+ + +
+
+ +
+ 00:00:00 +
+ +
+
+ +
+ +
+ +
+
+ +
+
+ +
+ 00:00:00 +
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ © DASH-IF +
+
+
+ + + diff --git a/samples/lcevc-sei/lcevc-sei.html b/samples/lcevc/lcevc-sei.html similarity index 99% rename from samples/lcevc-sei/lcevc-sei.html rename to samples/lcevc/lcevc-sei.html index ed2f13965c..29a0a5cd6f 100644 --- a/samples/lcevc-sei/lcevc-sei.html +++ b/samples/lcevc/lcevc-sei.html @@ -173,4 +173,4 @@

MPEG-5 Part 2 LCEVC Decoding with SEI carriage

}); - + \ No newline at end of file diff --git a/samples/samples.json b/samples/samples.json index b883f2f47b..357d5e1552 100644 --- a/samples/samples.json +++ b/samples/samples.json @@ -844,9 +844,31 @@ "section": "MPEG-5 Part 2 - LCEVC", "samples": [ { - "title": "SEI Implementation", - "description": "A sample showing how to use the MPEG-5 Part 2 LCEVC in an SEI Based enhancement delivery.", - "href": "lcevc-sei/lcevc-sei.html", + "title": "Scalable LCEVC carriage", + "description": "A sample demonstrating how to use MPEG-5 Part 2 LCEVC for scalable delivery, where higher resolution representations are generated by applying LCEVC to the corresponding lower resolution versions.", + "href": "lcevc/lcevc-dual-track.html", + "image": "lib/img/bbb-1.jpg", + "labels": [ + "Module", + "Video", + "Audio" + ] + }, + { + "title": "Scalable with Debug Residuals", + "description": "A sample demonstrating how to use MPEG-5 Part 2 LCEVC for scalable delivery, where higher resolution representations are generated by applying LCEVC to the corresponding lower resolution versions. This version includes debug Residuals resulting in moving squares when LCEVC is being correctly decoded", + "href": "lcevc/lcevc-dual-track-debug.html", + "image": "lib/img/bbb-1.jpg", + "labels": [ + "Module", + "Video", + "Audio" + ] + }, + { + "title": "LCEVC using SEI carriage", + "description": "A sample showing how to use MPEG-5 Part 2 LCEVC using SEI carriage to enhance the stream it is included in", + "href": "lcevc/lcevc-sei.html", "image": "lib/img/bbb-1.jpg", "labels": [ "Module", diff --git a/src/core/Settings.js b/src/core/Settings.js index 063d385ca5..c634d9f0d8 100644 --- a/src/core/Settings.js +++ b/src/core/Settings.js @@ -331,6 +331,10 @@ import SwitchRequest from '../streaming/rules/SwitchRequest.js'; * etpWeightRatio: 0 * } * }, + * enhancement: { + * enabled: false, + * codecs: ['lvc1'] + * }, * defaultSchemeIdUri: { * viewpoint: '', * audioChannelConfiguration: 'urn:mpeg:mpegB:cicp:ChannelConfiguration', @@ -941,6 +945,16 @@ import SwitchRequest from '../streaming/rules/SwitchRequest.js'; * Sets the weight ratio (between 0 and 1) that shall be applied on CMSD estimated throuhgput compared to measured throughput when calculating throughput. */ +/** + * @typedef {Object} EnhancementSettings + * @property {boolean} [enabled=false] + * Enable or disable the scalable enhancement playback (e.g. LCEVC). + * @property {Array.} [codecs] + * Specifies which scalable enhancement codecs are supported by the player. + * + * If not specified this value defaults to ['lvc1']. + */ + /** * @typedef {Object} Metrics * @property {number} [metricsMaxListDepth=100] @@ -1067,6 +1081,8 @@ import SwitchRequest from '../streaming/rules/SwitchRequest.js'; * Settings related to Common Media Client Data reporting. * @property {module:Settings~CmsdSettings} cmsd * Settings related to Common Media Server Data parsing. + * @property {module:Settings~EnhancementSettings} enhancement + * Settings related to scalable enhancement playback (e.g. LCEVC). 
* @property {module:Settings~defaultSchemeIdUri} defaultSchemeIdUri * Default schemeIdUri for descriptor type elements * These strings are used when not provided with setInitialMediaSettingsFor() @@ -1406,6 +1422,10 @@ function Settings() { etpWeightRatio: 0 } }, + enhancement: { + enabled: false, + codecs: ['lvc1'] + }, defaultSchemeIdUri: { viewpoint: '', audioChannelConfiguration: 'urn:mpeg:mpegB:cicp:ChannelConfiguration', diff --git a/src/dash/DashAdapter.js b/src/dash/DashAdapter.js index 16fb3f8d0c..42dc5c55bb 100644 --- a/src/dash/DashAdapter.js +++ b/src/dash/DashAdapter.js @@ -1066,7 +1066,9 @@ function DashAdapter() { mediaInfo.id = adaptation.id; mediaInfo.index = adaptation.index; - mediaInfo.type = adaptation.type; + mediaInfo.codec = dashManifestModel.getCodec(realAdaptation); + const enhancementCodecs = settings.get().streaming.enhancement.codecs; + mediaInfo.type = enhancementCodecs.some(codec => mediaInfo.codec?.includes(codec)) ? Constants.ENHANCEMENT : adaptation.type; mediaInfo.streamInfo = convertPeriodToStreamInfo(adaptation.period); mediaInfo.representationCount = dashManifestModel.getRepresentationCount(realAdaptation); mediaInfo.labels = dashManifestModel.getLabelsForAdaptation(realAdaptation); @@ -1087,7 +1089,6 @@ function DashAdapter() { mediaInfo.audioChannelConfiguration = dashManifestModel.getAudioChannelConfigurationForRepresentation(realAdaptation.Representation[0]); } mediaInfo.roles = dashManifestModel.getRolesForAdaptation(realAdaptation); - mediaInfo.codec = dashManifestModel.getCodec(realAdaptation); mediaInfo.mimeType = dashManifestModel.getMimeType(realAdaptation); mediaInfo.contentProtection = dashManifestModel.getContentProtectionByAdaptation(realAdaptation); mediaInfo.bitrateList = dashManifestModel.getBitrateListForAdaptation(realAdaptation); diff --git a/src/dash/controllers/RepresentationController.js b/src/dash/controllers/RepresentationController.js index 4f6abd1e81..ef809098b9 100644 --- a/src/dash/controllers/RepresentationController.js +++ b/src/dash/controllers/RepresentationController.js @@ -72,6 +72,35 @@ function RepresentationController(config) { } function getCurrentRepresentation() { + // Video RepresentationController should return a representation of type video, and enhancement + // RepresentationController should return a representation of type enhancement, i.e. type should match + if (currentVoRepresentation?.mediaInfo.type === type) { + return currentVoRepresentation; + } + else { + return _getCurrentDependentRepresentation(); + } + + } + + function _getCurrentDependentRepresentation() { + let currentVoRepDep = currentVoRepresentation?.dependentRepresentation; + if (currentVoRepDep) { + if (!currentVoRepDep.mediaInfo) { + throw new Error('dependentRepresentation has no mediaInfo!'); + } + if (currentVoRepDep.mediaInfo.type === type) { + return currentVoRepDep; + } + } + return null; + } + + /** + * Returns the combined effective Representation, i.e. the dependent representation plus its declared complementary representation. 
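 * For illustration (ids and codec strings are assumptions, not taken from a real manifest):
 * if the current Representation is an enhancement Representation such as
 *   { id: 'enh-1080p', codecs: 'lvc1', dependencyId: 'base-1080p',
 *     dependentRepresentation: { id: 'base-1080p', codecs: 'avc1.640028', mediaInfo: { type: 'video' }, ... } }
 * then getCurrentCompositeRepresentation() returns that whole object, while
 * getCurrentRepresentation() on a 'video'-typed controller resolves to the dependent base part.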
+ * @return {object} Representation + */ + function getCurrentCompositeRepresentation() { return currentVoRepresentation; } @@ -92,8 +121,7 @@ function RepresentationController(config) { const selectedRepresentation = getRepresentationById(selectedRepresentationId); _setCurrentVoRepresentation(selectedRepresentation); - - if (type !== Constants.VIDEO && type !== Constants.AUDIO && (type !== Constants.TEXT || !isFragmented)) { + if (type !== Constants.VIDEO && type !== Constants.ENHANCEMENT && type !== Constants.AUDIO && (type !== Constants.TEXT || !isFragmented)) { endDataUpdate(); resolve(); return; @@ -305,6 +333,7 @@ function RepresentationController(config) { } instance = { + getCurrentCompositeRepresentation, getCurrentRepresentation, getRepresentationById, getStreamId, diff --git a/src/dash/models/DashManifestModel.js b/src/dash/models/DashManifestModel.js index 66a125a073..87640381ac 100644 --- a/src/dash/models/DashManifestModel.js +++ b/src/dash/models/DashManifestModel.js @@ -764,6 +764,17 @@ function DashManifestModel() { voRepresentation.codecs = realRepresentation.codecs; voRepresentation.codecFamily = Utils.getCodecFamily(voRepresentation.codecs); } + if (realRepresentation.hasOwnProperty(DashConstants.DEPENDENCY_ID)) { + // According to spec, the DEPENDENCY_ID attribute is a space-separated list of ID values + // Only using the first ID from this list as the handling of multiple IDs is not supported yet + const dependencyIdListString = realRepresentation[DashConstants.DEPENDENCY_ID].toString(); + const dependencyIds = dependencyIdListString.split(' '); + const dependencyId = dependencyIds[0]; + + voRepresentation.dependencyId = dependencyId; + voRepresentation.dependentRepresentation = new Representation(); + voRepresentation.dependentRepresentation.id = dependencyId; + } if (realRepresentation.hasOwnProperty(DashConstants.MIME_TYPE)) { voRepresentation.mimeType = realRepresentation[DashConstants.MIME_TYPE]; } diff --git a/src/dash/vo/Representation.js b/src/dash/vo/Representation.js index 9ef63216e7..928ef1d2d7 100644 --- a/src/dash/vo/Representation.js +++ b/src/dash/vo/Representation.js @@ -48,6 +48,8 @@ class Representation { this.codecFamily = null; this.codecPrivateData = null; this.codecs = null; + this.dependencyId = null; + this.dependentRepresentation = null; this.essentialProperties = []; this.fragmentDuration = null; this.frameRate = null; diff --git a/src/streaming/ExternalMediaSource.js b/src/streaming/ExternalMediaSource.js new file mode 100644 index 0000000000..e89eec8b44 --- /dev/null +++ b/src/streaming/ExternalMediaSource.js @@ -0,0 +1,95 @@ +/** + * The copyright in this software is being made available under the BSD License, + * included below. This software may be subject to other third party and contributor + * rights, including patent rights, and no such rights are granted under this license. + * + * Copyright (c) 2013, Dash Industry Forum. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted provided that the following conditions are met: + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. 
+ * * Neither the name of Dash Industry Forum nor the names of its + * contributors may be used to endorse or promote products derived from this software + * without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY + * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. + * IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, + * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT + * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, + * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ +import ExternalSourceBuffer from './ExternalSourceBuffer.js'; + +class ExternalMediaSource { + constructor(eventBus) { + this.eventBus = eventBus; + this.reset(); + } + + get duration() { + return this._duration; + } + + set duration(value) { + if (this._readyState !== 'open') { + throw new Error('ExternalMediaSource is not open'); + } + this._duration = value; + } + + get readyState() { + return this._readyState; + } + + addSourceBuffer(mimeType) { + if (this._readyState !== 'open') { + throw new Error('ExternalMediaSource is not open'); + } + const sourceBuffer = new ExternalSourceBuffer(mimeType, this.eventBus); + this.sourceBuffers.set(sourceBuffer, mimeType); + return sourceBuffer; + } + + removeSourceBuffer(sourceBuffer) { + if (!(this.sourceBuffers.has(sourceBuffer))) { + throw new Error('ExternalSourceBuffer not found'); + } + this.sourceBuffers.delete(sourceBuffer); + } + + open() { + this._readyState = 'open'; + this.eventBus.trigger('externalMediaSourceOpen', { }); + } + + endOfStream() { + if (this._readyState !== 'open') { + throw new Error('ExternalMediaSource is not open'); + } + this._readyState = 'ended'; + this.eventBus.trigger('externalMediaSourceEnded', { }); + } + + close() { + this._readyState = 'closed'; + this.eventBus.trigger('externalMediaSourceClosed', { }); + } + + reset() { + this.sourceBuffers = new Map(); + this._duration = NaN; + this._readyState = 'closed'; + } +} + +export default ExternalMediaSource; diff --git a/src/streaming/ExternalSourceBuffer.js b/src/streaming/ExternalSourceBuffer.js new file mode 100644 index 0000000000..987e616112 --- /dev/null +++ b/src/streaming/ExternalSourceBuffer.js @@ -0,0 +1,129 @@ +/** + * The copyright in this software is being made available under the BSD License, + * included below. This software may be subject to other third party and contributor + * rights, including patent rights, and no such rights are granted under this license. + * + * Copyright (c) 2013, Dash Industry Forum. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without modification, + * are permitted provided that the following conditions are met: + * * Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation and/or + * other materials provided with the distribution. 
+ * * Neither the name of Dash Industry Forum nor the names of its + * contributors may be used to endorse or promote products derived from this software + * without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY + * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. + * IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, + * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT + * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, + * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +class ExternalSourceBuffer { + constructor(mimeType, eventBus) { + this.eventBus = eventBus; + this.mimeType = mimeType; + this.updating = false; + this.chunks = []; + this.appendWindowStart = 0; + this.appendWindowEnd = Infinity; + this.timestampOffset = 0; + this.mode = 'segments'; + } + + appendBuffer(segmentData, segmentStartTime, segmentEndTime) { + if (this.updating) { + throw new Error('SourceBuffer is currently updating'); + } + this.updating = true; + this.eventBus.trigger('externalSourceBufferUpdateStart', { mimeType: this.mimeType, request: 'appendBuffer', data: segmentData, start: segmentStartTime, end: segmentEndTime }); + + if (!Number.isNaN(segmentStartTime)) { + this.chunks.push({data: segmentData, start: segmentStartTime, end: segmentEndTime}); + this.chunks.sort((a, b) => a.start - b.start); // sort ascending based on start times + } + // Simulate async data append + setTimeout(() => { + this.updating = false; + this.eventBus.trigger('externalSourceBufferUpdating', { mimeType: this.mimeType }); + this.eventBus.trigger('externalSourceBufferUpdateEnd', { mimeType: this.mimeType }); + }, 10); + } + + abort() { + if (this.updating) { + this.updating = false; + this.eventBus.trigger('externalSourceBufferAbort', { mimeType: this.mimeType }); + this.eventBus.trigger('externalSourceBufferUpdateEnd', { mimeType: this.mimeType }); + } + } + + remove(start, end) { + if (this.updating) { + throw new Error('SourceBuffer is currently updating'); + } + this.updating = true; + this.eventBus.trigger('externalSourceBufferUpdateStart', { mimeType: this.mimeType, request: 'remove', start: start, end: end }); + + this.chunks = this.chunks.filter(segment => segment.end <= start || segment.start >= end); + + // Simulate async data removal + setTimeout(() => { + this.updating = false; + this.eventBus.trigger('externalSourceBufferUpdating', { mimeType: this.mimeType }); + this.eventBus.trigger('externalSourceBufferUpdateEnd', { mimeType: this.mimeType }); + }, 10); + } + + get buffered() { + return new TimeRanges(this.chunks); + } +} + +/** + * Implements TimeRanges interface as described in https://html.spec.whatwg.org/multipage/media.html#timeranges + * According to the spec, ranges in such an object are ordered, don't overlap, and don't touch + * (adjacent ranges are folded into one bigger range). 
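 * For illustration (times are arbitrary): chunks [{start: 0, end: 2}, {start: 2, end: 4},
 * {start: 6, end: 8}] yield two ranges, [0, 4] and [6, 8]: the first two chunks touch at
 * t = 2 and are folded into a single range, while the gap before t = 6 starts a new one.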
+ */ +class TimeRanges { + constructor(chunks) { + this._ranges = []; + + // Process ordered chunks into TimeRanges + for (const chunk of chunks) { + const ranges = this._ranges; + const newRange = { start: chunk.start, end: chunk.end }; + const lastRange = ranges.length ? ranges[ranges.length - 1] : null; + + if (!lastRange || newRange.start > lastRange.end) { + ranges.push(newRange); // empty or discontinuity in buffered period + } else { + lastRange.end = newRange.end; // continuous buffered period + } + } + } + + get length() { + return this._ranges.length; + } + + start(index) { + return this._ranges[index].start; + } + + end(index) { + return this._ranges[index].end; + } +} + +export default ExternalSourceBuffer; diff --git a/src/streaming/SourceBufferSink.js b/src/streaming/SourceBufferSink.js index 2fb5a5123a..9a41d918b6 100644 --- a/src/streaming/SourceBufferSink.js +++ b/src/streaming/SourceBufferSink.js @@ -33,9 +33,10 @@ import DashJSError from './vo/DashJSError.js'; import FactoryMaker from '../core/FactoryMaker.js'; import Errors from '../core/errors/Errors.js'; import Settings from '../core/Settings.js'; -import constants from './constants/Constants.js'; +import Constants from './constants/Constants.js'; import {HTTPRequest} from './vo/metrics/HTTPRequest.js'; import Events from '../core/events/Events.js'; +import ExternalSourceBuffer from './ExternalSourceBuffer.js'; const APPEND_WINDOW_START_OFFSET = 0.1; const APPEND_WINDOW_END_OFFSET = 0.01; @@ -130,7 +131,7 @@ function SourceBufferSink(config) { } catch (e) { // Note that in the following, the quotes are open to allow for extra text after stpp and wvtt - if ((mediaInfo.type == constants.TEXT && !mediaInfo.isFragmented) || (codec.indexOf('codecs="stpp') !== -1) || (codec.indexOf('codecs="vtt') !== -1) || (codec.indexOf('text/vtt') !== -1)) { + if ((mediaInfo.type == Constants.TEXT && !mediaInfo.isFragmented) || (codec.indexOf('codecs="stpp') !== -1) || (codec.indexOf('codecs="vtt') !== -1) || (codec.indexOf('text/vtt') !== -1)) { return _initializeForText(streamInfo); } return Promise.reject(e); @@ -374,7 +375,9 @@ function SourceBufferSink(config) { } catch (e) { } - if (buffer.appendBuffer) { + if (buffer instanceof ExternalSourceBuffer) { + buffer.appendBuffer(nextChunk.data.bytes, nextChunk.data.start, nextChunk.data.end); + } else if (buffer.appendBuffer) { buffer.appendBuffer(nextChunk.data.bytes); } else { buffer.append(nextChunk.data.bytes, nextChunk.data); diff --git a/src/streaming/Stream.js b/src/streaming/Stream.js index 6807c54ff7..df0de7e700 100644 --- a/src/streaming/Stream.js +++ b/src/streaming/Stream.js @@ -42,6 +42,7 @@ import DashJSError from './vo/DashJSError.js'; import BoxParser from './utils/BoxParser.js'; import URLUtils from './utils/URLUtils.js'; import BlacklistController from './controllers/BlacklistController.js'; +import ExternalMediaSource from './ExternalMediaSource.js'; import MediaInfoSelectionInput from './vo/MediaInfoSelectionInput.js'; @@ -350,6 +351,7 @@ function Stream(config) { let mediaInfo = null; let initialMediaInfo; + let enhancementMediaInfoIndex = -1; if (!allMediaForType || allMediaForType.length === 0) { logger.info('No ' + type + ' data.'); @@ -374,6 +376,10 @@ function Stream(config) { if (_isMediaSupported(mediaInfo)) { mediaController.addTrack(mediaInfo); } + + if (mediaInfo.type === Constants.ENHANCEMENT) { + enhancementMediaInfoIndex = i; + } } if (embeddedMediaInfos.length > 0) { @@ -413,7 +419,18 @@ function Stream(config) { 
mediaController.setInitialMediaSettingsForType(type, streamInfo); - let streamProcessor = _createStreamProcessor(allMediaForType, mediaSource); + let streamProcessor = _createStreamProcessor(allMediaForType, mediaSource, type); + + if (enhancementMediaInfoIndex >= 0) { + // An adaptation set, mapped to mediaInfo, of enhancement type was found so a stream processor shall be created for it + // the enhancement stream processor will work in parallel to the media stream processor it enhances + let enhancementMediaSource = new ExternalMediaSource(eventBus); + enhancementMediaSource.open(); + enhancementMediaSource.duration = streamInfo.manifestInfo.duration; + let enhancementStreamProcessor = _createStreamProcessor(allMediaForType, enhancementMediaSource, Constants.ENHANCEMENT); + enhancementStreamProcessor.selectMediaInfo(new MediaInfoSelectionInput({ newMediaInfo: allMediaForType[enhancementMediaInfoIndex] })); + streamProcessor.setEnhancementStreamProcessor(enhancementStreamProcessor); + } initialMediaInfo = mediaController.getCurrentTrackFor(type, streamInfo.id); @@ -457,11 +474,12 @@ function Stream(config) { * Creates the StreamProcessor for a given media type. * @param {array} allMediaForType * @param {object} mediaSource + * @param {object} streamProcessorMediaType * @private */ - function _createStreamProcessor(allMediaForType, mediaSource) { + function _createStreamProcessor(allMediaForType, mediaSource, streamProcessorMediaType) { - const mediaInfo = (allMediaForType && allMediaForType.length > 0) ? allMediaForType[0] : null; + const mediaInfo = (allMediaForType && allMediaForType.length > 0) ? allMediaForType.filter(m => (m.type === streamProcessorMediaType))[0] : null; let fragmentModel = fragmentController.getModel(mediaInfo ? mediaInfo.type : null); const type = mediaInfo ? mediaInfo.type : null; const mimeType = mediaInfo ? mediaInfo.mimeType : null; @@ -927,7 +945,7 @@ function Stream(config) { streamProcessor = streamProcessors[i]; type = streamProcessor.getType(); - if (type === Constants.AUDIO || type === Constants.VIDEO || type === Constants.TEXT) { + if (type === Constants.AUDIO || type === Constants.VIDEO || type === Constants.TEXT || type === Constants.ENHANCEMENT) { arr.push(streamProcessor); } } diff --git a/src/streaming/StreamProcessor.js b/src/streaming/StreamProcessor.js index 1d3a914408..a7c6d5c82a 100644 --- a/src/streaming/StreamProcessor.js +++ b/src/streaming/StreamProcessor.js @@ -80,8 +80,10 @@ function StreamProcessor(config) { let bufferController, bufferingTime, + containsVideoTrack, currentMediaInfo, dashHandler, + enhancementStreamProcessor, instance, isDynamic, logger, @@ -191,6 +193,7 @@ function StreamProcessor(config) { settings }); + containsVideoTrack = hasVideoTrack; scheduleController.initialize(hasVideoTrack); bufferingTime = 0; @@ -213,6 +216,7 @@ function StreamProcessor(config) { shouldUseExplicitTimeForRequest = false; shouldRepeatRequest = false; qualityChangeInProgress = false; + enhancementStreamProcessor = null; trackSwitchInProgress = false; _resetPendingSwitchToRepresentation(); } @@ -270,6 +274,11 @@ function StreamProcessor(config) { mediaInfoArr = value; } + function setEnhancementStreamProcessor(value) { + enhancementStreamProcessor = value; + logger.info('enhancementStreamProcessor = ' + enhancementStreamProcessor); + } + /** * When a seek within the corresponding period occurs this function initiates the clearing of the buffer and sets the correct buffering time. 
* @param {object} e @@ -588,7 +597,7 @@ function StreamProcessor(config) { } function _onDataUpdateCompleted() { - const currentRepresentation = representationController.getCurrentRepresentation() + const currentRepresentation = representationController.getCurrentCompositeRepresentation(); if (!bufferController.getIsBufferingCompleted()) { bufferController.updateBufferTimestampOffset(currentRepresentation); } @@ -691,6 +700,8 @@ function StreamProcessor(config) { eventBus.trigger() + _selectMediaInfoForEnhancementStreamProcessor(selectedValues); + // Update Representation Controller with the new data. Note we do not filter any Representations here as the filter values might change over time. const voRepresentations = abrController.getPossibleVoRepresentations(currentMediaInfo, false); return representationController.updateData(voRepresentations, currentMediaInfo.isFragmented, selectedValues.selectedRepresentation.id) @@ -752,6 +763,16 @@ function StreamProcessor(config) { } } + function _selectMediaInfoForEnhancementStreamProcessor(selectedValues) { + if (enhancementStreamProcessor && selectedValues.selectedRepresentation.dependentRepresentation) { + logger.info('[' + type + '] selectMediaInfo : call selectMediaInfo on enhancementStreamProcessor for index = ' + selectedValues.selectedRepresentation.absoluteIndex); + enhancementStreamProcessor.selectMediaInfo(new MediaInfoSelectionInput({ + newMediaInfo: selectedValues.selectedRepresentation.mediaInfo, + newRepresentation: selectedValues.selectedRepresentation + })); + } + } + /** * The quality has changed which means we have switched to a different representation. * If we want to aggressively replace existing parts in the buffer we need to make sure that the new quality is higher than the already buffered one. @@ -762,15 +783,20 @@ function StreamProcessor(config) { return; } + const qualityChangeHandled = _prepareQualityChangeForEnhancementStreamProcessor(e); + if (qualityChangeHandled) { + return; + } + if (pendingSwitchToVoRepresentation && pendingSwitchToVoRepresentation.enabled) { logger.warn(`Canceling queued representation switch to ${pendingSwitchToVoRepresentation.newRepresentation.id} for ${type}`); } if (e.isAdaptationSetSwitch) { - logger.debug(`Preparing quality switch to different AdaptationSet for type ${type}`); + logger.debug(`Preparing quality switch to different AdaptationSet for type ${type} from representation id ${e.oldRepresentation.id} to ${e.newRepresentation.id}`); _prepareAdaptationSwitchQualityChange(e) } else { - logger.debug(`Preparing quality within the same AdaptationSet for type ${type}`); + logger.debug(`Preparing quality within the same AdaptationSet for type ${type} from representation id ${e.oldRepresentation.id} to ${e.newRepresentation.id}`); _prepareNonAdaptationSwitchQualityChange(e) } } @@ -967,6 +993,39 @@ function StreamProcessor(config) { qualityChangeInProgress = false; } + /** + * Prepare quality change for enhancement stream processor. Returns true if the change has been handled, false otherwise. 
+ * @param {object} e + * @return {boolean} qualityChangeHandled returns true if the change has been handled, false otherwise + */ + function _prepareQualityChangeForEnhancementStreamProcessor(e) { + if (enhancementStreamProcessor) { + // Pass quality change to enhancement stream processor + enhancementStreamProcessor.prepareQualityChange(e); + } + else if (type === Constants.ENHANCEMENT) { + // This is an enhancement stream processor, handle the quality change + const oldRepType = e.oldRepresentation.mediaInfo.type; + const newRepType = e.newRepresentation.mediaInfo.type; + + if (oldRepType === Constants.ENHANCEMENT && newRepType === Constants.VIDEO) { + // The new representation has no enhancement, stop the enhancement stream processor + logger.info('Stop ' + type + ' stream processor'); + scheduleController.reset(); + return true; + } else if (oldRepType === Constants.VIDEO && newRepType === Constants.ENHANCEMENT) { + // The new representation has an enhancement, start the enhancement stream processor + logger.info('Start ' + type + ' stream processor'); + selectMediaInfo(new MediaInfoSelectionInput({ newMediaInfo: e.newRepresentation.mediaInfo, newRepresentation: e.newRepresentation })).then(() => { + scheduleController.setup(); + scheduleController.initialize(containsVideoTrack); + scheduleController.startScheduleTimer(); + }); + return true; + } + } + return false; + } /** * We have canceled the download of a fragment and need to adjust the buffer time or reload an init segment @@ -1538,6 +1597,7 @@ function StreamProcessor(config) { probeNextRequest, reset, selectMediaInfo, + setEnhancementStreamProcessor, setExplicitBufferingTime, setMediaInfoArray, setMediaSource, diff --git a/src/streaming/constants/Constants.js b/src/streaming/constants/Constants.js index ce4443665b..2c635ee0a0 100644 --- a/src/streaming/constants/Constants.js +++ b/src/streaming/constants/Constants.js @@ -47,6 +47,13 @@ export default { */ VIDEO: 'video', + /** + * @constant {string} ENHANCEMENT Enhancement media type + * @memberof Constants# + * @static + */ + ENHANCEMENT: 'enhancement', + /** * @constant {string} AUDIO Audio media type * @memberof Constants# @@ -340,6 +347,7 @@ export default { ID3_SCHEME_ID_URI: 'https://aomedia.org/emsg/ID3', COMMON_ACCESS_TOKEN_HEADER: 'common-access-token', DASH_ROLE_SCHEME_ID : 'urn:mpeg:dash:role:2011', + CODEC_FAMILIES: { MP3: 'mp3', AAC: 'aac', diff --git a/src/streaming/controllers/AbrController.js b/src/streaming/controllers/AbrController.js index e5503a6769..a07ec197f4 100644 --- a/src/streaming/controllers/AbrController.js +++ b/src/streaming/controllers/AbrController.js @@ -290,6 +290,8 @@ function AbrController() { voRepresentations = voRepresentations.concat(currentVoRepresentations) } }) + // Resolve dependencies + voRepresentations = _resolveDependencies(voRepresentations); // Now sort by quality (usually simply by bitrate) voRepresentations = _sortRepresentationsByQuality(voRepresentations); @@ -513,6 +515,19 @@ function AbrController() { } + function _resolveDependencies(voRepresentations) { + voRepresentations.forEach(rep => { + if (rep.dependentRepresentation && rep.dependentRepresentation.mediaInfo === null) { + let dependentId = rep.dependentRepresentation.id; + let dependentRep = voRepresentations.find((element) => element.id === dependentId); + if (dependentRep) { + rep.dependentRepresentation = dependentRep; + } + } + }); + return voRepresentations; + } + /** * While fragment loading is in progress we check if we might need to abort the request * 
@param {object} e @@ -658,7 +673,7 @@ function AbrController() { } const streamProcessor = streamProcessorDict[streamId][type]; - const currentRepresentation = streamProcessor.getRepresentation(); + const currentRepresentation = streamProcessor.getRepresentationController()?.getCurrentCompositeRepresentation(); const rulesContext = RulesContext(context).create({ abrController: instance, throughputController, @@ -681,7 +696,7 @@ function AbrController() { }); if (newRepresentation.id !== currentRepresentation.id && (abandonmentStateDict[streamId][type].state === MetricsConstants.ALLOW_LOAD || newRepresentation.absoluteIndex < currentRepresentation.absoluteIndex)) { - _changeQuality(currentRepresentation, newRepresentation, switchRequest.reason); + _changeQuality(type, currentRepresentation, newRepresentation, switchRequest.reason); return true; } @@ -707,11 +722,11 @@ function AbrController() { } const streamProcessor = streamProcessorDict[streamInfo.id][type]; - const currentRepresentation = streamProcessor.getRepresentation(); + const currentRepresentation = streamProcessor.getRepresentationController()?.getCurrentCompositeRepresentation(); if (!currentRepresentation || representation.id !== currentRepresentation.id) { - _changeQuality(currentRepresentation, representation, reason); + _changeQuality(type, currentRepresentation, representation, reason); } } @@ -738,9 +753,8 @@ function AbrController() { * @param {string} reason * @private */ - function _changeQuality(oldRepresentation, newRepresentation, reason) { + function _changeQuality(type, oldRepresentation, newRepresentation, reason) { const streamId = newRepresentation.mediaInfo.streamInfo.id; - const type = newRepresentation.mediaInfo.type; if (type && streamProcessorDict[streamId] && streamProcessorDict[streamId][type]) { const streamInfo = streamProcessorDict[streamId][type].getStreamInfo(); const bufferLevel = dashMetrics.getCurrentBufferLevel(type); diff --git a/src/streaming/controllers/BufferController.js b/src/streaming/controllers/BufferController.js index a61d978bec..10bd4e445d 100644 --- a/src/streaming/controllers/BufferController.js +++ b/src/streaming/controllers/BufferController.js @@ -603,7 +603,14 @@ function BufferController(config) { } logger.debug(`Using changeType() to switch from codec ${oldRepresentation.codecs} to ${newRepresentation.codecs}`); - return sourceBufferSink.changeType(newRepresentation); + + // SourceBufferSink's changeType will be invoked with the AbrRepresentation, ie. + // representation from the manifest. However, MSE SourceBuffer doesn't understand + // enhancement codecs. In the case an enhancement representation is selected, resolve + // the dependent (base) representation before passing the codecs to MSE's changeType + const representation = newRepresentation.dependentRepresentation ? 
+ newRepresentation.dependentRepresentation : newRepresentation; + return sourceBufferSink.changeType(representation); } function pruneAllSafely() { diff --git a/src/streaming/controllers/MediaController.js b/src/streaming/controllers/MediaController.js index a5ba6c70db..cf1944b477 100644 --- a/src/streaming/controllers/MediaController.js +++ b/src/streaming/controllers/MediaController.js @@ -180,6 +180,7 @@ function MediaController() { if (!track) { return; } + logger.info('addTrack with track.codec=\'' + track.codec + '\', track.type=\'' + track.type + '\''); const mediaType = track.type; if (!_isMultiTrackSupportedByType(mediaType)) { diff --git a/src/streaming/controllers/ScheduleController.js b/src/streaming/controllers/ScheduleController.js index 072bb3ecb6..9828694394 100644 --- a/src/streaming/controllers/ScheduleController.js +++ b/src/streaming/controllers/ScheduleController.js @@ -449,6 +449,7 @@ function ScheduleController(config) { setCheckPlaybackQuality, setInitSegmentRequired, setLastInitializedRepresentationId, + setup, setSwitchTrack, setTimeToLoadDelay, startScheduleTimer, diff --git a/src/streaming/models/CmcdModel.js b/src/streaming/models/CmcdModel.js index ada7412eaa..89f15edd39 100644 --- a/src/streaming/models/CmcdModel.js +++ b/src/streaming/models/CmcdModel.js @@ -415,6 +415,9 @@ function CmcdModel() { if (mediaType === Constants.AUDIO) { ot = CmcdObjectType.AUDIO; } + if (request.mediaType === Constants.ENHANCEMENT) { + ot = CmcdObjectType.OTHER; + } if (mediaType === Constants.TEXT) { if (request.representation.mediaInfo.mimeType === 'application/mp4') { ot = CmcdObjectType.TIMED_TEXT; diff --git a/src/streaming/rules/abr/InsufficientBufferRule.js b/src/streaming/rules/abr/InsufficientBufferRule.js index baa9076f23..ffef28a300 100644 --- a/src/streaming/rules/abr/InsufficientBufferRule.js +++ b/src/streaming/rules/abr/InsufficientBufferRule.js @@ -127,6 +127,7 @@ function InsufficientBufferRule(config) { bufferStateDict = {}; bufferStateDict[Constants.VIDEO] = { ignoreCount: segmentIgnoreCount }; bufferStateDict[Constants.AUDIO] = { ignoreCount: segmentIgnoreCount }; + bufferStateDict[Constants.ENHANCEMENT] = { ignoreCount: segmentIgnoreCount }; } function _onPlaybackSeeking() { diff --git a/src/streaming/utils/Capabilities.js b/src/streaming/utils/Capabilities.js index 01573cf21d..a7bd5e2c8f 100644 --- a/src/streaming/utils/Capabilities.js +++ b/src/streaming/utils/Capabilities.js @@ -138,6 +138,11 @@ function Capabilities() { return Promise.resolve(); } + const enhancementCodecs = settings.get().streaming.enhancement.codecs; + if (settings.get().streaming.enhancement.enabled && enhancementCodecs.some(cdc => basicConfiguration.codec.includes(cdc))) { + return Promise.resolve(true); + } + const configurationsToTest = _getEnhancedConfigurations(basicConfiguration, type); if (_canUseMediaCapabilitiesApi(basicConfiguration, type)) { diff --git a/test/unit/helpers/ObjectsHelper.js b/test/unit/helpers/ObjectsHelper.js index a36286b11d..1414ff1971 100644 --- a/test/unit/helpers/ObjectsHelper.js +++ b/test/unit/helpers/ObjectsHelper.js @@ -25,6 +25,11 @@ class ObjectsHelper { }, getRepresentation: () => { return null + }, + getRepresentationController: () => { + return { + getCurrentCompositeRepresentation: () => null + } } } } diff --git a/test/unit/helpers/VOHelper.js b/test/unit/helpers/VOHelper.js index 75bbb88f3b..8d6cd1569f 100644 --- a/test/unit/helpers/VOHelper.js +++ b/test/unit/helpers/VOHelper.js @@ -79,6 +79,7 @@ class VoHelper { 
rep.segmentAvailabilityWindow = null; rep.availableSegmentsNumber = 0; rep.mediaInfo = this.getDummyMediaInfo(type); + rep.dependentRepresentation = null; return rep; } diff --git a/test/unit/test/dash/dash.controllers.RepresentationController.js b/test/unit/test/dash/dash.controllers.RepresentationController.js index 4b3995c8ba..4f68b9e79f 100644 --- a/test/unit/test/dash/dash.controllers.RepresentationController.js +++ b/test/unit/test/dash/dash.controllers.RepresentationController.js @@ -25,13 +25,17 @@ describe('RepresentationController', function () { // Arrange const context = {}; const testType = 'video'; + const enhancementType = 'enhancement'; const specHelper = new SpecHelper(); const voRepresentations = []; - voRepresentations.push(voHelper.getDummyRepresentation(testType, 0), voHelper.getDummyRepresentation(testType, 1), voHelper.getDummyRepresentation(testType, 2)); + voRepresentations.push(voHelper.getDummyRepresentation(testType, 0), voHelper.getDummyRepresentation(testType, 1), voHelper.getDummyRepresentation(testType, 2), voHelper.getDummyRepresentation(enhancementType, 3)); const streamProcessor = objectsHelper.getDummyStreamProcessor(testType); const eventBus = EventBus(context).getInstance(); const timelineConverter = objectsHelper.getDummyTimelineConverter(); + // Representation 3 is an enhancement representation that has a dependent representation 1. + voRepresentations[3].dependentRepresentation = voRepresentations[1]; + Events.extend(MediaPlayerEvents); const abrControllerMock = new AbrControllerMock(); @@ -104,6 +108,18 @@ describe('RepresentationController', function () { expect(currentRepresentation.adaptation.period.duration).to.equal(150); // jshint ignore:line }); + it('should switch correctly when prepareQualityChange is called with an enhancement representation', function () { + let representation = representationController.getCurrentRepresentation(); + + expect(representation.id).to.equal(voRepresentations[0].id) + + // switch to an enchancement representation 3 with dependentRepresentation 1. 
+ representationController.prepareQualityChange(voRepresentations[3]); + representation = representationController.getCurrentRepresentation(); + + expect(representation.id).to.equal(voRepresentations[1].id); + }); + }); describe('when a call to reset is done', function () { diff --git a/test/unit/test/streaming/streaming.ExternalMediaSource.js b/test/unit/test/streaming/streaming.ExternalMediaSource.js new file mode 100644 index 0000000000..4abcbb21c0 --- /dev/null +++ b/test/unit/test/streaming/streaming.ExternalMediaSource.js @@ -0,0 +1,81 @@ +import ExternalMediaSource from '../../../../src/streaming/ExternalMediaSource.js'; +import ExternalSourceBuffer from '../../../../src/streaming/ExternalSourceBuffer.js'; +import EventBus from '../../../../src/core/EventBus.js'; + +import {expect} from 'chai'; + +describe('ExternalMediaSource', () => { + let mediaSource; + + beforeEach(() => { + const eventBus = EventBus().getInstance(); + mediaSource = new ExternalMediaSource(eventBus); + }); + + it('should initialize with the correct default values', () => { + expect(mediaSource.sourceBuffers).to.be.an.instanceof(Map).and.to.have.property('size', 0); + expect(mediaSource.duration).to.be.NaN; + expect(mediaSource.readyState).to.equal('closed'); + }); + + it('addSourceBuffer should add a new source buffer', () => { + mediaSource.onsourceopen = (done) => { + const mimeType = 'video/mp4; codecs="lvc1"'; + const sourceBuffer = mediaSource.addSourceBuffer(mimeType); + expect(mediaSource.sourceBuffers).to.have.property('size', 1); + expect(sourceBuffer).to.be.instanceOf(ExternalSourceBuffer); + expect(sourceBuffer.mimeType).to.equal(mimeType); + done(); + } + mediaSource.open(); + }); + + it('removeSourceBuffer should remove an existing source buffer', () => { + mediaSource.onsourceopen = (done) => { + const mimeType = 'video/mp4; codecs="lvc1"'; + const sourceBuffer = mediaSource.addSourceBuffer(mimeType); + mediaSource.removeSourceBuffer(sourceBuffer); + expect(mediaSource.sourceBuffers).to.have.property('size', 0); + done(); + } + mediaSource.open(); + }); + + it('should set and get duration correctly', () => { + mediaSource.onsourceopen = (done) => { + mediaSource.duration = 120.5; + expect(mediaSource.duration).to.equal(120.5); + done(); + } + mediaSource.open(); + }); + + it('setting duration should throw an error if readyState is not "open"', () => { + expect(() => { + mediaSource.duration = 100; + }).to.throw('ExternalMediaSource is not open'); + }); + + it('setting duration should update the duration when readyState is "open"', () => { + mediaSource.onsourceopen = (done) => { + mediaSource.duration = 100; + expect(mediaSource.duration).to.equal(100); + done(); + } + mediaSource.open(); + }); + + it('should transition readyState correctly', () => { + expect(mediaSource.readyState).to.equal('closed'); + mediaSource.onsourceopen = (done) => { + expect(mediaSource.readyState).to.equal('open'); + mediaSource.onsourceended = (done) => { + expect(mediaSource.readyState).to.equal('ended'); + done(); + } + mediaSource.endOfStream(); + done(); + } + mediaSource.open(); + }); +}); diff --git a/test/unit/test/streaming/streaming.ExternalSourceBuffer.js b/test/unit/test/streaming/streaming.ExternalSourceBuffer.js new file mode 100644 index 0000000000..b23c32ef9d --- /dev/null +++ b/test/unit/test/streaming/streaming.ExternalSourceBuffer.js @@ -0,0 +1,94 @@ +import ExternalSourceBuffer from '../../../../src/streaming/ExternalSourceBuffer.js'; +import EventBus from '../../../../src/core/EventBus.js'; + 
+import {expect} from 'chai'; + +describe('ExternalSourceBuffer', () => { + let sourceBuffer; + + beforeEach(() => { + const eventBus = EventBus().getInstance(); + sourceBuffer = new ExternalSourceBuffer('video/mp4; codecs="lvc1"', eventBus); + }); + + it('should initialize with the correct default values', () => { + expect(sourceBuffer.mimeType).to.equal('video/mp4; codecs="lvc1"'); + expect(sourceBuffer.updating).to.be.false; + expect(sourceBuffer.chunks).to.be.an('array').that.is.empty; + expect(sourceBuffer.appendWindowStart).to.equal(0); + expect(sourceBuffer.appendWindowEnd).to.equal(Infinity); + expect(sourceBuffer.timestampOffset).to.equal(0); + expect(sourceBuffer.mode).to.equal('segments'); + }); + + it('appendBuffer should add data to the buffer', () => { + const data = new Uint8Array([1, 2, 3, 4]); + const start = 0; + const end = 5; + sourceBuffer.onupdateend = (done) => { + expect(sourceBuffer.chunks).to.have.lengthOf(1); + expect(sourceBuffer.chunks[0].data).to.eql([1, 2, 3, 4]); + expect(sourceBuffer.chunks[0].start).to.eql(0); + expect(sourceBuffer.chunks[0].end).to.eql(5); + done(); + }; + sourceBuffer.appendBuffer(data, start, end); + }); + + it('should throw an error if appendBuffer is called while updating', () => { + const data = new Uint8Array([1, 2, 3, 4, 5]); + const start = 5; + const end = 10; + sourceBuffer.updating = true; + expect(() => sourceBuffer.appendBuffer(data, start, end)).to.throw('SourceBuffer is currently updating'); + }); + + it('abort should clear the buffer and set updating to false', () => { + const data = new Uint8Array([1, 2, 3]); + const start = 10; + const end = 15; + sourceBuffer.onupdateend = (done) => { + sourceBuffer.onupdateend = (done) => { + expect(sourceBuffer.chunks).to.be.empty; + expect(sourceBuffer.updating).to.be.false; + done(); + } + sourceBuffer.abort(); + done(); + } + sourceBuffer.appendBuffer(data, start, end); + }); + + it('remove should remove data from the buffer', () => { + const entry_0 = { + data: new Uint8Array([5, 6, 7]), + start: 15, + end: 20, + }; + const entry_1 = { + data: new Uint8Array([0, 1, 2]), + start: 10, + end: 15, + }; + sourceBuffer.onupdateend = (done) => { + sourceBuffer.onupdateend = (done) => { + sourceBuffer.onupdateend = (done) => { + expect(sourceBuffer.chunks[0].data).to.eql([0, 1, 2]); + expect(sourceBuffer.chunks[0].start).to.eql(10); + expect(sourceBuffer.chunks[0].end).to.eql(15); + done(); + } + sourceBuffer.remove(15, 20); + done(); + }; + sourceBuffer.appendBuffer(entry_1.data, entry_1.start, entry_1.end); + done(); + }; + sourceBuffer.appendBuffer(entry_0.data, entry_0.start, entry_0.end); + }); + + it('should throw an error if remove is called while updating', () => { + sourceBuffer.updating = true; + expect(() => sourceBuffer.remove(0, 1)).to.throw('SourceBuffer is currently updating'); + }); +}); diff --git a/test/unit/test/streaming/streaming.StreamProcessor.js b/test/unit/test/streaming/streaming.StreamProcessor.js index 05c99c7744..800dd193dc 100644 --- a/test/unit/test/streaming/streaming.StreamProcessor.js +++ b/test/unit/test/streaming/streaming.StreamProcessor.js @@ -26,6 +26,14 @@ describe('StreamProcessor', function () { expect(streamProcessor.setExplicitBufferingTime.bind(streamProcessor)).to.not.throw(); }); + it('setEnhancementStreamProcessor should exist', function () { + expect(streamProcessor.setEnhancementStreamProcessor).to.be.a('function'); + }); + + it('setEnhancementStreamProcessor should not throw an error', function () { + 
expect(streamProcessor.setEnhancementStreamProcessor.bind(streamProcessor, {})).to.not.throw(); + }); + }); }); diff --git a/test/unit/test/streaming/streaming.controllers.AbrController.js b/test/unit/test/streaming/streaming.controllers.AbrController.js index cdec63b5ed..56e551f713 100644 --- a/test/unit/test/streaming/streaming.controllers.AbrController.js +++ b/test/unit/test/streaming/streaming.controllers.AbrController.js @@ -29,7 +29,13 @@ describe('AbrController', function () { const settings = Settings(context).getInstance(); const abrCtrl = AbrController(context).getInstance(); const dummyMediaInfo = voHelper.getDummyMediaInfo(Constants.VIDEO); - const dummyRepresentations = [voHelper.getDummyRepresentation(Constants.VIDEO, 0), voHelper.getDummyRepresentation(Constants.VIDEO, 1)]; + const enhancementMediaInfo = voHelper.getDummyMediaInfo(Constants.ENHANCEMENT); + const dummyRepresentations = [voHelper.getDummyRepresentation(Constants.VIDEO, 0), voHelper.getDummyRepresentation(Constants.VIDEO, 1), + voHelper.getDummyRepresentation(Constants.ENHANCEMENT, 2)]; + + // Representation 2 has a dependentRepresentation with id 0 + dummyRepresentations[2].dependentRepresentation = dummyRepresentations[0]; + const domStorageMock = new DomStorageMock(); const dashMetricsMock = new DashMetricsMock(); const streamControllerMock = new StreamControllerMock(); @@ -464,6 +470,26 @@ describe('AbrController', function () { abrCtrl.setPlaybackQuality(Constants.VIDEO, dummyMediaInfo.streamInfo, dummyRepresentations[0]); }); + it('should switch to a new enhancement Representation and have the correct dependentRep', function (done) { + const enhancementRepresentation = dummyRepresentations[2]; + const dependentRepresentation = dummyRepresentations[0]; + + const onQualityChange = (e) => { + expect(e.oldRepresentation).to.not.exist; + + // Representation 2 should have dependentRepresentation with id 0 + expect(e.newRepresentation.id).to.be.equal(enhancementRepresentation.id); + expect(e.newRepresentation.dependentRepresentation.id).to.be.equal(dependentRepresentation.id); + + eventBus.off(MediaPlayerEvents.QUALITY_CHANGE_REQUESTED, onQualityChange); + done(); + } + + eventBus.on(MediaPlayerEvents.QUALITY_CHANGE_REQUESTED, onQualityChange, this); + + abrCtrl.setPlaybackQuality(Constants.VIDEO, enhancementMediaInfo.streamInfo, enhancementRepresentation); + }); + it('should ignore an attempt to set a quality value if no streamInfo is provided', function () { const spy = sinon.spy(); diff --git a/test/unit/test/streaming/streaming.utils.Capabilities.js b/test/unit/test/streaming/streaming.utils.Capabilities.js index d965ac3365..9d0ddbee3c 100644 --- a/test/unit/test/streaming/streaming.utils.Capabilities.js +++ b/test/unit/test/streaming/streaming.utils.Capabilities.js @@ -3,6 +3,7 @@ import Settings from '../../../../src/core/Settings.js'; import DescriptorType from '../../../../src/dash/vo/DescriptorType.js'; import {expect} from 'chai'; +import Constants from '../../../../src/streaming/constants/Constants.js'; //import {UAParser} from 'ua-parser-js'; let settings; @@ -14,6 +15,7 @@ let capabilities; // The Media Capabilities API seems to return wrong values on Linux with Firefox. 
Deactivate some tests for now //const isLinuxFirefox = ua.browser.name.toLowerCase() === 'firefox' && ua.os.name.toLowerCase().includes('linux'); +const enhancementCodecs = ['lvc1']; let EssentialPropertyThumbNail = new DescriptorType; EssentialPropertyThumbNail.init({ @@ -546,6 +548,16 @@ describe('Capabilities', function () { }) */ + it('should return true for enhancement codecs', function () { + settings.update({ streaming: { enhancement: { enabled: true } } }); + + const res = capabilities.runCodecSupportCheck({ codec: `video/${enhancementCodecs[0]}` }, Constants.VIDEO); + + return res.then(function (isSupported) { + expect(isSupported).to.be.true; + }); + }); + }) });
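Taken together, the changes above define a small integration contract between dash.js and an out-of-band enhancement decoder: AdaptationSets whose codec matches streaming.enhancement.codecs (by default 'lvc1') are typed as 'enhancement', driven by their own StreamProcessor, and routed to an ExternalMediaSource whose ExternalSourceBuffer re-emits segment appends and removals as events instead of feeding MSE. Below is a condensed sketch of a consumer of that contract, distilled from the reference-player changes in this diff; the decoder argument is an assumption standing in for an LCEVC decoder instance exposing appendBuffer, flushBuffer and setLevelSwitching, and is not part of dash.js.

```js
// Sketch of wiring an enhancement decoder to the events introduced in this PR.
// `decoder` is an assumption standing in for e.g. an LCEVCdec instance.
function wireEnhancementDecoder(player, decoder) {
    let enhancementAbsoluteIndex = -1;

    // Enhancement segments never reach an MSE SourceBuffer; the ExternalSourceBuffer
    // re-emits them here together with their timing information.
    player.on('externalSourceBufferUpdateStart', (e) => {
        if (e.request === 'appendBuffer') {
            decoder.appendBuffer(e.data, 'video', enhancementAbsoluteIndex, 0, /* isMuxed */ false);
        } else if (e.request === 'remove') {
            decoder.flushBuffer(e.start, e.end);
        }
    });

    // Remember which enhancement Representation the appended data belongs to.
    player.on(dashjs.MediaPlayer.events.FRAGMENT_LOADING_COMPLETED, (e) => {
        if (e.mediaType === 'enhancement') {
            enhancementAbsoluteIndex = e.request.representation.absoluteIndex;
        }
    });

    // Signal ABR switches between base ('video') and enhanced renditions to the decoder.
    player.on(dashjs.MediaPlayer.events.QUALITY_CHANGE_REQUESTED, (e) => {
        if (e.mediaType === 'video' || e.mediaType === 'enhancement') {
            decoder.setLevelSwitching(e.newRepresentation.absoluteIndex, 1 /* auto render mode */);
        }
    });
}

// Usage: enable the enhancement pipeline before initialize(), then attach the decoder.
const player = dashjs.MediaPlayer().create();
player.updateSettings({ streaming: { enhancement: { enabled: true, codecs: ['lvc1'] } } });
// wireEnhancementDecoder(player, lcevcDecInstance);
```

In the reference player this wiring lives in setupEnhancementDecoder(); the sketch only condenses it to show which events carry the enhancement data and which decoder calls they map to.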