diff --git a/.circleci/config.yml b/.circleci/config.yml index 20d0412639..1da5feec0e 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,6 +1,6 @@ version: 2.1 orbs: - browser-tools: circleci/browser-tools@1.4.2 + browser-tools: circleci/browser-tools@1.4.6 executors: dashjs-executor: diff --git a/.github/workflows/deploy_latest.yml b/.github/workflows/deploy_latest.yml index 798a814d08..8b66499cc9 100644 --- a/.github/workflows/deploy_latest.yml +++ b/.github/workflows/deploy_latest.yml @@ -1,4 +1,4 @@ -name: deploy +name: deploy_latest on: push: diff --git a/.github/workflows/deploy_nightly.yml b/.github/workflows/deploy_nightly.yml index 25035a7c92..d45b925d90 100644 --- a/.github/workflows/deploy_nightly.yml +++ b/.github/workflows/deploy_nightly.yml @@ -1,4 +1,4 @@ -name: deploy +name: deploy_nightly on: push: diff --git a/.github/workflows/deploy_v5.yml b/.github/workflows/deploy_v5.yml new file mode 100644 index 0000000000..9f7fc754cb --- /dev/null +++ b/.github/workflows/deploy_v5.yml @@ -0,0 +1,18 @@ +name: deploy_v5 + +on: + push: + branches: + - 'v5.0.0' + +jobs: + deploy_staging: + if: github.repository == 'Dash-Industry-Forum/dash.js' + uses: ./.github/workflows/deploy.yml + with: + envname: v5 + deploy_path: '/377335/dash.js' + secrets: + host: ${{secrets.HOST}} + user: ${{secrets.USER}} + private_key: ${{secrets.PRIVATE_KEY}} diff --git a/contrib/akamai/controlbar/ControlBar.js b/contrib/akamai/controlbar/ControlBar.js index 3864dbdeea..df59812812 100644 --- a/contrib/akamai/controlbar/ControlBar.js +++ b/contrib/akamai/controlbar/ControlBar.js @@ -528,13 +528,13 @@ var ControlBar = function (dashjsMediaPlayer, displayUTCTimeCodes) { destroyMenu(bitrateListMenu, bitrateListBtn, menuHandlersList.bitrate); bitrateListMenu = null; var availableBitrates = { menuType: 'bitrate' }; - availableBitrates.audio = self.player.getBitrateInfoListFor && self.player.getBitrateInfoListFor('audio') || []; - availableBitrates.video = 
self.player.getBitrateInfoListFor && self.player.getBitrateInfoListFor('video') || []; - availableBitrates.images = self.player.getBitrateInfoListFor && self.player.getBitrateInfoListFor('image') || []; + availableBitrates.audio = self.player.getRepresentationsByType && self.player.getRepresentationsByType('audio') || []; + availableBitrates.video = self.player.getRepresentationsByType && self.player.getRepresentationsByType('video') || []; + availableBitrates.images = self.player.getRepresentationsByType && self.player.getRepresentationsByType('image') || []; if (availableBitrates.audio.length >= 1 || availableBitrates.video.length >= 1 || availableBitrates.images.length >= 1) { contentFunc = function (element, index) { - var result = isNaN(index) ? ' Auto Switch' : Math.floor(element.bitrate / 1000) + ' kbps'; + var result = isNaN(index) ? ' Auto Switch' : Math.floor(element.bitrateInKbit) + ' kbps'; result += element && element.width && element.height ? ' (' + element.width + 'x' + element.height + ')' : ''; return result; }; @@ -575,7 +575,7 @@ var ControlBar = function (dashjsMediaPlayer, displayUTCTimeCodes) { } if (element.roles[0]) { - info += '- Role: ' + element.roles[0] + ' '; + info += '- Role: ' + element.roles[0].value + ' '; } if (element.codec) { @@ -852,14 +852,14 @@ var ControlBar = function (dashjsMediaPlayer, displayUTCTimeCodes) { if (item.index > 0) { cfg.streaming.abr.autoSwitchBitrate[item.mediaType] = false; self.player.updateSettings(cfg); - self.player.setQualityFor(item.mediaType, item.index - 1, forceQuality); + self.player.setRepresentationForTypeByIndex(item.mediaType, item.index - 1, forceQuality); } else { cfg.streaming.abr.autoSwitchBitrate[item.mediaType] = true; self.player.updateSettings(cfg); } break; case 'image-bitrate-list': - player.setQualityFor(item.mediaType, item.index); + player.setRepresentationForTypeByIndex(item.mediaType, item.index); break; case 'caption-list': self.player.setTextTrack(item.index - 1); diff --git 
a/index.d.ts b/index.d.ts index 6332e9bdc8..cdf3e65d66 100644 --- a/index.d.ts +++ b/index.d.ts @@ -122,11 +122,11 @@ declare namespace dashjs { getZlema(dict: ThroughputDictEntry[], sampleSize: number): number - getAverageThroughput(mediaType: MediaType, calculationMode: Constants["THROUGHPUT_CALCULATION_MODES"], sampleSize: number): number + getAverageThroughput(mediaType: MediaType, calculationMode: string, sampleSize: number): number - getSafeAverageThroughput(mediaType: MediaType, calculationMode: Constants["THROUGHPUT_CALCULATION_MODES"], sampleSize: number): number + getSafeAverageThroughput(mediaType: MediaType, calculationMode: string, sampleSize: number): number - getAverageLatency(mediaType: MediaType, calculationMode: Constants["THROUGHPUT_CALCULATION_MODES"], sampleSize: number): number + getAverageLatency(mediaType: MediaType, calculationMode: string, sampleSize: number): number getRawThroughputData(mediaType: MediaType): number @@ -148,7 +148,7 @@ declare namespace dashjs { getCurrentRepresentation(): object; - getCurrentRepresentationInfo(): RepresentationInfo; + getCurrentRepresentation(): Representation; getRepresentationForQuality(quality: number): object | null; @@ -572,41 +572,34 @@ declare namespace dashjs { } export interface Representation { + adaptation: AdaptationSet | null; + availabilityTimeComplete: boolean; + availabilityTimeOffset: number; + availableSegmentsNumber: number; + bandwidth: number; + codecPrivateData: string | null; + codecs: string | null; + fragmentDuration: number | null; + frameRate: number; + height: number; id: string; index: number; - //adaptation needs checking - adaptation: AdaptationSet | null; - segmentInfoType: string | null; + indexRange: string | null; initialization: object | null; - codecs: string | null; + maxPlayoutRate: number; + mediaFinishedInformation: MediaFinishedInformation; + mediaInfo: MediaInfo | null; mimeType: string | null; - codecPrivateData: string | null; - segmentDuration: number; - 
timescale: number; - startNumber: number; - indexRange: string | null; - range: Range | null; + mseTimeOffset: number; presentationTimeOffset: number; - MSETimeOffset: number; - mediaFinishedInformation: MediaFinishedInformation; - availableSegmentsNumber: number; - bandwidth: number; - width: number; - height: number; + range: Range | null; scanType: string; - maxPlayoutRate: number; - availabilityTimeOffset: number; - availabilityTimeComplete: boolean; + segmentDuration: number; + segmentInfoType: string | null; segments: any[]; - frameRate: number; - } - - export interface RepresentationInfo { - id: string | null; - quality: number | null; - fragmentDuration: number | null; - mediaInfo: MediaInfo | null; - MSETimeOffset: number | null; + startNumber: number; + timescale: number; + width: number; } export interface Segment { @@ -1225,18 +1218,10 @@ declare namespace dashjs { audio?: number; video?: number; }; - maxRepresentationRatio?: { - audio?: number; - video?: number; - }; initialBitrate?: { audio?: number; video?: number; }; - initialRepresentationRatio?: { - audio?: number; - video?: number; - }; autoSwitchBitrate?: { audio?: boolean; video?: boolean; @@ -1454,7 +1439,7 @@ declare namespace dashjs { setTextTrack(idx: number): void; - getBitrateInfoListFor(type: MediaType): BitrateInfo[]; + getRepresentationsFor(type: MediaType): Representation[]; getStreamsFromManifest(manifest: object): StreamInfo[]; @@ -2105,7 +2090,7 @@ declare namespace dashjs { getTopBitrateInfoFor(type: string, streamId?: string): BitrateInfo | null; - getInitialBitrateFor(type: string, streamId: string): number; + getInitialBitrateFor(type: string): number; checkPlaybackQuality(type: string, streamId: string): boolean; @@ -2169,7 +2154,7 @@ declare namespace dashjs { prepareForReplacementTrackSwitch(codec: string): Promise; - prepareForForceReplacementQualitySwitch(representationInfo: RepresentationInfo): Promise; + prepareForForceReplacementQualitySwitch(voRepresentation: 
Representation): Promise; prepareForNonReplacementTrackSwitch(codec: string): Promise; @@ -2181,7 +2166,7 @@ declare namespace dashjs { clearBuffers(ranges: Range[]): Promise; - updateBufferTimestampOffset(representationInfo: RepresentationInfo): Promise; + updateBufferTimestampOffset(voRepresentation: Representation): Promise; updateAppendWindow(): Promise; @@ -2255,7 +2240,7 @@ declare namespace dashjs { saveTextSettingsDisabled(): void; - isTracksEqual(t1: MediaInfo, t2: MediaInfo): boolean; + areTracksEqual(t1: MediaInfo, t2: MediaInfo): boolean; setConfig(config: object): void; @@ -2345,8 +2330,6 @@ declare namespace dashjs { getStreamId(): string; - setCurrentRepresentation(representationInfo: RepresentationInfo): void; - startScheduleTimer(value: object): void; clearScheduleTimer(): void; @@ -2674,8 +2657,6 @@ declare namespace dashjs { reset(): void; resetInitialSettings(): void; - - addExecutedRequest(request: HTTPRequest): void; } export interface AastLowLatencyThroughputModel { @@ -2685,7 +2666,7 @@ declare namespace dashjs { getThroughputCapacityDelayMS(request: HTTPRequest, currentBufferLevel: number): number; - getEstimaredDownloadDurationMS(request: HTTPRequest): number; + getEstimatedDownloadDurationMS(request: HTTPRequest): number; } export interface ThroughputModel { @@ -2694,7 +2675,7 @@ declare namespace dashjs { getThroughputDict(mediaType: MediaType): ThroughputDictEntry; getEwmaThroughputDict(mediaType: MediaType): ThroughputEwmaDictEntry; - + getEwmaLatencyDict(mediaType: MediaType): ThroughputEwmaDictEntry; getEwmaHalfLife(mediaType: MediaType): object; @@ -2771,13 +2752,13 @@ declare namespace dashjs { addRequestsQueue(mediaType: MediaType, loadingRequests: any[], executedRequests: any[]): void; - addManifestUpdate(mediaType: MediaType, type: string, requestTime: number, fetchTime: number, availabilityStartTime: number, presentationStartTime: number, clientTimeOffset: number, currentTime: number, buffered: RepresentationInfo, latency: 
number): void; + addManifestUpdate(mediaType: MediaType, type: string, requestTime: number, fetchTime: number): void; updateManifestUpdateInfo(manifestUpdate: ManifestUpdate, updatedFields: any[]): void; addManifestUpdateStreamInfo(manifestUpdate: ManifestUpdate, id: string, index: number, start: number, duration: number): void; - addManifestUpdateRepresentationInfo(manifestUpdate: ManifestUpdate, id: string, index: number, streamIndex: number, mediaType: MediaType, presentationTimeOffset: number, startNumber: number, fragmentInfoType: string): void; + addManifestUpdateRepresentationInfo(manifestUpdate: ManifestUpdate, representation: Representation, mediaType: MediaType): void; addPlayList(vo: any): void; @@ -3592,7 +3573,7 @@ declare namespace dashjs { getCurrentRequest(): SwitchRequest; getSwitchHistory(): SwitchRequestHistory; //pot. just Switch History - + getStreamInfo(): StreamInfo; getScheduleController(): ScheduleController; @@ -3601,7 +3582,7 @@ declare namespace dashjs { getAbrController(): AbrController; - getRepresentationInfo(): RepresentationInfo + getVoRepresentation(): Representation; getVideoModel(): VideoModel; } @@ -4085,13 +4066,21 @@ declare namespace dashjs { presentationStartTime: number; clientTimeOffset: number; currentTime: number | null; - buffered: RepresentationInfo; + buffered: object | null; latency: number; streamInfo: StreamInfo[]; - representationInfo: RepresentationInfo; + representationInfo: ManifestUpdateRepresentationInfo[]; } + export interface ManifestUpdateRepresentationInfo { + id: string | null; + index: number | null; + mediaType: MediaType | null; + presentationTimeOffset: number | null; + startNumber: number | null; + } + export interface PlayList { start: number | null; mstart: number | null; @@ -4464,7 +4453,7 @@ declare namespace dashjs { reset(): void; - updateTimestampOffset(MSETimeOffset: number): void; + updateTimestampOffset(mseTimeOffset: number): void; initializeForStreamSwitch(mInfo: MediaInfo, 
selectedRepresentation: Representation, oldSourceBufferSink: SourceBufferSink): Promise; @@ -4506,8 +4495,6 @@ declare namespace dashjs { getThumbnailController(): object; - getBitrateListFor(type: MediaType): BitrateInfo[]; - updateData(updatedStreamInfo: StreamInfo): void; reset(): void; @@ -4556,7 +4543,7 @@ declare namespace dashjs { getRepresentationController(): RepresentationController; - getRepresentationInfo(quality: number): RepresentationInfo; + getVoRepresentation(quality: number): Representation; getBufferLevel(): number; diff --git a/karma.unit.conf.cjs b/karma.unit.conf.cjs index d6f17d99b2..3400f4ea34 100644 --- a/karma.unit.conf.cjs +++ b/karma.unit.conf.cjs @@ -90,7 +90,7 @@ module.exports = function (config) { // start these browsers // available browser launchers: https://npmjs.org/browse/keyword/karma-launcher - browsers: ['ChromeHeadless', 'FirefoxHeadless'], + browsers: ['ChromeHeadless'], // Continuous Integration mode // if true, Karma captures browsers, runs the tests and exits diff --git a/package.json b/package.json index d325d8c247..387c0936fd 100644 --- a/package.json +++ b/package.json @@ -12,7 +12,6 @@ "start": "webpack serve --config build/webpack.dev.cjs", "lint": "eslint \"src/**/*.js\" test/unit/mocks/*.js test/unit/*.js ", "build": "tsc && rimraf dist && webpack --config build/webpack.prod.cjs", - "postbuild": "cp index.d.ts dash.d.ts", "doc": "jsdoc -c build/jsdoc/jsdoc_conf.json -d docs/jsdoc", "test": "karma start karma.unit.conf.cjs", "test-browserunit": "karma start build/karma.conf.cjs", diff --git a/samples/abr/LowestBitrateRule.js b/samples/abr/LowestBitrateRule.js index e7126708b9..64c51c4969 100644 --- a/samples/abr/LowestBitrateRule.js +++ b/samples/abr/LowestBitrateRule.js @@ -45,7 +45,7 @@ function LowestBitrateRuleClass() { } // Always use lowest bitrate - function getMaxIndex(rulesContext) { + function getSwitchRequest(rulesContext) { // here you can get some informations aboit metrics for example, to implement 
the rule let metricsModel = MetricsModel(context).getInstance(); var mediaType = rulesContext.getMediaInfo().type; @@ -74,7 +74,7 @@ function LowestBitrateRuleClass() { } instance = { - getMaxIndex: getMaxIndex + getSwitchRequest }; setup(); diff --git a/samples/dash-if-reference-player/app/main.js b/samples/dash-if-reference-player/app/main.js index d1ff97e00e..f7937546f1 100644 --- a/samples/dash-if-reference-player/app/main.js +++ b/samples/dash-if-reference-player/app/main.js @@ -422,9 +422,11 @@ app.controller('DashController', ['$scope', '$window', 'sources', 'contributors' $scope.player.on(dashjs.MediaPlayer.events.REPRESENTATION_SWITCH, function (e) { var bitrate = Math.round(e.currentRepresentation.bandwidth / 1000); + var availableRepresentations = $scope.player.getRepresentationsByType(e.mediaType) + var maxIndex = availableRepresentations ? availableRepresentations.length : 0; - $scope[e.mediaType + 'PendingIndex'] = e.currentRepresentation.index + 1; - $scope[e.mediaType + 'PendingMaxIndex'] = e.numberOfRepresentations; + $scope[e.mediaType + 'PendingIndex'] = e.currentRepresentation.absoluteIndex + 1; + $scope[e.mediaType + 'PendingMaxIndex'] = maxIndex; $scope[e.mediaType + 'Bitrate'] = bitrate; $scope.plotPoint('pendingIndex', e.mediaType, e.newQuality + 1, getTimeForPlot()); $scope.safeApply(); @@ -436,14 +438,14 @@ app.controller('DashController', ['$scope', '$window', 'sources', 'contributors' }, $scope); $scope.player.on(dashjs.MediaPlayer.events.QUALITY_CHANGE_RENDERED, function (e) { /* jshint ignore:line */ - $scope[e.mediaType + 'Index'] = e.newQuality + 1; + $scope[e.mediaType + 'Index'] = e.newRepresentation.absoluteIndex + 1; $scope.plotPoint('index', e.mediaType, e.newQuality + 1, getTimeForPlot()); $scope.safeApply(); }, $scope); $scope.player.on(dashjs.MediaPlayer.events.STREAM_INITIALIZED, function (e) { /* jshint ignore:line */ stopMetricsInterval(); - $scope.videoQualities = $scope.player.getBitrateInfoListFor('video'); + 
$scope.videoQualities = $scope.player.getRepresentationsByType('video'); $scope.chartCount = 0; $scope.metricsTimer = setInterval(function () { updateMetrics('video'); @@ -1977,10 +1979,13 @@ app.controller('DashController', ['$scope', '$window', 'sources', 'contributors' var period = dashAdapter.getPeriodById($scope.currentStreamInfo.id); var periodIdx = period ? period.index : $scope.currentStreamInfo.index; - var maxIndex = dashAdapter.getMaxIndexForBufferType(type, periodIdx); + var representations = $scope.player.getRepresentationsByType(type); + var maxIndex = representations ? representations.length : 1; var repSwitch = dashMetrics.getCurrentRepresentationSwitch(type, true); var bufferLevel = dashMetrics.getCurrentBufferLevel(type, true); - var index = $scope.player.getQualityFor(type); + if ($scope.player.getCurrentRepresentationForType(type)) { + var index = $scope.player.getCurrentRepresentationForType(type).absoluteIndex + 1; + } var bitrate = repSwitch ? Math.round(dashAdapter.getBandwidthForRepresentation(repSwitch.to, periodIdx) / 1000) : NaN; var droppedFramesMetrics = dashMetrics.getCurrentDroppedFrames(); diff --git a/samples/dash-if-reference-player/app/rules/DownloadRatioRule.js b/samples/dash-if-reference-player/app/rules/DownloadRatioRule.js index c18c2cc1eb..8f3c4b48ea 100644 --- a/samples/dash-if-reference-player/app/rules/DownloadRatioRule.js +++ b/samples/dash-if-reference-player/app/rules/DownloadRatioRule.js @@ -56,7 +56,7 @@ function DownloadRatioRuleClass() { }, 0); } - function getMaxIndex(rulesContext) { + function getSwitchRequest(rulesContext) { let mediaType = rulesContext.getMediaInfo().type; @@ -156,7 +156,7 @@ function DownloadRatioRuleClass() { } count = rulesContext.getMediaInfo().representationCount; - currentRepresentation = rulesContext.getRepresentationInfo(); + currentRepresentation = rulesContext.getVoRepresentation(); currentBandwidth = dashManifest.getBandwidth(currentRepresentation); for (i = 0; i < count; i += 1) { 
bandwidths.push(rulesContext.getMediaInfo().bitrateList[i].bandwidth); @@ -189,7 +189,7 @@ function DownloadRatioRuleClass() { } instance = { - getMaxIndex: getMaxIndex + getSwitchRequest }; setup(); diff --git a/samples/dash-if-reference-player/app/rules/ThroughputRule.js b/samples/dash-if-reference-player/app/rules/ThroughputRule.js index 93fca77564..c196b4ed44 100644 --- a/samples/dash-if-reference-player/app/rules/ThroughputRule.js +++ b/samples/dash-if-reference-player/app/rules/ThroughputRule.js @@ -47,7 +47,7 @@ function CustomThroughputRuleClass() { logger = Debug(context).getInstance().getLogger(instance); } - function getMaxIndex(rulesContext) { + function getSwitchRequest(rulesContext) { // here you can get some informations aboit metrics for example, to implement the rule let metricsModel = MetricsModel(context).getInstance(); var mediaType = rulesContext.getMediaInfo().type; @@ -60,7 +60,7 @@ function CustomThroughputRuleClass() { } instance = { - getMaxIndex: getMaxIndex + getSwitchRequest }; setup(); diff --git a/samples/dash-if-reference-player/app/sources.json b/samples/dash-if-reference-player/app/sources.json index b5464f2fc7..6566998a1c 100644 --- a/samples/dash-if-reference-player/app/sources.json +++ b/samples/dash-if-reference-player/app/sources.json @@ -404,6 +404,19 @@ { "name": "DRM (modern)", "submenu": [ + { + "name": "Multiperiod - Supplemental Property \"urn:mpeg:dash:adaptation-set-switching:2016\" ", + "url": "https://d24rwxnt7vw9qb.cloudfront.net/out/v1/a234169feb7b4b4ba9fd100b36629ae1/index.mpd", + "protData": { + "com.widevine.alpha": { + "serverURL": "https://lic.staging.drmtoday.com/license-proxy-widevine/cenc/?specConform=true", + "httpRequestHeaders": { + "x-dt-custom-data": "ewogICAgInVzZXJJZCI6ICJhd3MtZWxlbWVudGFsOjpzcGVrZS10ZXN0aW5nIiwKICAgICJzZXNzaW9uSWQiOiAidGVzdHNlc3Npb25tdWx0aWtleSIsCiAgICAibWVyY2hhbnQiOiAiYXdzLWVsZW1lbnRhbCIKfQ==" + } + } + }, + "provider": "aws" + }, { "name": "Multiperiod - Number + Timeline - 
Compact manifest - Thumbnails (1 track) - Encryption (1 key) PlayReady/Widevine (DRMtoday) - Key rotation (60s)", "url": "https://d24rwxnt7vw9qb.cloudfront.net/v1/dash/e6d234965645b411ad572802b6c9d5a10799c9c1/All_Reference_Streams/2fc23947945841b9b1be9768f9c13e75/index.mpd", diff --git a/src/core/Settings.js b/src/core/Settings.js index 51e6fda006..7cbebfa0b6 100644 --- a/src/core/Settings.js +++ b/src/core/Settings.js @@ -101,7 +101,7 @@ import Events from './events/Events.js'; * bufferTimeAtTopQuality: 30, * bufferTimeAtTopQualityLongForm: 60, * initialBufferLevel: NaN, - * bufferTimeDefault: 12, + * bufferTimeDefault: 18, * longFormContentDurationThreshold: 600, * stallThreshold: 0.3, * useAppendWindow: true, @@ -235,18 +235,10 @@ import Events from './events/Events.js'; * audio: -1, * video: -1 * }, - * maxRepresentationRatio: { - * audio: 1, - * video: 1 - * }, * initialBitrate: { * audio: -1, * video: -1 * }, - * initialRepresentationRatio: { - * audio: -1, - * video: -1 - * }, * autoSwitchBitrate: { * audio: true, * video: true @@ -348,7 +340,7 @@ import Events from './events/Events.js'; * * Allows you to modify the buffer that is kept in source buffer in seconds. * 0|-----------bufferToPrune-----------|-----bufferToKeep-----|currentTime| - * @property {number} [bufferTimeDefault=12] + * @property {number} [bufferTimeDefault=18] * The time that the internal buffer target will be set to when not playing at the top quality. * @property {number} [bufferTimeAtTopQuality=30] * The time that the internal buffer target will be set to once playing the top quality. @@ -630,24 +622,10 @@ import Events from './events/Events.js'; * The minimum bitrate that the ABR algorithms will choose. This value is specified in kbps. * * Use -1 for no limit. 
- * @property {module:Settings~AudioVideoSettings} [maxRepresentationRatio={audio: 1, video: 1}] - * When switching multi-bitrate content (auto or manual mode) this property specifies the maximum representation allowed, as a proportion of the size of the representation set. - * - * You can set or remove this cap at anytime before or during playback. - * - * To clear this setting you set the value to 1. - * - * If both this and maxAllowedBitrate are defined, maxAllowedBitrate is evaluated first, then maxAllowedRepresentation, i.e. the lowest value from executing these rules is used. - * - * This feature is typically used to reserve higher representations for playback only when connected over a fast connection. * @property {module:Settings~AudioVideoSettings} [initialBitrate={audio: -1, video: -1}] * Explicitly set the starting bitrate for audio or video. This value is specified in kbps. * * Use -1 to let the player decide. - * @property {module:Settings~AudioVideoSettings} [initialRepresentationRatio={audio: -1, video: -1}] - * Explicitly set the initial representation ratio. - * - * If initalBitrate is specified, this is ignored. * @property {module:Settings~AudioVideoSettings} [autoSwitchBitrate={audio: true, video: true}] * Indicates whether the player should enable ABR algorithms to switch the bitrate. 
@@ -935,7 +913,7 @@ function Settings() { bufferTimeAtTopQuality: 30, bufferTimeAtTopQualityLongForm: 60, initialBufferLevel: NaN, - bufferTimeDefault: 12, + bufferTimeDefault: 18, longFormContentDurationThreshold: 600, stallThreshold: 0.3, useAppendWindow: true, @@ -1037,6 +1015,7 @@ function Settings() { abr: { limitBitrateByPortal: false, usePixelRatioInLimitBitrateByPortal: false, + enableSupplementalPropertyAdaptationSetSwitching: true, activeRules: { throughputRule: true, bolaRule: true, @@ -1082,18 +1061,10 @@ function Settings() { audio: -1, video: -1 }, - maxRepresentationRatio: { - audio: 1, - video: 1 - }, initialBitrate: { audio: -1, video: -1 }, - initialRepresentationRatio: { - audio: -1, - video: -1 - }, autoSwitchBitrate: { audio: true, video: true @@ -1120,7 +1091,7 @@ function Settings() { audioChannelConfiguration: 'urn:mpeg:mpegB:cicp:ChannelConfiguration', role: 'urn:mpeg:dash:role:2011', accessibility: 'urn:mpeg:dash:role:2011' - } + } }, errors: { recoverAttempts: { diff --git a/src/dash/DashAdapter.js b/src/dash/DashAdapter.js index df120a7a06..86855a2349 100644 --- a/src/dash/DashAdapter.js +++ b/src/dash/DashAdapter.js @@ -30,7 +30,6 @@ */ import DashConstants from './constants/DashConstants.js'; -import RepresentationInfo from './vo/RepresentationInfo.js'; import MediaInfo from './vo/MediaInfo.js'; import StreamInfo from './vo/StreamInfo.js'; import ManifestInfo from './vo/ManifestInfo.js'; @@ -38,6 +37,7 @@ import Event from './vo/Event.js'; import FactoryMaker from '../core/FactoryMaker.js'; import DashManifestModel from './models/DashManifestModel.js'; import PatchManifestModel from './models/PatchManifestModel.js'; +import Representation from './vo/Representation.js'; import {bcp47Normalize} from 'bcp-47-normalize'; /** @@ -50,7 +50,6 @@ function DashAdapter() { dashManifestModel, patchManifestModel, voPeriods, - currentMediaInfo, constants, cea608parser; @@ -85,33 +84,6 @@ function DashAdapter() { } } - /** - * Creates an instance of 
RepresentationInfo based on a representation value object - * @param {object} voRepresentation - * @returns {RepresentationInfo|null} representationInfo - * @memberOf module:DashAdapter - * @instance - * @ignore - */ - function convertRepresentationToRepresentationInfo(voRepresentation) { - if (voRepresentation) { - let representationInfo = new RepresentationInfo(); - const realAdaptation = voRepresentation.adaptation.period.mpd.manifest.Period[voRepresentation.adaptation.period.index].AdaptationSet[voRepresentation.adaptation.index]; - const realRepresentation = dashManifestModel.getRepresentationFor(voRepresentation.index, realAdaptation); - - representationInfo.id = voRepresentation.id; - representationInfo.quality = voRepresentation.index; - representationInfo.bandwidth = dashManifestModel.getBandwidth(realRepresentation); - representationInfo.fragmentDuration = voRepresentation.segmentDuration || (voRepresentation.segments && voRepresentation.segments.length > 0 ? voRepresentation.segments[0].duration : NaN); - representationInfo.MSETimeOffset = voRepresentation.MSETimeOffset; - representationInfo.mediaInfo = convertAdaptationToMediaInfo(voRepresentation.adaptation); - - return representationInfo; - } else { - return null; - } - } - /** * Returns a MediaInfo object for a given media type and the corresponding streamInfo. 
* @param {object} streamInfo @@ -130,7 +102,7 @@ function DashAdapter() { const voAdaptations = dashManifestModel.getAdaptationsForPeriod(selectedVoPeriod); - let realAdaptation = getAdaptationForType(streamInfo.index, type, streamInfo); + let realAdaptation = getMainAdaptationForType(type, streamInfo); if (!realAdaptation) return null; let idx = dashManifestModel.getIndexForAdaptation(realAdaptation, voPeriods[0].mpd.manifest, streamInfo.index); @@ -159,22 +131,13 @@ function DashAdapter() { * @memberOf module:DashAdapter * @instance */ - function getAdaptationForType(periodIndex, type, streamInfo) { - const adaptations = dashManifestModel.getAdaptationsForType(voPeriods[0].mpd.manifest, periodIndex, type); + function getMainAdaptationForType(type, streamInfo) { + const index = streamInfo ? streamInfo.index : 0; + const adaptations = dashManifestModel.getAdaptationsForType(voPeriods[index].mpd.manifest, index, type); if (!adaptations || adaptations.length === 0) return null; if (adaptations.length > 1 && streamInfo) { - const allMediaInfoForType = getAllMediaInfoForType(streamInfo, type); - - if (currentMediaInfo[streamInfo.id] && currentMediaInfo[streamInfo.id][type]) { - for (let i = 0, ln = adaptations.length; i < ln; i++) { - if (areMediaInfosEqual(currentMediaInfo[streamInfo.id][type], allMediaInfoForType[i])) { - return adaptations[i]; - } - } - } - for (let i = 0, ln = adaptations.length; i < ln; i++) { if (getIsMain(adaptations[i])) { return adaptations[i]; @@ -445,7 +408,7 @@ function DashAdapter() { let voReps; const voAdaptation = getAdaptationForMediaInfo(mediaInfo); - voReps = dashManifestModel.getRepresentationsForAdaptation(voAdaptation); + voReps = dashManifestModel.getRepresentationsForAdaptation(voAdaptation, mediaInfo); return voReps; } @@ -510,7 +473,7 @@ function DashAdapter() { } /** - * Returns the events for the given info object. 
info can either be an instance of StreamInfo, MediaInfo or RepresentationInfo + * Returns the events for the given info object. info can either be an instance of StreamInfo, MediaInfo or Representation * @param {object} info * @param {object} voRepresentation * @returns {Array} @@ -530,7 +493,7 @@ function DashAdapter() { } else if (info instanceof MediaInfo) { const period = getPeriodForStreamInfo(streamInfo, voPeriods) events = dashManifestModel.getEventStreamForAdaptationSet(manifest, getAdaptationForMediaInfo(info), period); - } else if (info instanceof RepresentationInfo) { + } else if (info instanceof Representation) { const period = getPeriodForStreamInfo(streamInfo, voPeriods) events = dashManifestModel.getEventStreamForRepresentation(manifest, voRepresentation, period); } @@ -539,20 +502,6 @@ function DashAdapter() { return events; } - /** - * Sets the current active mediaInfo for a given streamId and a given mediaType - * @param {number} streamId - * @param {MediaType} type - * @param {object} mediaInfo - * @memberOf module:DashAdapter - * @instance - * @ignore - */ - function setCurrentMediaInfo(streamId, type, mediaInfo) { - currentMediaInfo[streamId] = currentMediaInfo[streamId] || {}; - currentMediaInfo[streamId][type] = currentMediaInfo[streamId][type] || {}; - currentMediaInfo[streamId][type] = mediaInfo; - } /** * Check if the given type is a text track @@ -805,20 +754,6 @@ function DashAdapter() { return findRepresentationIndex(period, representationId); } - /** - * This method returns the current max index based on what is defined in the MPD. 
- * @param {string} bufferType - String 'audio' or 'video', - * @param {number} periodIdx - Make sure this is the period index not id - * @return {number} - * @memberOf module:DashAdapter - * @instance - */ - function getMaxIndexForBufferType(bufferType, periodIdx) { - let period = getPeriod(periodIdx); - - return findMaxBufferIndex(period, bufferType); - } - /** * Returns the voPeriod object for a given id * @param {String} id @@ -855,7 +790,6 @@ function DashAdapter() { function reset() { voPeriods = []; - currentMediaInfo = {}; } /** @@ -1062,11 +996,11 @@ function DashAdapter() { mediaInfo.isText = dashManifestModel.getIsText(realAdaptation); mediaInfo.supplementalProperties = dashManifestModel.getSupplementalPropertiesForAdaptation(realAdaptation); - if ( (!mediaInfo.supplementalProperties || mediaInfo.supplementalProperties.length === 0) && realAdaptation.Representation && realAdaptation.Representation.length > 0) { - let arr = realAdaptation.Representation.map( repr => { + if ((!mediaInfo.supplementalProperties || mediaInfo.supplementalProperties.length === 0) && realAdaptation.Representation && realAdaptation.Representation.length > 0) { + let arr = realAdaptation.Representation.map(repr => { return dashManifestModel.getSupplementalPropertiesForRepresentation(repr); }); - if ( arr.every( v => JSON.stringify(v) === JSON.stringify(arr[0]) ) ) { + if (arr.every(v => JSON.stringify(v) === JSON.stringify(arr[0]))) { // only output Representation.supplementalProperties to mediaInfo, if they are present on all Representations mediaInfo.supplementalProperties = arr[0]; } @@ -1074,6 +1008,22 @@ function DashAdapter() { mediaInfo.isFragmented = dashManifestModel.getIsFragmented(realAdaptation); mediaInfo.isEmbedded = false; + mediaInfo.hasProtectedRepresentations = dashManifestModel.getAdaptationHasProtectedRepresentations(realAdaptation); + + // Save IDs of AS that we can switch to + try { + const adaptationSetSwitching = mediaInfo.supplementalProperties.filter((sp) 
=> { + return sp.schemeIdUri === DashConstants.ADAPTATION_SET_SWITCHING_SCHEME_ID_URI + }); + if (adaptationSetSwitching && adaptationSetSwitching.length > 0) { + const ids = adaptationSetSwitching[0].value.toString().split(',') + mediaInfo.adaptationSetSwitchingCompatibleIds = ids.map((id) => { + return id + }) + } + } catch (e) { + return mediaInfo; + } return mediaInfo; } @@ -1086,7 +1036,7 @@ function DashAdapter() { mediaInfo.isEmbedded = true; mediaInfo.isFragmented = false; mediaInfo.lang = bcp47Normalize(lang); - mediaInfo.roles = [{schemeIdUri:'urn:mpeg:dash:role:2011', value:'caption'}]; + mediaInfo.roles = [{ schemeIdUri: 'urn:mpeg:dash:role:2011', value: 'caption' }]; } function convertVideoInfoToThumbnailInfo(mediaInfo) { @@ -1168,71 +1118,48 @@ function DashAdapter() { return null; } - function findMaxBufferIndex(period, bufferType) { - let adaptationSet, - adaptationSetArray, - representationArray, - adaptationSetArrayIndex; - - if (!period || !bufferType) return -1; - - adaptationSetArray = period.AdaptationSet; - for (adaptationSetArrayIndex = 0; adaptationSetArrayIndex < adaptationSetArray.length; adaptationSetArrayIndex = adaptationSetArrayIndex + 1) { - adaptationSet = adaptationSetArray[adaptationSetArrayIndex]; - representationArray = adaptationSet.Representation; - if (dashManifestModel.getIsTypeOf(adaptationSet, bufferType)) { - return representationArray.length; - } - } - - return -1; - } - // #endregion PRIVATE FUNCTIONS instance = { - getBandwidthForRepresentation, - getIndexForRepresentation, - getMaxIndexForBufferType, - convertRepresentationToRepresentationInfo, - getStreamsInfo, - getMediaInfoForType, + applyPatchToManifest, + getMainAdaptationForType, + areMediaInfosEqual, getAllMediaInfoForType, - getAdaptationForType, - getRealAdaptation, - getProducerReferenceTimes, - getRealPeriodByIndex, + getAvailabilityStartTime, + getBandwidthForRepresentation, + getBaseURLsFromElement, + getCodec, + getContentSteering, + getDuration, 
getEssentialPropertiesForRepresentation, - getVoRepresentations, - getEventsFor, getEvent, - getMpd, - setConfig, - updatePeriods, + getEventsFor, + getIndexForRepresentation, + getIsDVB, + getIsDynamic, + getIsPatch, getIsTextTrack, - getUTCTimingSources, - getSuggestedPresentationDelay, - getAvailabilityStartTime, getIsTypeOf, - getIsDynamic, - getDuration, - getRegularPeriods, - getContentSteering, getLocation, - getPatchLocation, getManifestUpdatePeriod, + getMediaInfoForType, + getMpd, + getPatchLocation, + getPeriodById, + getProducerReferenceTimes, getPublishTime, - getIsDVB, - getIsPatch, - getBaseURLsFromElement, + getRealAdaptation, + getRealPeriodByIndex, + getRegularPeriods, getRepresentationSortFunction, - getCodec, - getPeriodById, - setCurrentMediaInfo, + getStreamsInfo, + getSuggestedPresentationDelay, + getUTCTimingSources, + getVoRepresentations, isPatchValid, - applyPatchToManifest, - areMediaInfosEqual, - reset + reset, + setConfig, + updatePeriods, }; setup(); diff --git a/src/dash/DashHandler.js b/src/dash/DashHandler.js index eccd541bbc..12c094f3a4 100644 --- a/src/dash/DashHandler.js +++ b/src/dash/DashHandler.js @@ -140,9 +140,7 @@ function DashHandler(config) { request.range = representation.range; request.availabilityStartTime = timelineConverter.calcAvailabilityStartTimeFromPresentationTime(presentationStartTime, representation, isDynamicManifest); request.availabilityEndTime = timelineConverter.calcAvailabilityEndTimeFromPresentationTime(presentationStartTime + period.duration, representation, isDynamicManifest); - request.quality = representation.index; - request.mediaInfo = mediaInfo; - request.representationId = representation.id; + request.representation = representation; if (_setRequestUrl(request, representation.initialization, representation)) { request.url = replaceTokenForTemplate(request.url, 'Bandwidth', representation.bandwidth); @@ -157,7 +155,7 @@ function DashHandler(config) { const request = new FragmentRequest(); const 
representation = segment.representation; - const bandwidth = representation.adaptation.period.mpd.manifest.Period[representation.adaptation.period.index].AdaptationSet[representation.adaptation.index].Representation[representation.index].bandwidth; + const bandwidth = representation.bandwidth; let url = segment.media; url = replaceTokenForTemplate(url, 'Number', segment.replacementNumber); @@ -167,6 +165,7 @@ function DashHandler(config) { url = unescapeDollarsInTemplate(url); request.mediaType = getType(); + request.bandwidth = representation.bandwidth; request.type = HTTPRequest.MEDIA_SEGMENT_TYPE; request.range = segment.mediaRange; request.startTime = segment.presentationStartTime; @@ -177,11 +176,9 @@ function DashHandler(config) { request.availabilityEndTime = segment.availabilityEndTime; request.availabilityTimeComplete = representation.availabilityTimeComplete; request.wallStartTime = segment.wallStartTime; - request.quality = representation.index; request.index = segment.index; - request.mediaInfo = mediaInfo; request.adaptationIndex = representation.adaptation.index; - request.representationId = representation.id; + request.representation = representation; if (_setRequestUrl(request, url, representation)) { return request; diff --git a/src/dash/DashMetrics.js b/src/dash/DashMetrics.js index 299665f70b..5d00958010 100644 --- a/src/dash/DashMetrics.js +++ b/src/dash/DashMetrics.js @@ -282,7 +282,7 @@ function DashMetrics(config) { request.startTime, request.availabilityStartTime, request.duration, - request.quality, + request.bandwidth, request.range, state); } @@ -359,7 +359,7 @@ function DashMetrics(config) { function addManifestUpdateRepresentationInfo(representation, mediaType) { if (representation) { const manifestUpdateInfo = this.getCurrentManifestUpdate(); - metricsModel.addManifestUpdateRepresentationInfo(manifestUpdateInfo, representation.id, representation.index, representation.streamIndex, mediaType, representation.presentationTimeOffset, 
representation.startNumber, representation.fragmentInfoType); + metricsModel.addManifestUpdateRepresentationInfo(manifestUpdateInfo, representation, mediaType); } } diff --git a/src/dash/constants/DashConstants.js b/src/dash/constants/DashConstants.js index 55cbc10822..049039077c 100644 --- a/src/dash/constants/DashConstants.js +++ b/src/dash/constants/DashConstants.js @@ -22,8 +22,8 @@ * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. * IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT - * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR - * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, + * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES, LOSS OF USE, DATA, OR + * PROFITS, OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
@@ -31,156 +31,143 @@ /** * Dash constants declaration - * @class * @ignore */ -class DashConstants { - - init() { - this.BASE_URL = 'BaseURL'; - this.SEGMENT_BASE = 'SegmentBase'; - this.SEGMENT_TEMPLATE = 'SegmentTemplate'; - this.SEGMENT_LIST = 'SegmentList'; - this.SEGMENT_URL = 'SegmentURL'; - this.SEGMENT_TIMELINE = 'SegmentTimeline'; - this.S = 'S'; - this.SEGMENT_PROFILES = 'segmentProfiles'; - this.ADAPTATION_SET = 'AdaptationSet'; - this.REPRESENTATION = 'Representation'; - this.REPRESENTATION_INDEX = 'RepresentationIndex'; - this.SUB_REPRESENTATION = 'SubRepresentation'; - this.INITIALIZATION = 'Initialization'; - this.INITIALIZATION_MINUS = 'initialization'; - this.MPD = 'MPD'; - this.PERIOD = 'Period'; - this.ASSET_IDENTIFIER = 'AssetIdentifier'; - this.EVENT_STREAM = 'EventStream'; - this.EVENT = 'Event'; - this.ID = 'id'; - this.PROFILES = 'profiles'; - this.LOCATION = 'Location'; - this.SERVICE_LOCATION = 'serviceLocation'; - this.RANGE = 'range'; - this.INDEX = 'index'; - this.MEDIA = 'media'; - this.BYTE_RANGE = 'byteRange'; - this.INDEX_RANGE = 'indexRange'; - this.MEDIA_RANGE = 'mediaRange'; - this.VALUE = 'value'; - this.CONTENT_TYPE = 'contentType'; - this.MIME_TYPE = 'mimeType'; - this.BITSTREAM_SWITCHING = 'BitstreamSwitching'; - this.BITSTREAM_SWITCHING_MINUS = 'bitstreamSwitching'; - this.CODECS = 'codecs'; - this.DEPENDENCY_ID = 'dependencyId'; - this.MEDIA_STREAM_STRUCTURE_ID = 'mediaStreamStructureId'; - this.METRICS = 'Metrics'; - this.METRICS_MINUS = 'metrics'; - this.REPORTING = 'Reporting'; - this.WIDTH = 'width'; - this.HEIGHT = 'height'; - this.SAR = 'sar'; - this.FRAMERATE = 'frameRate'; - this.AUDIO_SAMPLING_RATE = 'audioSamplingRate'; - this.MAXIMUM_SAP_PERIOD = 'maximumSAPPeriod'; - this.START_WITH_SAP = 'startWithSAP'; - this.MAX_PLAYOUT_RATE = 'maxPlayoutRate'; - this.CODING_DEPENDENCY = 'codingDependency'; - this.SCAN_TYPE = 'scanType'; - this.FRAME_PACKING = 'FramePacking'; - this.AUDIO_CHANNEL_CONFIGURATION = 
'AudioChannelConfiguration'; - this.CONTENT_PROTECTION = 'ContentProtection'; - this.ESSENTIAL_PROPERTY = 'EssentialProperty'; - this.LABEL = 'Label'; - this.SUPPLEMENTAL_PROPERTY = 'SupplementalProperty'; - this.INBAND_EVENT_STREAM = 'InbandEventStream'; - this.PRODUCER_REFERENCE_TIME = 'ProducerReferenceTime'; - this.INBAND = 'inband'; - this.TYPE = 'type'; - this.ACCESSIBILITY = 'Accessibility'; - this.ROLE = 'Role'; - this.RATING = 'Rating'; - this.CONTENT_COMPONENT = 'ContentComponent'; - this.SUBSET = 'Subset'; - this.LANG = 'lang'; - this.VIEWPOINT = 'Viewpoint'; - this.ROLE = 'Role'; - this.ACCESSIBILITY = 'Accessibility'; - this.MAIN = 'main'; - this.DYNAMIC = 'dynamic'; - this.STATIC = 'static'; - this.MEDIA_PRESENTATION_DURATION = 'mediaPresentationDuration'; - this.MINIMUM_UPDATE_PERIOD = 'minimumUpdatePeriod'; - this.CODEC_PRIVATE_DATA = 'codecPrivateData'; - this.BANDWITH = 'bandwidth'; - this.SOURCE_URL = 'sourceURL'; - this.TIMESCALE = 'timescale'; - this.DURATION = 'duration'; - this.START_NUMBER = 'startNumber'; - this.PRESENTATION_TIME_OFFSET = 'presentationTimeOffset'; - this.AVAILABILITY_START_TIME = 'availabilityStartTime'; - this.AVAILABILITY_END_TIME = 'availabilityEndTime'; - this.TIMESHIFT_BUFFER_DEPTH = 'timeShiftBufferDepth'; - this.MAX_SEGMENT_DURATION = 'maxSegmentDuration'; - this.PRESENTATION_TIME = 'presentationTime'; - this.MIN_BUFFER_TIME = 'minBufferTime'; - this.MAX_SUBSEGMENT_DURATION = 'maxSubsegmentDuration'; - this.START = 'start'; - this.AVAILABILITY_TIME_OFFSET = 'availabilityTimeOffset'; - this.AVAILABILITY_TIME_COMPLETE = 'availabilityTimeComplete'; - this.CENC_DEFAULT_KID = 'cenc:default_KID'; - this.DVB_PRIORITY = 'dvb:priority'; - this.DVB_WEIGHT = 'dvb:weight'; - this.SUGGESTED_PRESENTATION_DELAY = 'suggestedPresentationDelay'; - this.SERVICE_DESCRIPTION = 'ServiceDescription'; - this.SERVICE_DESCRIPTION_SCOPE = 'Scope'; - this.SERVICE_DESCRIPTION_LATENCY = 'Latency'; - this.SERVICE_DESCRIPTION_PLAYBACK_RATE = 
'PlaybackRate'; - this.SERVICE_DESCRIPTION_OPERATING_QUALITY = 'OperatingQuality'; - this.SERVICE_DESCRIPTION_OPERATING_BANDWIDTH = 'OperatingBandwidth'; - this.PATCH_LOCATION = 'PatchLocation'; - this.LOCATION = 'Location'; - this.PUBLISH_TIME = 'publishTime'; - this.ORIGINAL_PUBLISH_TIME = 'originalPublishTime'; - this.ORIGINAL_MPD_ID = 'mpdId'; - this.REPLACE = 'replace'; - this.ADD = 'add'; - this.REMOVE = 'remove'; - this.WALL_CLOCK_TIME = 'wallClockTime'; - this.PRESENTATION_TIME = 'presentationTime'; - this.UTC_TIMING = 'UTCTiming'; - this.LABEL = 'Label'; - this.GROUP_LABEL = 'GroupLabel'; - this.CONTENT_STEERING = 'ContentSteering'; - this.DEFAULT_SERVICE_LOCATION = 'defaultServiceLocation'; - this.QUERY_BEFORE_START = 'queryBeforeStart'; - this.CLIENT_REQUIREMENT = 'clientRequirement'; - this.TTL = 'ttl'; - this.CONTENT_STEERING_RESPONSE = { - VERSION: 'VERSION', - TTL: 'TTL', - RELOAD_URI: 'RELOAD-URI', - PATHWAY_PRIORITY : 'PATHWAY-PRIORITY', - PATHWAY_CLONES : 'PATHWAY-CLONES', - BASE_ID: 'BASE-ID', - ID: 'ID', - URI_REPLACEMENT: 'URI-REPLACEMENT', - HOST: 'HOST', - PARAMS: 'PARAMS' - }; - this.PRODUCER_REFERENCE_TIME_TYPE = { - ENCODER: 'encoder', - CAPTURED: 'captured', - APPLICATION: 'application' - } - this.SEGMENT_ALIGNMENT = 'segmentAlignment'; - this.SUB_SEGMENT_ALIGNMENT = 'subsegmentAlignment' - } - - constructor () { - this.init(); - } +export default { + ACCESSIBILITY: 'Accessibility', + ADAPTATION_SET: 'AdaptationSet', + ADAPTATION_SET_SWITCHING_SCHEME_ID_URI: 'urn:mpeg:dash:adaptation-set-switching:2016', + ADD: 'add', + ASSET_IDENTIFIER: 'AssetIdentifier', + AUDIO_CHANNEL_CONFIGURATION: 'AudioChannelConfiguration', + AUDIO_SAMPLING_RATE: 'audioSamplingRate', + AVAILABILITY_END_TIME: 'availabilityEndTime', + AVAILABILITY_START_TIME: 'availabilityStartTime', + AVAILABILITY_TIME_COMPLETE: 'availabilityTimeComplete', + AVAILABILITY_TIME_OFFSET: 'availabilityTimeOffset', + BANDWITH: 'bandwidth', + BASE_URL: 'BaseURL', + BITSTREAM_SWITCHING: 
'BitstreamSwitching', + BITSTREAM_SWITCHING_MINUS: 'bitstreamSwitching', + BYTE_RANGE: 'byteRange', + CENC_DEFAULT_KID: 'cenc:default_KID', + CLIENT_REQUIREMENT: 'clientRequirement', + CODECS: 'codecs', + CODEC_PRIVATE_DATA: 'codecPrivateData', + CODING_DEPENDENCY: 'codingDependency', + CONTENT_COMPONENT: 'ContentComponent', + CONTENT_PROTECTION: 'ContentProtection', + CONTENT_STEERING: 'ContentSteering', + CONTENT_STEERING_RESPONSE: { + VERSION: 'VERSION', + TTL: 'TTL', + RELOAD_URI: 'RELOAD-URI', + PATHWAY_PRIORITY: 'PATHWAY-PRIORITY', + PATHWAY_CLONES: 'PATHWAY-CLONES', + BASE_ID: 'BASE-ID', + ID: 'ID', + URI_REPLACEMENT: 'URI-REPLACEMENT', + HOST: 'HOST', + PARAMS: 'PARAMS' + }, + CONTENT_TYPE: 'contentType', + DEFAULT_SERVICE_LOCATION: 'defaultServiceLocation', + DEPENDENCY_ID: 'dependencyId', + DURATION: 'duration', + DVB_PRIORITY: 'dvb:priority', + DVB_WEIGHT: 'dvb:weight', + DYNAMIC: 'dynamic', + ESSENTIAL_PROPERTY: 'EssentialProperty', + EVENT: 'Event', + EVENT_STREAM: 'EventStream', + FRAMERATE: 'frameRate', + FRAME_PACKING: 'FramePacking', + GROUP_LABEL: 'GroupLabel', + HEIGHT: 'height', + ID: 'id', + INBAND: 'inband', + INBAND_EVENT_STREAM: 'InbandEventStream', + INDEX: 'index', + INDEX_RANGE: 'indexRange', + INITIALIZATION: 'Initialization', + INITIALIZATION_MINUS: 'initialization', + LABEL: 'Label', + LANG: 'lang', + LOCATION: 'Location', + MAIN: 'main', + MAXIMUM_SAP_PERIOD: 'maximumSAPPeriod', + MAX_PLAYOUT_RATE: 'maxPlayoutRate', + MAX_SEGMENT_DURATION: 'maxSegmentDuration', + MAX_SUBSEGMENT_DURATION: 'maxSubsegmentDuration', + MEDIA: 'media', + MEDIA_PRESENTATION_DURATION: 'mediaPresentationDuration', + MEDIA_RANGE: 'mediaRange', + MEDIA_STREAM_STRUCTURE_ID: 'mediaStreamStructureId', + METRICS: 'Metrics', + METRICS_MINUS: 'metrics', + MIME_TYPE: 'mimeType', + MINIMUM_UPDATE_PERIOD: 'minimumUpdatePeriod', + MIN_BUFFER_TIME: 'minBufferTime', + MPD: 'MPD', + ORIGINAL_MPD_ID: 'mpdId', + ORIGINAL_PUBLISH_TIME: 'originalPublishTime', + PATCH_LOCATION: 
'PatchLocation', + PERIOD: 'Period', + PRESENTATION_TIME: 'presentationTime', + PRESENTATION_TIME_OFFSET: 'presentationTimeOffset', + PRODUCER_REFERENCE_TIME: 'ProducerReferenceTime', + PRODUCER_REFERENCE_TIME_TYPE: { + ENCODER: 'encoder', + CAPTURED: 'captured', + APPLICATION: 'application' + }, + PROFILES: 'profiles', + PUBLISH_TIME: 'publishTime', + QUALITY_RANKING : 'qualityRanking', + QUERY_BEFORE_START: 'queryBeforeStart', + RANGE: 'range', + RATING: 'Rating', + REMOVE: 'remove', + REPLACE: 'replace', + REPORTING: 'Reporting', + REPRESENTATION: 'Representation', + REPRESENTATION_INDEX: 'RepresentationIndex', + ROLE: 'Role', + S: 'S', + SAR: 'sar', + SCAN_TYPE: 'scanType', + SEGMENT_ALIGNMENT: 'segmentAlignment', + SEGMENT_BASE: 'SegmentBase', + SEGMENT_LIST: 'SegmentList', + SEGMENT_PROFILES: 'segmentProfiles', + SEGMENT_TEMPLATE: 'SegmentTemplate', + SEGMENT_TIMELINE: 'SegmentTimeline', + SEGMENT_URL: 'SegmentURL', + SERVICE_DESCRIPTION: 'ServiceDescription', + SERVICE_DESCRIPTION_LATENCY: 'Latency', + SERVICE_DESCRIPTION_OPERATING_BANDWIDTH: 'OperatingBandwidth', + SERVICE_DESCRIPTION_OPERATING_QUALITY: 'OperatingQuality', + SERVICE_DESCRIPTION_PLAYBACK_RATE: 'PlaybackRate', + SERVICE_DESCRIPTION_SCOPE: 'Scope', + SERVICE_LOCATION: 'serviceLocation', + SOURCE_URL: 'sourceURL', + START: 'start', + START_NUMBER: 'startNumber', + START_WITH_SAP: 'startWithSAP', + STATIC: 'static', + SUBSET: 'Subset', + SUB_REPRESENTATION: 'SubRepresentation', + SUB_SEGMENT_ALIGNMENT: 'subsegmentAlignment', + SUGGESTED_PRESENTATION_DELAY: 'suggestedPresentationDelay', + SUPPLEMENTAL_PROPERTY: 'SupplementalProperty', + TIMESCALE: 'timescale', + TIMESHIFT_BUFFER_DEPTH: 'timeShiftBufferDepth', + TTL: 'ttl', + TYPE: 'type', + UTC_TIMING: 'UTCTiming', + VALUE: 'value', + VIEWPOINT: 'Viewpoint', + WALL_CLOCK_TIME: 'wallClockTime', + WIDTH: 'width', } -let constants = new DashConstants(); -export default constants; diff --git a/src/dash/controllers/RepresentationController.js 
b/src/dash/controllers/RepresentationController.js index e3aee19c55..a5969338f5 100644 --- a/src/dash/controllers/RepresentationController.js +++ b/src/dash/controllers/RepresentationController.js @@ -44,16 +44,11 @@ function RepresentationController(config) { const timelineConverter = config.timelineConverter; const type = config.type; const streamInfo = config.streamInfo; - const dashConstants = config.dashConstants; const segmentsController = config.segmentsController; const isDynamic = config.isDynamic; - const adapter = config.adapter; let instance, - realAdaptation, - updating, voAvailableRepresentations, - currentRepresentationInfo, currentVoRepresentation; function setup() { @@ -76,27 +71,12 @@ function RepresentationController(config) { } } - function getData() { - return realAdaptation; - } - - function isUpdating() { - return updating; - } - function getCurrentRepresentation() { return currentVoRepresentation; } - function getCurrentRepresentationInfo() { - return currentRepresentationInfo - } - function resetInitialSettings() { - realAdaptation = null; - updating = true; voAvailableRepresentations = []; - currentRepresentationInfo = null; } function reset() { @@ -105,13 +85,12 @@ function RepresentationController(config) { resetInitialSettings(); } - function updateData(newRealAdaptation, availableRepresentations, type, isFragmented, quality) { + function updateData(availableRepresentations, isFragmented, selectedRepresentationId) { return new Promise((resolve, reject) => { - updating = true; voAvailableRepresentations = availableRepresentations; - realAdaptation = newRealAdaptation; - const rep = getRepresentationForQuality(quality) - _setCurrentVoRepresentation(rep); + const selectedRepresentation = getRepresentationById(selectedRepresentationId); + _setCurrentVoRepresentation(selectedRepresentation); + if (type !== Constants.VIDEO && type !== Constants.AUDIO && (type !== Constants.TEXT || !isFragmented)) { endDataUpdate(); @@ -127,16 +106,23 @@ 
function RepresentationController(config) { Promise.all(promises) .then(() => { - // Update the current representation again as we have now the reference to the segments - const rep = getRepresentationForQuality(quality) - _setCurrentVoRepresentation(rep); + _onAllRepresentationsUpdated(); resolve(); }) .catch((e) => { reject(e); }) }) + } + + function _onAllRepresentationsUpdated() { + abrController.setPlaybackQuality(type, streamInfo, currentVoRepresentation); + const dvrInfo = dashMetrics.getCurrentDVRInfo(type); + if (dvrInfo) { + dashMetrics.updateManifestUpdateInfo({ latency: dvrInfo.range.end - playbackController.getTime() }); + } + endDataUpdate(); } function _updateRepresentation(currentRep) { @@ -159,6 +145,7 @@ function RepresentationController(config) { if (data[1] && !data[1].error) { currentRep = _onSegmentsLoaded(currentRep, data[1]); } + currentRep.fragmentDuration = currentRep.segmentDuration ? currentRep.segmentDuration : currentRep.segments && currentRep.segments.length > 0 ? 
currentRep.segments[0].duration : NaN; _setMediaFinishedInformation(currentRep); _onRepresentationUpdated(currentRep); resolve(); @@ -220,10 +207,9 @@ function RepresentationController(config) { return representation; } - function _addRepresentationSwitch() { + function _addRepresentationSwitch(currentRepresentation) { checkConfig(); const now = new Date(); - const currentRepresentation = getCurrentRepresentation(); const currentVideoTimeMs = playbackController.getTime() * 1000; if (currentRepresentation) { dashMetrics.addRepresentationSwitch(currentRepresentation.adaptation.type, now, currentVideoTimeMs, currentRepresentation.id); @@ -233,36 +219,28 @@ function RepresentationController(config) { mediaType: type, streamId: streamInfo.id, currentRepresentation, - numberOfRepresentations: voAvailableRepresentations.length }, { streamId: streamInfo.id, mediaType: type }) } - function getRepresentationForQuality(quality) { - return quality === null || quality === undefined || quality >= voAvailableRepresentations.length ? 
null : voAvailableRepresentations[quality]; - } + function getRepresentationById(id) { + if (!voAvailableRepresentations || voAvailableRepresentations.length === 0) { + return null; + } - function getQualityForRepresentation(voRepresentation) { - return voAvailableRepresentations.indexOf(voRepresentation); - } + const reps = voAvailableRepresentations.filter((rep) => { + return rep.id === id; + }) - function isAllRepresentationsUpdated() { - for (let i = 0, ln = voAvailableRepresentations.length; i < ln; i++) { - let segmentInfoType = voAvailableRepresentations[i].segmentInfoType; - if (!voAvailableRepresentations[i].hasInitialization() || - ((segmentInfoType === dashConstants.SEGMENT_BASE || segmentInfoType === dashConstants.BASE_URL) && !voAvailableRepresentations[i].segments) - ) { - return false; - } + if (reps.length > 0) { + return reps[0] } - return true; + return null; } function endDataUpdate(error) { - updating = false; eventBus.trigger(events.DATA_UPDATE_COMPLETED, { - data: realAdaptation, currentRepresentation: currentVoRepresentation, error: error }, @@ -271,12 +249,9 @@ function RepresentationController(config) { } function _onRepresentationUpdated(r) { - if (!isUpdating()) return; - let manifestUpdateInfo = dashMetrics.getCurrentManifestUpdate(); let alreadyAdded = false; - let repInfo, - repSwitch; + let repInfo; if (manifestUpdateInfo) { @@ -292,32 +267,28 @@ function RepresentationController(config) { dashMetrics.addManifestUpdateRepresentationInfo(r, getType()); } } + } - if (isAllRepresentationsUpdated()) { - abrController.setPlaybackQuality(type, streamInfo, getQualityForRepresentation(currentVoRepresentation)); - const dvrInfo = dashMetrics.getCurrentDVRInfo(type); - if (dvrInfo) { - dashMetrics.updateManifestUpdateInfo({ latency: dvrInfo.range.end - playbackController.getTime() }); - } - - repSwitch = dashMetrics.getCurrentRepresentationSwitch(getCurrentRepresentation().adaptation.type); + /** + * We get the new selected Representation which 
will not hold the ranges and the segment references in case of SegmentBase. + * In any case use the id to find the right Representation instance in our array of Representations. + * @param newRep + */ + function prepareQualityChange(newRep) { + const voRepresentations = voAvailableRepresentations.filter((rep) => { + return rep.id === newRep.id; + }) - if (!repSwitch) { - _addRepresentationSwitch(); - } - endDataUpdate(); + if (voRepresentations.length > 0) { + _setCurrentVoRepresentation(voRepresentations[0]); } } - function prepareQualityChange(newQuality) { - const newRep = getRepresentationForQuality(newQuality) - _setCurrentVoRepresentation(newRep); - _addRepresentationSwitch(); - } - function _setCurrentVoRepresentation(value) { + if (!currentVoRepresentation || currentVoRepresentation.id !== value.id) { + _addRepresentationSwitch(value); + } currentVoRepresentation = value; - currentRepresentationInfo = adapter.convertRepresentationToRepresentationInfo(currentVoRepresentation); } function onManifestValidityChanged(e) { @@ -331,16 +302,13 @@ function RepresentationController(config) { } instance = { + getCurrentRepresentation, + getRepresentationById, getStreamId, getType, - getData, - isUpdating, - updateData, - getCurrentRepresentation, - getCurrentRepresentationInfo, - getRepresentationForQuality, prepareQualityChange, - reset + reset, + updateData, }; setup(); diff --git a/src/dash/models/DashManifestModel.js b/src/dash/models/DashManifestModel.js index 9a90e2d436..7458f93929 100644 --- a/src/dash/models/DashManifestModel.js +++ b/src/dash/models/DashManifestModel.js @@ -234,7 +234,7 @@ function DashManifestModel() { function getViewpointForAdaptation(adaptation) { if (!adaptation || !adaptation.hasOwnProperty(DashConstants.VIEWPOINT) || !adaptation[DashConstants.VIEWPOINT].length) return []; - return adaptation[DashConstants.VIEWPOINT].map( viewpoint => { + return adaptation[DashConstants.VIEWPOINT].map(viewpoint => { const vp = new DescriptorType(); 
return vp.init(viewpoint); }); @@ -242,7 +242,7 @@ function DashManifestModel() { function getRolesForAdaptation(adaptation) { if (!adaptation || !adaptation.hasOwnProperty(DashConstants.ROLE) || !adaptation[DashConstants.ROLE].length) return []; - return adaptation[DashConstants.ROLE].map( role => { + return adaptation[DashConstants.ROLE].map(role => { const r = new DescriptorType(); return r.init(role); }); @@ -250,7 +250,7 @@ function DashManifestModel() { function getAccessibilityForAdaptation(adaptation) { if (!adaptation || !adaptation.hasOwnProperty(DashConstants.ACCESSIBILITY) || !adaptation[DashConstants.ACCESSIBILITY].length) return []; - return adaptation[DashConstants.ACCESSIBILITY].map( accessibility => { + return adaptation[DashConstants.ACCESSIBILITY].map(accessibility => { const a = new DescriptorType(); return a.init(accessibility); }); @@ -258,7 +258,7 @@ function DashManifestModel() { function getAudioChannelConfigurationForAdaptation(adaptation) { if (!adaptation || !adaptation.hasOwnProperty(DashConstants.AUDIO_CHANNEL_CONFIGURATION) || !adaptation[DashConstants.AUDIO_CHANNEL_CONFIGURATION].length) return []; - return adaptation[DashConstants.AUDIO_CHANNEL_CONFIGURATION].map( audioChanCfg => { + return adaptation[DashConstants.AUDIO_CHANNEL_CONFIGURATION].map(audioChanCfg => { const acc = new DescriptorType(); return acc.init(audioChanCfg); }); @@ -266,7 +266,7 @@ function DashManifestModel() { function getAudioChannelConfigurationForRepresentation(representation) { if (!representation || !representation.hasOwnProperty(DashConstants.AUDIO_CHANNEL_CONFIGURATION) || !representation[DashConstants.AUDIO_CHANNEL_CONFIGURATION].length) return []; - return representation[DashConstants.AUDIO_CHANNEL_CONFIGURATION].map( audioChanCfg => { + return representation[DashConstants.AUDIO_CHANNEL_CONFIGURATION].map(audioChanCfg => { const acc = new DescriptorType(); return acc.init(audioChanCfg); }); @@ -370,7 +370,7 @@ function DashManifestModel() { } } - // 
If the codec contains a profiles parameter we remove it. Otherwise it will cause problems when checking for codec capabilities of the platform + // If the codec contains a profiles parameter we remove it. Otherwise, it will cause problems when checking for codec capabilities of the platform if (codec) { codec = codec.replace(/\sprofiles=[^;]*/g, ''); } @@ -429,6 +429,21 @@ function DashManifestModel() { return adaptation.ContentProtection; } + function getAdaptationHasProtectedRepresentations(adaptation) { + if (adaptation && adaptation.hasOwnProperty(DashConstants.CONTENT_PROTECTION) && adaptation.ContentProtection.length > 0) { + return true; + } + + let encryptedRepresentations = false; + if (adaptation.Representation && adaptation.Representation.length > 0) { + encryptedRepresentations = adaptation.Representation.some((rep) => { + return rep.hasOwnProperty(DashConstants.CONTENT_PROTECTION) && rep.ContentProtection.length > 0 + }) + } + + return encryptedRepresentations; + } + function getIsDynamic(manifest) { let isDynamic = false; if (manifest && manifest.hasOwnProperty('type')) { @@ -536,7 +551,7 @@ } } - function getRepresentationsForAdaptation(voAdaptation) { + function getRepresentationsForAdaptation(voAdaptation, mediaInfo) { const voRepresentations = []; const processedRealAdaptation = getRealAdaptationFor(voAdaptation); let segmentInfo, @@ -558,6 +573,7 @@ const voRepresentation = new Representation(); voRepresentation.index = i; voRepresentation.adaptation = voAdaptation; + voRepresentation.mediaInfo = mediaInfo; if (realRepresentation.hasOwnProperty(DashConstants.ID)) { voRepresentation.id = realRepresentation.id; @@ -573,6 +589,7 @@ } if (realRepresentation.hasOwnProperty(DashConstants.BANDWITH)) { voRepresentation.bandwidth = realRepresentation.bandwidth; + voRepresentation.bitrateInKbit = realRepresentation.bandwidth / 1000; } if
(realRepresentation.hasOwnProperty(DashConstants.WIDTH)) { voRepresentation.width = realRepresentation.width; @@ -583,6 +600,9 @@ function DashManifestModel() { if (realRepresentation.hasOwnProperty(DashConstants.SCAN_TYPE)) { voRepresentation.scanType = realRepresentation.scanType; } + if (realRepresentation.hasOwnProperty(DashConstants.QUALITY_RANKING)) { + voRepresentation.qualityRanking = realRepresentation[DashConstants.QUALITY_RANKING]; + } if (realRepresentation.hasOwnProperty(DashConstants.MAX_PLAYOUT_RATE)) { voRepresentation.maxPlayoutRate = realRepresentation.maxPlayoutRate; } @@ -677,7 +697,7 @@ function DashManifestModel() { } } - voRepresentation.MSETimeOffset = calcMSETimeOffset(voRepresentation); + voRepresentation.mseTimeOffset = calcMseTimeOffset(voRepresentation); voRepresentation.path = [voAdaptation.period.index, voAdaptation.index, i]; voRepresentations.push(voRepresentation); } @@ -695,7 +715,7 @@ function DashManifestModel() { return s0.hasOwnProperty('d') ? s0.d : (s1.t - s0.t); } - function calcMSETimeOffset(representation) { + function calcMseTimeOffset(representation) { // The MSEOffset is offset from AST for media. 
It is Period@start - presentationTimeOffset const presentationOffset = representation.presentationTimeOffset; const periodStart = representation.adaptation.period.start; @@ -1292,7 +1312,7 @@ function DashManifestModel() { function getSupplementalPropertiesForAdaptation(adaptation) { if (!adaptation || !adaptation.hasOwnProperty(DashConstants.SUPPLEMENTAL_PROPERTY) || !adaptation.SupplementalProperty.length) return []; - return adaptation.SupplementalProperty.map( supp => { + return adaptation.SupplementalProperty.map(supp => { const s = new DescriptorType(); return s.init(supp); }); @@ -1300,7 +1320,7 @@ function DashManifestModel() { function getSupplementalPropertiesForRepresentation(representation) { if (!representation || !representation.hasOwnProperty(DashConstants.SUPPLEMENTAL_PROPERTY) || !representation.SupplementalProperty.length) return []; - return representation.SupplementalProperty.map( supp => { + return representation.SupplementalProperty.map(supp => { const s = new DescriptorType(); return s.init(supp); }); @@ -1330,6 +1350,7 @@ function DashManifestModel() { getAudioChannelConfigurationForAdaptation, getAudioChannelConfigurationForRepresentation, getAdaptationForIndex, + getAdaptationHasProtectedRepresentations, getIndexForAdaptation, getAdaptationForId, getAdaptationsForType, diff --git a/src/dash/utils/TimelineConverter.js b/src/dash/utils/TimelineConverter.js index 114cadac42..293f6c85eb 100644 --- a/src/dash/utils/TimelineConverter.js +++ b/src/dash/utils/TimelineConverter.js @@ -200,7 +200,7 @@ function TimelineConverter() { const now = calcPresentationTimeFromWallTime(new Date(), voPeriod); const timeShiftBufferDepth = voPeriod.mpd.timeShiftBufferDepth; const start = !isNaN(timeShiftBufferDepth) ? now - timeShiftBufferDepth : 0; - // check if we find a suitable period for that starttime. Otherwise we use the time closest to that + // check if we find a suitable period for that starttime. 
Otherwise, we use the time closest to that range.start = _adjustTimeBasedOnPeriodRanges(streams, start); range.end = !isNaN(range.start) && now < range.start ? now : _adjustTimeBasedOnPeriodRanges(streams, now, true); @@ -235,10 +235,19 @@ function TimelineConverter() { } streams.forEach((stream) => { - const adapter = stream.getAdapter(); - const mediaInfo = adapter.getMediaInfoForType(stream.getStreamInfo(), Constants.VIDEO) || adapter.getMediaInfoForType(stream.getStreamInfo(), Constants.AUDIO); - const voRepresentations = adapter.getVoRepresentations(mediaInfo); - const voRepresentation = voRepresentations[0]; + let voRepresentation = stream.getCurrentRepresentationForType(Constants.VIDEO); + if (!voRepresentation) { + voRepresentation = stream.getCurrentRepresentationForType(Constants.AUDIO) + } + + // If we still got not voRepresentation we are in the startup phase and nothing was selected yet. Use the default Representation + if (!voRepresentation) { + const adapter = stream.getAdapter(); + const mediaInfo = adapter.getMediaInfoForType(stream.getStreamInfo(), Constants.VIDEO) || adapter.getMediaInfoForType(stream.getStreamInfo(), Constants.AUDIO); + const voRepresentations = adapter.getVoRepresentations(mediaInfo); + voRepresentation = voRepresentations[0]; + } + let periodRange = { start: NaN, end: NaN }; if (voRepresentation) { diff --git a/src/dash/vo/MediaInfo.js b/src/dash/vo/MediaInfo.js index 9650dce0d6..5c804e1211 100644 --- a/src/dash/vo/MediaInfo.js +++ b/src/dash/vo/MediaInfo.js @@ -34,29 +34,31 @@ */ class MediaInfo { constructor() { - this.id = null; - this.index = null; - this.type = null; - this.streamInfo = null; - this.representationCount = 0; - this.labels = null; - this.lang = null; - this.viewpoint = null; + this.KID = null; this.accessibility = null; + this.adaptationSetSwitchingCompatibleIds = []; this.audioChannelConfiguration = null; - this.roles = null; + this.bitrateList = null; this.codec = null; - this.mimeType = null; 
this.contentProtection = null; - this.isText = false; - this.KID = null; - this.bitrateList = null; - this.isFragmented = null; + this.id = null; + this.index = null; this.isEmbedded = null; - this.selectionPriority = 1; - this.supplementalProperties = {}; + this.isFragmented = null; + this.hasProtectedRepresentations = false; + this.isText = false; + this.labels = null; + this.lang = null; + this.mimeType = null; + this.representationCount = 0; + this.roles = null; this.segmentAlignment = false; + this.selectionPriority = 1; + this.streamInfo = null; this.subSegmentAlignment = false; + this.supplementalProperties = {}; + this.type = null; + this.viewpoint = null; } } diff --git a/src/dash/vo/Representation.js b/src/dash/vo/Representation.js index d5b5ff45c5..2e1bd3561c 100644 --- a/src/dash/vo/Representation.js +++ b/src/dash/vo/Representation.js @@ -36,33 +36,37 @@ import DashConstants from '../constants/DashConstants.js'; class Representation { + constructor() { - this.id = null; - this.index = -1; + this.absoluteIndex = NaN; this.adaptation = null; - this.segmentInfoType = null; - this.initialization = null; + this.availabilityTimeComplete = true; + this.availabilityTimeOffset = 0; + this.bandwidth = NaN; + this.bitrateInKbit = NaN; + this.codecPrivateData = null; this.codecs = null; + this.fragmentDuration = null; + this.frameRate = null; + this.height = NaN; + this.id = null; + this.indexRange = null; + this.initialization = null; + this.maxPlayoutRate = NaN; + this.mediaFinishedInformation = { numberOfSegments: 0, mediaTimeOfLastSignaledSegment: NaN }; + this.mediaInfo = null; this.mimeType = null; - this.codecPrivateData = null; + this.mseTimeOffset = NaN; + this.presentationTimeOffset = 0; + this.qualityRanking = NaN; + this.range = null; + this.scanType = null; + this.segments = null; this.segmentDuration = NaN; - this.timescale = 1; + this.segmentInfoType = null; this.startNumber = 1; - this.indexRange = null; - this.range = null; - 
this.presentationTimeOffset = 0; - // Set the source buffer timeOffset to this - this.MSETimeOffset = NaN; - // The information we need in the DashHandler to determine whether the last segment has been loaded - this.mediaFinishedInformation = { numberOfSegments: 0, mediaTimeOfLastSignaledSegment: NaN }; - this.bandwidth = NaN; + this.timescale = 1; this.width = NaN; - this.height = NaN; - this.scanType = null; - this.maxPlayoutRate = NaN; - this.availabilityTimeOffset = 0; - this.availabilityTimeComplete = true; - this.frameRate = null; } hasInitialization() { diff --git a/src/dash/vo/RepresentationInfo.js b/src/dash/vo/RepresentationInfo.js deleted file mode 100644 index 9d55e8c460..0000000000 --- a/src/dash/vo/RepresentationInfo.js +++ /dev/null @@ -1,45 +0,0 @@ -/** - * The copyright in this software is being made available under the BSD License, - * included below. This software may be subject to other third party and contributor - * rights, including patent rights, and no such rights are granted under this license. - * - * Copyright (c) 2013, Dash Industry Forum. - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without modification, - * are permitted provided that the following conditions are met: - * * Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation and/or - * other materials provided with the distribution. - * * Neither the name of Dash Industry Forum nor the names of its - * contributors may be used to endorse or promote products derived from this software - * without specific prior written permission. 
- * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS AND ANY - * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED - * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. - * IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, - * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT - * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR - * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, - * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - */ -/** - * @class - * @ignore - */ -class RepresentationInfo { - constructor() { - this.id = null; - this.quality = null; - this.fragmentDuration = null; - this.mediaInfo = null; - this.MSETimeOffset = null; - } -} - -export default RepresentationInfo; diff --git a/src/dash/vo/Segment.js b/src/dash/vo/Segment.js index 33d6559ad7..8129ec978b 100644 --- a/src/dash/vo/Segment.js +++ b/src/dash/vo/Segment.js @@ -46,7 +46,7 @@ class Segment { this.replacementNumber = NaN; // This is supposed to match the time encoded in the media Segment this.mediaStartTime = NaN; - // When the source buffer timeOffset is set to MSETimeOffset this is the + // When the source buffer timeOffset is set to mseTimeOffset this is the // time that will match the seekTarget and video.currentTime this.presentationStartTime = NaN; // Do not schedule this segment until diff --git a/src/mss/MssFragmentInfoController.js b/src/mss/MssFragmentInfoController.js index 1a7e195b45..5f3ddee118 100644 --- a/src/mss/MssFragmentInfoController.js +++ b/src/mss/MssFragmentInfoController.js @@ -122,11 +122,10 @@ function MssFragmentInfoController(config) { // request.availabilityStartTime = segment.availabilityStartTime; // 
request.availabilityEndTime = segment.availabilityEndTime; // request.wallStartTime = segment.wallStartTime; - request.quality = representation.index; + request.bandwidth = representation.bandwidth; request.index = index++; - request.mediaInfo = streamProcessor.getMediaInfo(); request.adaptationIndex = representation.adaptation.index; - request.representationId = representation.id; + request.representation = representation; request.url = baseURLController.resolve(representation.path).url + adaptation.SegmentTemplate.media; request.url = request.url.replace('$Bandwidth$', representation.bandwidth); request.url = request.url.replace('$Time$', segment.tManifest ? segment.tManifest : segment.t); diff --git a/src/mss/MssFragmentMoofProcessor.js b/src/mss/MssFragmentMoofProcessor.js index c9fe6f27c4..03781d750e 100644 --- a/src/mss/MssFragmentMoofProcessor.js +++ b/src/mss/MssFragmentMoofProcessor.js @@ -32,6 +32,7 @@ import DashJSError from '../streaming/vo/DashJSError.js'; import MssErrors from './errors/MssErrors.js'; import Events from '../streaming/MediaPlayerEvents.js'; +import FactoryMaker from '../core/FactoryMaker.js'; /** * @module MssFragmentMoofProcessor @@ -219,7 +220,7 @@ function MssFragmentMoofProcessor(config) { const isoFile = ISOBoxer.parseBuffer(e.response); // Update track_Id in tfhd box const tfhd = isoFile.fetch('tfhd'); - tfhd.track_ID = e.request.mediaInfo.index + 1; + tfhd.track_ID = e.request.representation.mediaInfo.index + 1; // Add tfdt box let tfdt = isoFile.fetch('tfdt'); @@ -317,7 +318,7 @@ function MssFragmentMoofProcessor(config) { const isoFile = ISOBoxer.parseBuffer(e.response); // Update track_Id in tfhd box const tfhd = isoFile.fetch('tfhd'); - tfhd.track_ID = e.request.mediaInfo.index + 1; + tfhd.track_ID = e.request.representation.mediaInfo.index + 1; // Add tfdt box let tfdt = isoFile.fetch('tfdt'); @@ -342,9 +343,9 @@ function MssFragmentMoofProcessor(config) { } instance = { - convertFragment: convertFragment, - 
updateSegmentList: updateSegmentList, - getType: getType + convertFragment, + updateSegmentList, + getType }; setup(); @@ -352,4 +353,4 @@ function MssFragmentMoofProcessor(config) { } MssFragmentMoofProcessor.__dashjs_factory_name = 'MssFragmentMoofProcessor'; -export default dashjs.FactoryMaker.getClassFactory(MssFragmentMoofProcessor); /* jshint ignore:line */ +export default FactoryMaker.getClassFactory(MssFragmentMoofProcessor); diff --git a/src/mss/MssFragmentMoovProcessor.js b/src/mss/MssFragmentMoovProcessor.js index 1d07bfec0d..16c88b4964 100644 --- a/src/mss/MssFragmentMoovProcessor.js +++ b/src/mss/MssFragmentMoovProcessor.js @@ -29,6 +29,7 @@ * POSSIBILITY OF SUCH DAMAGE. */ import MssErrors from './errors/MssErrors.js'; +import FactoryMaker from '../core/FactoryMaker.js'; /** * @module MssFragmentMoovProcessor @@ -653,4 +654,4 @@ function MssFragmentMoovProcessor(config) { } MssFragmentMoovProcessor.__dashjs_factory_name = 'MssFragmentMoovProcessor'; -export default dashjs.FactoryMaker.getClassFactory(MssFragmentMoovProcessor); /* jshint ignore:line */ +export default FactoryMaker.getClassFactory(MssFragmentMoovProcessor); diff --git a/src/mss/MssFragmentProcessor.js b/src/mss/MssFragmentProcessor.js index b22fc1c44e..9ff5600c02 100644 --- a/src/mss/MssFragmentProcessor.js +++ b/src/mss/MssFragmentProcessor.js @@ -32,6 +32,7 @@ import MssFragmentMoofProcessor from './MssFragmentMoofProcessor.js'; import MssFragmentMoovProcessor from './MssFragmentMoovProcessor.js'; import {HTTPRequest} from '../streaming/vo/metrics/HTTPRequest.js'; +import FactoryMaker from '../core/FactoryMaker.js'; // Add specific box processors not provided by codem-isoboxer library @@ -185,4 +186,4 @@ function MssFragmentProcessor(config) { } MssFragmentProcessor.__dashjs_factory_name = 'MssFragmentProcessor'; -export default dashjs.FactoryMaker.getClassFactory(MssFragmentProcessor); /* jshint ignore:line */ +export default FactoryMaker.getClassFactory(MssFragmentProcessor); diff 
--git a/src/mss/MssHandler.js b/src/mss/MssHandler.js index e63dafc090..2d9893a439 100644 --- a/src/mss/MssHandler.js +++ b/src/mss/MssHandler.js @@ -77,14 +77,13 @@ function MssHandler(config) { const chunk = new DataChunk(); chunk.streamId = streamId; - chunk.mediaInfo = request.mediaInfo; chunk.segmentType = request.type; chunk.start = request.startTime; chunk.duration = request.duration; chunk.end = chunk.start + chunk.duration; chunk.index = request.index; - chunk.quality = request.quality; - chunk.representationId = request.representationId; + chunk.bandwidth = request.bandwidth; + chunk.representation = request.representation; chunk.endFragment = endFragment; return chunk; @@ -134,9 +133,8 @@ function MssHandler(config) { request.mediaType = representation.adaptation.type; request.type = initSegmentType; request.range = representation.range; - request.quality = representation.index; - request.mediaInfo = mediaInfo; - request.representationId = representation.id; + request.bandwidth = representation.bandwidth; + request.representation = representation; const chunk = createDataChunk(request, mediaInfo.streamInfo.id, e.type !== events.FRAGMENT_LOADING_PROGRESS); @@ -175,7 +173,7 @@ function MssHandler(config) { } // Start MssFragmentInfoControllers in case of start-over streams - let manifestInfo = e.request.mediaInfo.streamInfo.manifestInfo; + let manifestInfo = e.request.representation.mediaInfo.streamInfo.manifestInfo; if (!manifestInfo.isDynamic && manifestInfo.dvrWindowSize !== Infinity) { startFragmentInfoControllers(); } diff --git a/src/offline/OfflineStreamProcessor.js b/src/offline/OfflineStreamProcessor.js index 4d64c6d58f..f36e08d912 100644 --- a/src/offline/OfflineStreamProcessor.js +++ b/src/offline/OfflineStreamProcessor.js @@ -309,7 +309,7 @@ function OfflineStreamProcessor(config) { let voRepresentations = adapter.getVoRepresentations(mediaInfo); // get representation VO according to id. 
- let quality = voRepresentations.findIndex((representation) => { + let quality = voRepresentations.find((representation) => { return representation.id === bitrate.id; }); @@ -318,7 +318,7 @@ function OfflineStreamProcessor(config) { return; } - representationController.updateData(null, voRepresentations, type, mediaInfo.isFragmented, quality); + representationController.updateData(voRepresentations, mediaInfo.isFragmented, quality.id); } function isUpdating() { diff --git a/src/streaming/FragmentLoader.js b/src/streaming/FragmentLoader.js index c1db290f5c..f531888099 100644 --- a/src/streaming/FragmentLoader.js +++ b/src/streaming/FragmentLoader.js @@ -103,6 +103,8 @@ function FragmentLoader(config) { stream: event.stream, streamId }); + + // Only in case of FetchAPI and low latency streaming. XHR does not have data attribute. if (event.data) { eventBus.trigger(events.LOADING_DATA_PROGRESS, { request: request, diff --git a/src/streaming/ManifestUpdater.js b/src/streaming/ManifestUpdater.js index c6426d1e42..0904e1c9a0 100644 --- a/src/streaming/ManifestUpdater.js +++ b/src/streaming/ManifestUpdater.js @@ -296,12 +296,12 @@ function ManifestUpdater() { } instance = { - initialize: initialize, - setManifest: setManifest, - refreshManifest: refreshManifest, - getIsUpdating: getIsUpdating, - setConfig: setConfig, - reset: reset + initialize, + setManifest, + refreshManifest, + getIsUpdating, + setConfig, + reset }; setup(); diff --git a/src/streaming/MediaPlayer.js b/src/streaming/MediaPlayer.js index 275e02e9f4..e8948c5c64 100644 --- a/src/streaming/MediaPlayer.js +++ b/src/streaming/MediaPlayer.js @@ -84,7 +84,7 @@ import ThroughputController from './controllers/ThroughputController.js'; /** * The media types - * @typedef {("video" | "audio" | "text" | "image")} MediaType + * @typedef {('video' | 'audio' | 'text' | 'image')} MediaType */ /** @@ -398,7 +398,9 @@ function MediaPlayer() { mediaController.setConfig({ domStorage, settings, - customParametersModel + 
mediaPlayerModel, + customParametersModel, + videoModel }); mediaPlayerModel.setConfig({ @@ -954,100 +956,6 @@ function MediaPlayer() { return _getAsUTC(duration()); } - /* - --------------------------------------------------------------------------- - - AUTO BITRATE - - --------------------------------------------------------------------------- - */ - /** - * Gets the top quality BitrateInfo checking portal limit and max allowed. - * It calls getMaxAllowedIndexFor internally - * - * @param {MediaType} type - 'video' or 'audio' - * @memberof module:MediaPlayer - * @returns {BitrateInfo | null} - * @throws {@link module:MediaPlayer~STREAMING_NOT_INITIALIZED_ERROR STREAMING_NOT_INITIALIZED_ERROR} if called before initializePlayback function - * @instance - */ - function getTopBitrateInfoFor(type) { - if (!streamingInitialized) { - throw STREAMING_NOT_INITIALIZED_ERROR; - } - return abrController.getTopBitrateInfoFor(type); - } - - /** - * Gets the current download quality for media type video, audio or images. For video and audio types the ABR - * rules update this value before every new download unless autoSwitchBitrate is set to false. For 'image' - * type, thumbnails, there is no ABR algorithm and quality is set manually. - * - * @param {MediaType} type - 'video', 'audio' or 'image' (thumbnails) - * @returns {number} the quality index, 0 corresponding to the lowest bitrate - * @memberof module:MediaPlayer - * @see {@link module:MediaPlayer#setQualityFor setQualityFor()} - * @throws {@link module:MediaPlayer~STREAMING_NOT_INITIALIZED_ERROR STREAMING_NOT_INITIALIZED_ERROR} if called before initializePlayback function - * @instance - */ - function getQualityFor(type) { - if (!streamingInitialized) { - throw STREAMING_NOT_INITIALIZED_ERROR; - } - if (type === Constants.IMAGE) { - const activeStream = getActiveStream(); - if (!activeStream) { - return -1; - } - const thumbnailController = activeStream.getThumbnailController(); - - return !thumbnailController ? 
-1 : thumbnailController.getCurrentTrackIndex(); - } - return abrController.getQualityFor(type); - } - - /** - * Sets the current quality for media type instead of letting the ABR Heuristics automatically selecting it. - * This value will be overwritten by the ABR rules unless autoSwitchBitrate is set to false. - * - * @param {MediaType} type - 'video', 'audio' or 'image' - * @param {number} value - the quality index, 0 corresponding to the lowest bitrate - * @param {boolean} forceReplace - true if segments have to be replaced by segments of the new quality - * @memberof module:MediaPlayer - * @see {@link module:MediaPlayer#getQualityFor getQualityFor()} - * @throws {@link module:MediaPlayer~STREAMING_NOT_INITIALIZED_ERROR STREAMING_NOT_INITIALIZED_ERROR} if called before initializePlayback function - * @instance - */ - function setQualityFor(type, value, forceReplace = false) { - if (!streamingInitialized) { - throw STREAMING_NOT_INITIALIZED_ERROR; - } - if (type === Constants.IMAGE) { - const activeStream = getActiveStream(); - if (!activeStream) { - return; - } - const thumbnailController = activeStream.getThumbnailController(); - if (thumbnailController) { - thumbnailController.setTrackByIndex(value); - } - } - abrController.setPlaybackQuality(type, streamController.getActiveStreamInfo(), value, { forceReplace }); - } - - /** - * Update the video element size variables - * Should be called on window resize (or any other time player is resized). Fullscreen does trigger a window resize event. - * - * Once windowResizeEventCalled = true, abrController.checkPortalSize() will use element size variables rather than querying clientWidth every time. 
- * - * @memberof module:MediaPlayer - * @instance - */ - function updatePortalSize() { - abrController.setElementSize(); - abrController.setWindowResizeEventCalled(true); - } /* --------------------------------------------------------------------------- @@ -1225,12 +1133,14 @@ function MediaPlayer() { * Returns the average throughput computed in the ThroughputController in kbit/s * * @param {MediaType} type + * @param {string} calculationMode + * @param {number} sampleSize * @return {number} value * @memberof module:MediaPlayer * @instance */ - function getAverageThroughput(type) { - return throughputController ? throughputController.getAverageThroughput(type) : 0; + function getAverageThroughput(type, calculationMode = null, sampleSize = NaN) { + return throughputController ? throughputController.getAverageThroughput(type, calculationMode, sampleSize) : 0; } /** @@ -1484,23 +1394,129 @@ function MediaPlayer() { /* --------------------------------------------------------------------------- - STREAM AND TRACK MANAGEMENT + QUALITY AND TRACK MANAGEMENT --------------------------------------------------------------------------- */ + + /** + * Gets the current download quality for media type video, audio or images. For video and audio types the ABR + * rules update this value before every new download unless autoSwitchBitrate is set to false. For 'image' + * type, thumbnails, there is no ABR algorithm and quality is set manually. 
+ * + * @param {MediaType} type - 'video', 'audio' or 'image' (thumbnails) + * @returns {Representation | null} the quality index, 0 corresponding to the lowest bitrate + * @memberof module:MediaPlayer + * @see {@link module:MediaPlayer#getCurrentRepresentationForType getCurrentRepresentationForType()} + * @throws {@link module:MediaPlayer~STREAMING_NOT_INITIALIZED_ERROR STREAMING_NOT_INITIALIZED_ERROR} if called before initializePlayback function + * @instance + */ + function getCurrentRepresentationForType(type) { + if (!streamingInitialized) { + throw STREAMING_NOT_INITIALIZED_ERROR; + } + + if (type !== Constants.IMAGE && type !== Constants.VIDEO && type !== Constants.AUDIO) { + return null; + } + + const activeStream = getActiveStream(); + if (!activeStream) { + return null; + } + + if (type === Constants.IMAGE) { + const thumbnailController = activeStream.getThumbnailController(); + return !thumbnailController ? -1 : thumbnailController.getCurrentTrack(); + } + + return activeStream.getCurrentRepresentationForType(type); + } + + /** + * Sets the current quality for media type instead of letting the ABR Heuristics automatically selecting it. + * This value will be overwritten by the ABR rules unless autoSwitchBitrate is set to false. 
+ * + * @param {MediaType} type - 'video', 'audio' or 'image' + * @param {number} value - the quality index, 0 corresponding to the lowest bitrate + * @param {boolean} forceReplace - true if segments have to be replaced by segments of the new quality + * @memberof module:MediaPlayer + * @throws {@link module:MediaPlayer~STREAMING_NOT_INITIALIZED_ERROR STREAMING_NOT_INITIALIZED_ERROR} if called before initializePlayback function + * @instance + */ + function setRepresentationForTypeById(type, id, forceReplace = false) { + if (type !== Constants.IMAGE && type !== Constants.VIDEO && type !== Constants.AUDIO) { + return; + } + if (!streamingInitialized) { + throw STREAMING_NOT_INITIALIZED_ERROR; + } + const activeStream = getActiveStream(); + if (!activeStream) { + return; + } + if (type === Constants.IMAGE) { + const thumbnailController = activeStream.getThumbnailController(); + if (thumbnailController) { + thumbnailController.setTrackById(id); + } + } else { + const representation = activeStream.getRepresentationForTypeById(type, id); + if (representation) { + abrController.setPlaybackQuality(type, streamController.getActiveStreamInfo(), representation, { forceReplace }); + } + } + } + + /** + * Sets the current quality for media type instead of letting the ABR Heuristics automatically selecting it. + * This value will be overwritten by the ABR rules unless autoSwitchBitrate is set to false. 
+ * + * @param {MediaType} type - 'video', 'audio' or 'image' + * @param {number} value - the quality index, 0 corresponding to the lowest absolute index + * @param {boolean} forceReplace - true if segments have to be replaced by segments of the new quality + * @memberof module:MediaPlayer + * @throws {@link module:MediaPlayer~STREAMING_NOT_INITIALIZED_ERROR STREAMING_NOT_INITIALIZED_ERROR} if called before initializePlayback function + * @instance + */ + function setRepresentationForTypeByIndex(type, index, forceReplace = false) { + if (type !== Constants.IMAGE && type !== Constants.VIDEO && type !== Constants.AUDIO) { + return; + } + if (!streamingInitialized) { + throw STREAMING_NOT_INITIALIZED_ERROR; + } + const activeStream = getActiveStream(); + if (!activeStream) { + return; + } + if (type === Constants.IMAGE) { + const thumbnailController = activeStream.getThumbnailController(); + if (thumbnailController) { + thumbnailController.setTrackByIndex(index); + } + } else { + const representation = activeStream.getRepresentationForTypeByIndex(type, index); + if (representation) { + abrController.setPlaybackQuality(type, streamController.getActiveStreamInfo(), representation, { forceReplace }); + } + } + } + /** * @param {MediaType} type + * @param {string} streamId * @returns {Array} * @memberof module:MediaPlayer * @throws {@link module:MediaPlayer~STREAMING_NOT_INITIALIZED_ERROR STREAMING_NOT_INITIALIZED_ERROR} if called before initializePlayback function * @instance */ - function getBitrateInfoListFor(type) { + function getRepresentationsByType(type, streamId = null) { if (!streamingInitialized) { throw STREAMING_NOT_INITIALIZED_ERROR; } - let stream = getActiveStream(); - return stream ? stream.getBitrateListFor(type) : []; + let stream = streamId ? streamController.getStreamById(streamId) : getActiveStream(); + return stream ? 
stream.getRepresentationsByType(type) : []; } /** @@ -2248,7 +2264,8 @@ function MediaPlayer() { cmsdModel.setConfig({}); // initializes controller - throughputController.initialize() + mediaController.initialize(); + throughputController.initialize(); abrController.initialize(); streamController.initialize(autoPlay, protectionData); textController.initialize(); @@ -2494,104 +2511,103 @@ function MediaPlayer() { } instance = { - initialize, - setConfig, - on, - off, - extend, - attachView, + addABRCustomRule, + addUTCTimingSource, + attachProtectionController, attachSource, - isReady, - preload, - play, - isPaused, - pause, - isSeeking, - isDynamic, - getLowLatencyModeEnabled, - seek, - seekToOriginalLive, - setPlaybackRate, - getPlaybackRate, - setMute, - isMuted, - setVolume, - getVolume, - time, + attachTTMLRenderingDiv, + attachView, + attachVttRenderingDiv, + clearDefaultUTCTimingSources, + convertToTimeCode, + destroy, duration, - timeAsUTC, durationAsUTC, + enableForcedTextStreaming, + enableText, + extend, + formatUTC, + getABRCustomRules, getActiveStream, - getDVRWindowSize, - getDVRSeekOffset, + getAutoPlay, getAvailableBaseUrls, getAvailableLocations, - getTargetLiveDelay, - convertToTimeCode, - formatUTC, - getVersion, - getDebug, + getAverageThroughput, getBufferLength, - getTTMLRenderingDiv, - getVideoElement, - getSource, - updateSource, getCurrentLiveLatency, - getTopBitrateInfoFor, - setAutoPlay, - getAutoPlay, + getCurrentSteeringResponseData, + getCurrentTextTrackIndex, + getCurrentTrackFor, + getDVRSeekOffset, + getDVRWindowSize, + getDashAdapter, getDashMetrics, - getQualityFor, - setQualityFor, - updatePortalSize, - enableText, - enableForcedTextStreaming, - isTextEnabled, - setTextTrack, - getBitrateInfoListFor, + getDebug, + getInitialMediaSettingsFor, + getLowLatencyModeEnabled, + getOfflineController, + getPlaybackRate, + getProtectionController, + getCurrentRepresentationForType, + getRepresentationsByType, + getSettings, + getSource, 
getStreamsFromManifest, + getTTMLRenderingDiv, + getTargetLiveDelay, getTracksFor, getTracksForTypeFromManifest, - getCurrentTrackFor, - setInitialMediaSettingsFor, - getInitialMediaSettingsFor, - setCurrentTrack, - addABRCustomRule, + getVersion, + getVideoElement, + getVolume, + getXHRWithCredentialsForType, + initialize, + isDynamic, + isMuted, + isPaused, + isReady, + isSeeking, + isTextEnabled, + off, + on, + pause, + play, + preload, + provideThumbnail, + registerCustomCapabilitiesFilter, + registerLicenseRequestFilter, + registerLicenseResponseFilter, removeABRCustomRule, removeAllABRCustomRule, - getABRCustomRules, - getAverageThroughput, - retrieveManifest, - addUTCTimingSource, removeUTCTimingSource, - clearDefaultUTCTimingSources, + reset, + resetCustomInitialTrackSelectionFunction, + resetSettings, restoreDefaultUTCTimingSources, - setXHRWithCredentialsForType, - getXHRWithCredentialsForType, - getProtectionController, - attachProtectionController, + retrieveManifest, + seek, + seekToOriginalLive, + setAutoPlay, + setConfig, + setCurrentTrack, + setCustomInitialTrackSelectionFunction, + setInitialMediaSettingsFor, + setMute, + setPlaybackRate, setProtectionData, - registerLicenseRequestFilter, - registerLicenseResponseFilter, + setRepresentationForTypeByIndex, + setRepresentationForTypeById, + setTextTrack, + setVolume, + setXHRWithCredentialsForType, + time, + timeAsUTC, + triggerSteeringRequest, + unregisterCustomCapabilitiesFilter, unregisterLicenseRequestFilter, unregisterLicenseResponseFilter, - registerCustomCapabilitiesFilter, - unregisterCustomCapabilitiesFilter, - setCustomInitialTrackSelectionFunction, - resetCustomInitialTrackSelectionFunction, - attachTTMLRenderingDiv, - attachVttRenderingDiv, - getCurrentTextTrackIndex, - provideThumbnail, - getDashAdapter, - getOfflineController, - triggerSteeringRequest, - getCurrentSteeringResponseData, - getSettings, updateSettings, - resetSettings, - reset, - destroy + updateSource, }; setup(); diff 
--git a/src/streaming/SourceBufferSink.js b/src/streaming/SourceBufferSink.js index cd67257f93..d8db9be815 100644 --- a/src/streaming/SourceBufferSink.js +++ b/src/streaming/SourceBufferSink.js @@ -71,10 +71,13 @@ function SourceBufferSink(config) { logger = Debug(context).getInstance().getLogger(instance); } + function _getCodecStringForRepresentation(representation) { + return representation.mimeType + ';codecs="' + representation.codecs + '"'; + } + function initializeForStreamSwitch(mInfo, selectedRepresentation, oldSourceBufferSink) { mediaInfo = mInfo; type = mediaInfo.type; - const codec = mediaInfo.codec; _copyPreviousSinkData(oldSourceBufferSink); _addEventListeners(); @@ -85,20 +88,22 @@ function SourceBufferSink(config) { promises.push(updateAppendWindow(mediaInfo.streamInfo)); if (settings.get().streaming.buffer.useChangeTypeForTrackSwitch) { - promises.push(changeType(codec)); + promises.push(changeType(selectedRepresentation)); } - if (selectedRepresentation && selectedRepresentation.MSETimeOffset !== undefined) { - promises.push(updateTimestampOffset(selectedRepresentation.MSETimeOffset)); + if (selectedRepresentation && selectedRepresentation.mseTimeOffset !== undefined) { + promises.push(updateTimestampOffset(selectedRepresentation.mseTimeOffset)); } return Promise.all(promises); } - function changeType(codec) { + function changeType(representation) { + const codec = _getCodecStringForRepresentation(representation); return new Promise((resolve) => { _waitForUpdateEnd(() => { if (buffer.changeType) { + logger.debug(`Changing SourceBuffer codec to ${codec}`); buffer.changeType(codec); } resolve(); @@ -113,7 +118,7 @@ function SourceBufferSink(config) { function initializeForFirstUse(streamInfo, mInfo, selectedRepresentation) { mediaInfo = mInfo; type = mediaInfo.type; - const codec = mediaInfo.codec; + const codec = selectedRepresentation ? 
_getCodecStringForRepresentation(selectedRepresentation) : mInfo.codec; try { // Safari claims to support anything starting 'application/mp4'. // it definitely doesn't understand 'application/mp4;codecs="stpp"' @@ -131,8 +136,8 @@ function SourceBufferSink(config) { promises.push(updateAppendWindow(mediaInfo.streamInfo)); - if (selectedRepresentation && selectedRepresentation.MSETimeOffset !== undefined) { - promises.push(updateTimestampOffset(selectedRepresentation.MSETimeOffset)); + if (selectedRepresentation && selectedRepresentation.mseTimeOffset !== undefined) { + promises.push(updateTimestampOffset(selectedRepresentation.mseTimeOffset)); } return Promise.all(promises); @@ -225,7 +230,7 @@ function SourceBufferSink(config) { }); } - function updateTimestampOffset(MSETimeOffset) { + function updateTimestampOffset(mseTimeOffset) { return new Promise((resolve) => { if (!buffer) { @@ -235,9 +240,9 @@ function SourceBufferSink(config) { _waitForUpdateEnd(() => { try { - if (buffer.timestampOffset !== MSETimeOffset && !isNaN(MSETimeOffset)) { - buffer.timestampOffset = MSETimeOffset; - logger.debug(`Set MSE timestamp offset to ${MSETimeOffset}`); + if (buffer.timestampOffset !== mseTimeOffset && !isNaN(mseTimeOffset)) { + buffer.timestampOffset = mseTimeOffset; + logger.debug(`Set MSE timestamp offset to ${mseTimeOffset}`); } resolve(); } catch (e) { @@ -467,18 +472,18 @@ function SourceBufferSink(config) { } instance = { - getType, + abort, + append, + changeType, getAllBufferRanges, getBuffer, - append, + getType, + initializeForFirstUse, + initializeForStreamSwitch, remove, - abort, reset, - updateTimestampOffset, - initializeForStreamSwitch, - initializeForFirstUse, updateAppendWindow, - changeType + updateTimestampOffset, }; setup(); diff --git a/src/streaming/Stream.js b/src/streaming/Stream.js index a7356626b3..61081d9fbe 100644 --- a/src/streaming/Stream.js +++ b/src/streaming/Stream.js @@ -82,8 +82,6 @@ function Stream(config) { hasFinishedBuffering, 
hasVideoTrack, hasAudioTrack, - updateError, - isUpdating, fragmentController, thumbnailController, segmentBlacklistController, @@ -140,7 +138,6 @@ function Stream(config) { */ function registerEvents() { eventBus.on(Events.BUFFERING_COMPLETED, onBufferingCompleted, instance); - eventBus.on(Events.DATA_UPDATE_COMPLETED, onDataUpdateCompleted, instance); eventBus.on(Events.INBAND_EVENTS, onInbandEvents, instance); } @@ -148,7 +145,6 @@ function Stream(config) { * Unregister the streaming events */ function unRegisterEvents() { - eventBus.off(Events.DATA_UPDATE_COMPLETED, onDataUpdateCompleted, instance); eventBus.off(Events.BUFFERING_COMPLETED, onBufferingCompleted, instance); eventBus.off(Events.INBAND_EVENTS, onInbandEvents, instance); } @@ -274,7 +270,6 @@ function Stream(config) { return new Promise((resolve, reject) => { checkConfig(); - isUpdating = true; _addInlineEvents(); @@ -292,14 +287,12 @@ function Stream(config) { return _createBufferSinks(previousBufferSinks) }) .then((bufferSinks) => { - isUpdating = false; - if (streamProcessors.length === 0) { const msg = 'No streams to play.'; errHandler.error(new DashJSError(Errors.MANIFEST_ERROR_ID_NOSTREAMS_CODE, msg, manifestModel.getValue())); logger.fatal(msg); } else { - _checkIfInitializationCompleted(); + _initializationCompleted(); } if (mediaSource) { @@ -416,7 +409,6 @@ function Stream(config) { initialMediaInfo = mediaController.getCurrentTrackFor(type, streamInfo.id); if (initialMediaInfo) { - abrController.updateTopQualityIndex(initialMediaInfo); // In case of mixed fragmented and embedded text tracks, check if initial selected text track is not an embedded track return streamProcessor.selectMediaInfo((type !== Constants.TEXT || !initialMediaInfo.isEmbedded) ? 
initialMediaInfo : allMediaForType[0]); } @@ -486,10 +478,7 @@ function Stream(config) { streamProcessor.initialize(mediaSource, hasVideoTrack, isFragmented); streamProcessors.push(streamProcessor); - - for (let i = 0; i < allMediaForType.length; i++) { - streamProcessor.addMediaInfo(allMediaForType[i]); - } + streamProcessor.setMediaInfoArray(allMediaForType); if (type === Constants.TEXT) { textController.addMediaInfosToBuffer(streamInfo, type, allMediaForType, fragmentModel); @@ -604,8 +593,6 @@ function Stream(config) { isInitialized = false; hasVideoTrack = false; hasAudioTrack = false; - updateError = {}; - isUpdating = false; isEndedEventSignaled = false; trackChangedEvents = []; } @@ -683,16 +670,69 @@ function Stream(config) { * @returns {Array} * @memberof Stream# */ - function getBitrateListFor(type) { + function getRepresentationsByType(type) { checkConfig(); if (type === Constants.IMAGE) { if (!thumbnailController) { return []; } - return thumbnailController.getBitrateList(); + return thumbnailController.getPossibleVoRepresentations(); } const mediaInfo = getMediaInfo(type); - return abrController.getBitrateList(mediaInfo); + return abrController.getPossibleVoRepresentations(mediaInfo, true); + } + + /** + * @param {string} type + * @param {string} id + * @returns {Array} + * @memberof Stream# + */ + function getRepresentationForTypeById(type, id) { + let possibleVoRepresentations; + + if (type === Constants.IMAGE) { + if (!thumbnailController) { + return null; + } + possibleVoRepresentations = thumbnailController.getPossibleVoRepresentations(); + } else { + const mediaInfo = getMediaInfo(type); + possibleVoRepresentations = abrController.getPossibleVoRepresentations(mediaInfo, true); + } + + if (!possibleVoRepresentations || possibleVoRepresentations.length === 0) { + return null + } + const targetReps = possibleVoRepresentations.filter((rep) => { + return rep.id === id + }) + + return targetReps && targetReps.length > 0 ? 
targetReps[0] : null; + } + + /** + * @param {string} type + * @param {number} index + * @returns {Array} + * @memberof Stream# + */ + function getRepresentationForTypeByIndex(type, index) { + let possibleVoRepresentations; + + if (type === Constants.IMAGE) { + if (!thumbnailController) { + return null; + } + possibleVoRepresentations = thumbnailController.getPossibleVoRepresentations(); + } else { + const mediaInfo = getMediaInfo(type); + possibleVoRepresentations = abrController.getPossibleVoRepresentations(mediaInfo, true); + } + + index = Math.max(Math.min(index, possibleVoRepresentations.length - 1), 0) + + return possibleVoRepresentations[index]; } function onProtectionError(event) { @@ -712,8 +752,6 @@ function Stream(config) { let mediaInfo = e.newMediaInfo; let manifest = manifestModel.getValue(); - adapter.setCurrentMediaInfo(streamInfo.id, mediaInfo.type, mediaInfo); - let processor = getProcessorForMediaInfo(mediaInfo); if (!processor) return; @@ -730,9 +768,6 @@ function Stream(config) { } else { processor.selectMediaInfo(mediaInfo) .then(() => { - if (mediaInfo.type === Constants.VIDEO || mediaInfo.type === Constants.AUDIO) { - abrController.updateTopQualityIndex(mediaInfo); - } processor.prepareTrackSwitch(); }); } @@ -755,49 +790,6 @@ function Stream(config) { } } - function _checkIfInitializationCompleted() { - const ln = streamProcessors.length; - const hasError = !!updateError.audio || !!updateError.video; - let error = hasError ? 
new DashJSError(Errors.DATA_UPDATE_FAILED_ERROR_CODE, Errors.DATA_UPDATE_FAILED_ERROR_MESSAGE) : null; - - for (let i = 0; i < ln; i++) { - if (streamProcessors[i].isUpdating() || isUpdating) { - return; - } - } - - if (protectionController) { - // Need to check if streamProcessors exists because streamProcessors - // could be cleared in case an error is detected while initializing DRM keysystem - protectionController.clearMediaInfoArray(); - for (let i = 0; i < ln && streamProcessors[i]; i++) { - const type = streamProcessors[i].getType(); - const mediaInfo = streamProcessors[i].getMediaInfo(); - if (type === Constants.AUDIO || - type === Constants.VIDEO || - (type === Constants.TEXT && mediaInfo.isFragmented)) { - let mediaInfo = streamProcessors[i].getMediaInfo(); - if (mediaInfo) { - protectionController.initializeForMedia(mediaInfo); - } - } - } - protectionController.handleKeySystemFromManifest(); - } - - if (error) { - errHandler.error(error); - } else if (!isInitialized) { - isInitialized = true; - videoModel.waitForReadyState(Constants.VIDEO_ELEMENT_READY_STATES.HAVE_METADATA, () => { - eventBus.trigger(Events.STREAM_INITIALIZED, { - streamInfo: streamInfo - }); - }) - } - - } - function getMediaInfo(type) { let streamProcessor = null; @@ -835,11 +827,6 @@ function Stream(config) { eventBus.trigger(Events.STREAM_BUFFERING_COMPLETED, { streamInfo: streamInfo }, { streamInfo }); } - function onDataUpdateCompleted(e) { - updateError[e.mediaType] = e.error; - _checkIfInitializationCompleted(); - } - function onInbandEvents(e) { if (eventController) { eventController.addInbandEvents(e.events, streamInfo.id); @@ -893,7 +880,6 @@ function Stream(config) { function updateData(updatedStreamInfo) { return new Promise((resolve) => { - isUpdating = true; streamInfo = updatedStreamInfo; if (eventController) { @@ -913,15 +899,11 @@ function Stream(config) { }); // Check if AdaptationSet has not been removed in MPD update + 
streamProcessor.setMediaInfoArray(allMediaForType); if (allMediaForType) { - // Remove the current mediaInfo objects before adding the updated ones - streamProcessor.clearMediaInfoArray(); for (let j = 0; j < allMediaForType.length; j++) { - const mInfo = allMediaForType[j]; - streamProcessor.addMediaInfo(allMediaForType[j]); - if (adapter.areMediaInfosEqual(currentMediaInfo, mInfo)) { - abrController.updateTopQualityIndex(mInfo); - promises.push(streamProcessor.selectMediaInfo(mInfo)) + if (adapter.areMediaInfosEqual(currentMediaInfo, allMediaForType[j])) { + promises.push(streamProcessor.selectMediaInfo(allMediaForType[j])) } } } @@ -929,8 +911,9 @@ function Stream(config) { Promise.all(promises) .then(() => { - promises = []; + let promises = []; + // Only relevant for MSS while (trackChangedEvents.length > 0) { let trackChangedEvent = trackChangedEvents.pop(); let mediaInfo = trackChangedEvent.newMediaInfo; @@ -943,91 +926,47 @@ function Stream(config) { return Promise.all(promises) }) .then(() => { - isUpdating = false; - _checkIfInitializationCompleted(); + _initializationCompleted(); eventBus.trigger(Events.STREAM_UPDATED, { streamInfo: streamInfo }); resolve(); }) + .catch((e) => { + errHandler.error(e); + }) }) } - function isMediaCodecCompatible(newStream, previousStream = null) { - return compareCodecs(newStream, Constants.VIDEO, previousStream) && compareCodecs(newStream, Constants.AUDIO, previousStream); - } - - function isProtectionCompatible(newStream) { - if (!newStream) { - return true; - } - return _compareProtectionConfig(Constants.VIDEO, newStream) && _compareProtectionConfig(Constants.AUDIO, newStream); - } - - function _compareProtectionConfig(type, newStream) { - const currentStreamInfo = getStreamInfo(); - const newStreamInfo = newStream.getStreamInfo(); - - if (!newStreamInfo || !currentStreamInfo) { - return true; - } - - const newAdaptation = adapter.getAdaptationForType(newStreamInfo.index, type, newStreamInfo); - const currentAdaptation 
= adapter.getAdaptationForType(currentStreamInfo.index, type, currentStreamInfo); - - if (!newAdaptation || !currentAdaptation) { - // If there is no adaptation for neither the old or the new stream they're compatible - return !newAdaptation && !currentAdaptation; - } - - // If the current period is unencrypted and the upcoming one is encrypted we need to reset sourcebuffers. - return !(!_isAdaptationDrmProtected(currentAdaptation) && _isAdaptationDrmProtected(newAdaptation)); - } - - function _isAdaptationDrmProtected(adaptation) { - - if (!adaptation) { - // If there is no adaptation for neither the old or the new stream they're compatible - return false; - } - - // If the current period is unencrypted and the upcoming one is encrypted we need to reset sourcebuffers. - return !!(adaptation.ContentProtection || (adaptation.Representation.length > 0 && adaptation.Representation[0].ContentProtection)); - } - - function compareCodecs(newStream, type, previousStream = null) { - if (!newStream || !newStream.hasOwnProperty('getStreamInfo')) { - return false; - } - const newStreamInfo = newStream.getStreamInfo(); - const currentStreamInfo = previousStream ? 
previousStream.getStreamInfo() : getStreamInfo(); + function _initializationCompleted() { + const ln = streamProcessors.length; - if (!newStreamInfo || !currentStreamInfo) { - return false; + if (protectionController) { + // Need to check if streamProcessors exists because streamProcessors + // could be cleared in case an error is detected while initializing DRM keysystem + protectionController.clearMediaInfoArray(); + for (let i = 0; i < ln && streamProcessors[i]; i++) { + const type = streamProcessors[i].getType(); + const mediaInfo = streamProcessors[i].getMediaInfo(); + if (type === Constants.AUDIO || + type === Constants.VIDEO || + (type === Constants.TEXT && mediaInfo.isFragmented)) { + let mediaInfo = streamProcessors[i].getMediaInfo(); + if (mediaInfo) { + protectionController.initializeForMedia(mediaInfo); + } + } + } + protectionController.handleKeySystemFromManifest(); } - const newAdaptation = adapter.getAdaptationForType(newStreamInfo.index, type, newStreamInfo); - const currentAdaptation = adapter.getAdaptationForType(currentStreamInfo.index, type, currentStreamInfo); - - if (!newAdaptation || !currentAdaptation) { - // If there is no adaptation for neither the old or the new stream they're compatible - return !newAdaptation && !currentAdaptation; + if (!isInitialized) { + isInitialized = true; + videoModel.waitForReadyState(Constants.VIDEO_ELEMENT_READY_STATES.HAVE_METADATA, () => { + eventBus.trigger(Events.STREAM_INITIALIZED, { + streamInfo: streamInfo + }); + }) } - - const sameMimeType = newAdaptation && currentAdaptation && newAdaptation.mimeType === currentAdaptation.mimeType; - const oldCodecs = currentAdaptation.Representation.map((representation) => { - return representation.codecs; - }); - - const newCodecs = newAdaptation.Representation.map((representation) => { - return representation.codecs; - }); - - const codecMatch = newCodecs.some((newCodec) => { - return oldCodecs.indexOf(newCodec) > -1; - }); - - const partialCodecMatch = 
newCodecs.some((newCodec) => oldCodecs.some((oldCodec) => capabilities.codecRootCompatibleWithCodec(oldCodec, newCodec))); - return codecMatch || (partialCodecMatch && sameMimeType); } function setPreloaded(value) { @@ -1046,37 +985,59 @@ function Stream(config) { return adapter; } + function getCurrentRepresentationForType(type) { + const sp = _getProcessorByType(type); + + if (!sp) { + return null; + } + + return sp.getRepresentation(); + } + + function getCurrentMediaInfoForType(type) { + const sp = _getProcessorByType(type); + + if (!sp) { + return null; + } + + return sp.getMediaInfo(); + } + instance = { - initialize, - getStreamId, activate, deactivate, - getIsActive, + getAdapter, + getCurrentMediaInfoForType, + getCurrentRepresentationForType, getDuration, - getStartTime, - getId, - getStreamInfo, getHasAudioTrack, + getHasFinishedBuffering, getHasVideoTrack, - startPreloading, - initializeForTextWithMediaSource, + getId, + getIsActive, + getIsEndedEventSignaled, + getPreloaded, + getProcessors, + getRepresentationForTypeById, + getRepresentationForTypeByIndex, + getRepresentationsByType, + getStartTime, + getStreamId, + getStreamInfo, getThumbnailController, - getBitrateListFor, - updateData, + initialize, + initializeForTextWithMediaSource, + prepareQualityChange, + prepareTrackChange, reset, - getProcessors, - setMediaSource, - isMediaCodecCompatible, - isProtectionCompatible, - getPreloaded, - getIsEndedEventSignaled, setIsEndedEventSignaled, - getAdapter, - getHasFinishedBuffering, + setMediaSource, setPreloaded, + startPreloading, startScheduleControllers, - prepareTrackChange, - prepareQualityChange + updateData, }; setup(); diff --git a/src/streaming/StreamProcessor.js b/src/streaming/StreamProcessor.js index 3336381895..69c0450840 100644 --- a/src/streaming/StreamProcessor.js +++ b/src/streaming/StreamProcessor.js @@ -70,7 +70,6 @@ function StreamProcessor(config) { let fragmentModel = config.fragmentModel; let abrController = 
config.abrController; let playbackController = config.playbackController; - let throughputController = config.throughputController; let mediaController = config.mediaController; let textController = config.textController; let dashMetrics = config.dashMetrics; @@ -81,7 +80,7 @@ function StreamProcessor(config) { let instance, logger, isDynamic, - mediaInfo, + currentMediaInfo, mediaInfoArr, bufferController, scheduleController, @@ -92,13 +91,12 @@ function StreamProcessor(config) { dashHandler, segmentsController, bufferingTime, - pendingSwitchToRepresentationInfo; + pendingSwitchToVoRepresentation; function setup() { logger = Debug(context).getInstance().getLogger(instance); resetInitialSettings(); - eventBus.on(Events.DATA_UPDATE_COMPLETED, _onDataUpdateCompleted, instance, { priority: EventBus.EVENT_PRIORITY_HIGH }); // High priority to be notified before Stream eventBus.on(Events.INIT_FRAGMENT_NEEDED, _onInitFragmentNeeded, instance); eventBus.on(Events.MEDIA_FRAGMENT_NEEDED, _onMediaFragmentNeeded, instance); eventBus.on(Events.INIT_FRAGMENT_LOADED, _onInitFragmentLoaded, instance); @@ -207,18 +205,14 @@ function StreamProcessor(config) { return type; } - function getIsTextTrack() { - return adapter.getIsTextTrack(representationController.getData()); - } - function resetInitialSettings() { mediaInfoArr = []; - mediaInfo = null; + currentMediaInfo = null; bufferingTime = 0; shouldUseExplicitTimeForRequest = false; shouldRepeatRequest = false; qualityChangeInProgress = false; - pendingSwitchToRepresentationInfo = null; + pendingSwitchToVoRepresentation = null; } function reset(errored, keepBuffers) { @@ -249,7 +243,6 @@ function StreamProcessor(config) { abrController.unRegisterStreamType(getStreamId(), type); } - eventBus.off(Events.DATA_UPDATE_COMPLETED, _onDataUpdateCompleted, instance); eventBus.off(Events.INIT_FRAGMENT_NEEDED, _onInitFragmentNeeded, instance); eventBus.off(Events.MEDIA_FRAGMENT_NEEDED, _onMediaFragmentNeeded, instance); 
eventBus.off(Events.INIT_FRAGMENT_LOADED, _onInitFragmentLoaded, instance); @@ -271,8 +264,8 @@ function StreamProcessor(config) { streamInfo = null; } - function isUpdating() { - return representationController ? representationController.isUpdating() : false; + function setMediaInfoArray(value) { + mediaInfoArr = value; } /** @@ -324,12 +317,12 @@ function StreamProcessor(config) { const promises = []; - // append window has been reset by abort() operation. Set the correct values again + // Append window has been reset by abort() operation. Set the correct values again promises.push(bufferController.updateAppendWindow()); - // Timestamp offset couldve been changed by preloading period - const representationInfo = getRepresentationInfo(); - promises.push(bufferController.updateBufferTimestampOffset(representationInfo)); + // Timestamp offset could've been changed by preloading period + const voRepresentation = getRepresentation(); + promises.push(bufferController.updateBufferTimestampOffset(voRepresentation)); Promise.all(promises) .then(() => { @@ -398,7 +391,7 @@ function StreamProcessor(config) { return; } - if (getIsTextTrack() && !textController.isTextEnabled()) return; + if (currentMediaInfo.isText && !textController.isTextEnabled()) return; if (bufferController && e.representationId) { if (!bufferController.appendInitSegmentFromCache(e.representationId)) { @@ -409,7 +402,7 @@ function StreamProcessor(config) { return; } // Init segment not in cache, send new request - const request = dashHandler ? dashHandler.getInitRequest(mediaInfo, rep) : null; + const request = dashHandler ? 
dashHandler.getInitRequest(currentMediaInfo, rep) : null; if (request) { fragmentModel.executeRequest(request); } else if (rescheduleIfNoRequest) { @@ -455,7 +448,7 @@ function StreamProcessor(config) { request.delayLoadingTime = new Date().getTime() + scheduleController.getTimeToLoadDelay(); scheduleController.setTimeToLoadDelay(0); if (!_shouldIgnoreRequest(request)) { - logger.debug(`Next fragment request url for stream id ${streamInfo.id} and media type ${type} is ${request.url}`); + logger.debug(`Next fragment request url for stream id ${streamInfo.id} and media type ${type} is ${request.url} with request range ${request.range}`); fragmentModel.executeRequest(request); } else { logger.warn(`Fragment request url ${request.url} for stream id ${streamInfo.id} and media type ${type} is on the ignore list and will be skipped`); @@ -477,10 +470,10 @@ function StreamProcessor(config) { if (settings.get().streaming.gaps.enableSeekFix && (shouldUseExplicitTimeForRequest || playbackController.getTime() === 0)) { let adjustedTime; if (!isDynamic) { - adjustedTime = dashHandler.getValidTimeAheadOfTargetTime(bufferingTime, mediaInfo, representation, settings.get().streaming.gaps.threshold); + adjustedTime = dashHandler.getValidTimeAheadOfTargetTime(bufferingTime, currentMediaInfo, representation, settings.get().streaming.gaps.threshold); } else if (isDynamic && representation.segmentInfoType === DashConstants.SEGMENT_TIMELINE) { // If we find a valid request ahead of the current time then we are in a gap. 
Segments are only added at the end of the timeline - adjustedTime = dashHandler.getValidTimeAheadOfTargetTime(bufferingTime, mediaInfo, representation, settings.get().streaming.gaps.threshold,); + adjustedTime = dashHandler.getValidTimeAheadOfTargetTime(bufferingTime, currentMediaInfo, representation, settings.get().streaming.gaps.threshold,); } if (!isNaN(adjustedTime) && adjustedTime !== bufferingTime) { if (playbackController.isSeeking() || playbackController.getTime() === 0) { @@ -539,7 +532,6 @@ function StreamProcessor(config) { * @private */ function _getFragmentRequest() { - const representationInfo = getRepresentationInfo(); let request; if (isNaN(bufferingTime) || (getType() === Constants.TEXT && !textController.isTextEnabled())) { @@ -547,14 +539,14 @@ function StreamProcessor(config) { } if (dashHandler) { - const representation = representationController && representationInfo ? representationController.getRepresentationForQuality(representationInfo.quality) : null; + const representation = getRepresentation(); if (shouldUseExplicitTimeForRequest) { - request = dashHandler.getSegmentRequestForTime(mediaInfo, representation, bufferingTime); + request = dashHandler.getSegmentRequestForTime(currentMediaInfo, representation, bufferingTime); } else if (shouldRepeatRequest) { - request = dashHandler.repeatSegmentRequest(mediaInfo, representation); + request = dashHandler.repeatSegmentRequest(currentMediaInfo, representation); } else { - request = dashHandler.getNextSegmentRequest(mediaInfo, representation); + request = dashHandler.getNextSegmentRequest(currentMediaInfo, representation); } } @@ -569,11 +561,10 @@ function StreamProcessor(config) { scheduleController.startScheduleTimer(playbackController.getLowLatencyModeEnabled() ? 
settings.get().streaming.scheduling.lowLatencyTimeout : settings.get().streaming.scheduling.defaultTimeout); } - function _onDataUpdateCompleted(e) { - if (!e.error) { - if (!bufferController.getIsBufferingCompleted()) { - bufferController.updateBufferTimestampOffset(e.currentRepresentation); - } + function _onDataUpdateCompleted() { + const currentRepresentation = representationController.getCurrentRepresentation() + if (!bufferController.getIsBufferingCompleted()) { + bufferController.updateBufferTimestampOffset(currentRepresentation); } } @@ -627,59 +618,145 @@ function StreamProcessor(config) { // we save the last initialized quality. That way we make sure that the media fragments we are about to append match the init segment if (e.segmentType === HTTPRequest.INIT_SEGMENT_TYPE) { - const lastInitializedQuality = e.quality; - scheduleController.setLastInitializedQuality(lastInitializedQuality); - logger.info('[' + type + '] ' + 'lastInitializedRepresentationInfo changed to ' + e.quality); + const lastInitializedRepresentationId = e.representationId; + scheduleController.setLastInitializedRepresentationId(lastInitializedRepresentationId); + logger.info('[' + type + '] ' + 'lastInitializedRepresentationId changed to ' + lastInitializedRepresentationId); } - if (pendingSwitchToRepresentationInfo) { - _prepareForDefaultQualitySwitch(pendingSwitchToRepresentationInfo) + if (pendingSwitchToVoRepresentation) { + _prepareForDefaultQualitySwitch(pendingSwitchToVoRepresentation) } else { scheduleController.startScheduleTimer(0); } } + /** + * Called once the StreamProcessor is initialized and when the track is switched. We only have one StreamProcessor per media type. So we need to adjust the mediaInfo once we switch/select a track. 
+ * @param {object} newMediaInfo + */ + function selectMediaInfo(newMediaInfo, targetRepresentation = null) { + return new Promise((resolve) => { + if (representationController) { + + // Switching to a new AdaptationSet as part of a quality switch + if (targetRepresentation) { + currentMediaInfo = newMediaInfo; + } + + // Switching to a new AS + else if ((currentMediaInfo === null || (!adapter.areMediaInfosEqual(currentMediaInfo, newMediaInfo)))) { + currentMediaInfo = newMediaInfo; + const bitrate = abrController.getInitialBitrateFor(type); + targetRepresentation = abrController.getOptimalRepresentationForBitrate(currentMediaInfo, bitrate, false); + } + + // MPD update quality remains the same + else { + currentMediaInfo = newMediaInfo; + targetRepresentation = representationController.getCurrentRepresentation() + } + + // Update Representation Controller with the new data + const voRepresentations = abrController.getPossibleVoRepresentations(currentMediaInfo, false); + const representationId = targetRepresentation.id; + return representationController.updateData(voRepresentations, currentMediaInfo.isFragmented, representationId) + .then(() => { + _onDataUpdateCompleted() + resolve(); + }) + .catch((e) => { + logger.error(e); + resolve() + }) + } else { + return Promise.resolve(); + } + }) + } + /** * The quality has changed which means we have switched to a different representation. * If we want to aggressively replace existing parts in the buffer we need to make sure that the new quality is higher than the already buffered one. 
* @param {object} e */ function prepareQualityChange(e) { - if (pendingSwitchToRepresentationInfo) { - logger.warning(`Canceling queued representation switch to ${pendingSwitchToRepresentationInfo.quality} for ${type}`); + if (!e.newRepresentation) { + return; + } + + if (pendingSwitchToVoRepresentation) { + logger.warn(`Canceling queued representation switch to ${pendingSwitchToVoRepresentation.id} for ${type}`); } - logger.debug(`Preparing quality switch for type ${type}`); - const newQuality = e.newQuality; + + if (e.isAdaptationSetSwitch) { + logger.debug(`Preparing quality switch to different AdaptationSet for type ${type}`); + _prepareAdaptationSwitchQualityChange(e) + } else { + logger.debug(`Preparing quality within the same AdaptationSet for type ${type}`); + _prepareNonAdaptationSwitchQualityChange(e) + } + } + + function _prepareNonAdaptationSwitchQualityChange(e) { + const newRepresentation = e.newRepresentation; qualityChangeInProgress = true; // Stop scheduling until we are done with preparing the quality switch scheduleController.clearScheduleTimer(); - representationController.prepareQualityChange(newQuality); + // Update selected Representation in RepresentationController + representationController.prepareQualityChange(newRepresentation); + + _handleDifferentSwitchTypes(e, newRepresentation); + } + + function _prepareAdaptationSwitchQualityChange(e) { + const newRepresentation = e.newRepresentation; + + qualityChangeInProgress = true; + + // Stop scheduling until we are done with preparing the quality switch + scheduleController.clearScheduleTimer(); + + // Informing ScheduleController about AS switch + scheduleController.setSwitchTrack(true); + + const newMediaInfo = newRepresentation.mediaInfo; + currentMediaInfo = newMediaInfo; - const representationInfo = getRepresentationInfo(newQuality); + selectMediaInfo(newMediaInfo, newRepresentation) + .then(() => { + _handleDifferentSwitchTypes(e, newRepresentation); + }) + } + + function 
_handleDifferentSwitchTypes(e, newRepresentation) { // If the switch should occur immediately we need to replace existing stuff in the buffer if (e.reason && e.reason.forceReplace) { - _prepareForForceReplacementQualitySwitch(representationInfo); + _prepareForForceReplacementQualitySwitch(newRepresentation); + } + + // We abandoned a current request + else if (e && e.reason && e.reason.forceAbandon) { + _prepareForAbandonQualitySwitch(newRepresentation) } // If fast switch is enabled we check if we are supposed to replace existing stuff in the buffer else if (settings.get().streaming.buffer.fastSwitchEnabled) { - _prepareForFastQualitySwitch(representationInfo, e); + _prepareForFastQualitySwitch(newRepresentation, e); } // Default quality switch. We append the new quality to the already buffered stuff else { - _prepareForDefaultQualitySwitch(representationInfo, e); + _prepareForDefaultQualitySwitch(newRepresentation); } dashMetrics.pushPlayListTraceMetrics(new Date(), PlayListTrace.REPRESENTATION_SWITCH_STOP_REASON); - dashMetrics.createPlaylistTraceMetrics(representationInfo.id, playbackController.getTime() * 1000, playbackController.getPlaybackRate()); - + dashMetrics.createPlaylistTraceMetrics(newRepresentation.id, playbackController.getTime() * 1000, playbackController.getPlaybackRate()); } - function _prepareForForceReplacementQualitySwitch(representationInfo) { + function _prepareForForceReplacementQualitySwitch(voRepresentation) { // Abort the current request to avoid inconsistencies and in case a rule such as AbandonRequestRule has forced a quality switch. A quality switch can also be triggered manually by the application. // If we update the buffer values now, or initialize a request to the new init segment, the currently downloading media segment might "work" with wrong values. 
@@ -692,8 +769,9 @@ function StreamProcessor(config) { streamId: streamInfo.id }, { mediaType: type, streamId: streamInfo.id }); + scheduleController.setCheckPlaybackQuality(false); // Abort appending segments to the buffer. Also adjust the appendWindow as we might have been in the progress of prebuffering stuff. - bufferController.prepareForForceReplacementQualitySwitch(representationInfo) + bufferController.prepareForForceReplacementQualitySwitch(voRepresentation) .then(() => { _bufferClearedForReplacement(); qualityChangeInProgress = false; @@ -704,23 +782,23 @@ function StreamProcessor(config) { }); } - function _prepareForFastQualitySwitch(representationInfo, e) { + function _prepareForFastQualitySwitch(voRepresentation) { // if we switch up in quality and need to replace existing parts in the buffer we need to adjust the buffer target const time = playbackController.getTime(); - let safeBufferLevel = 1.5 * (!isNaN(representationInfo.fragmentDuration) ? representationInfo.fragmentDuration : 1); + let safeBufferLevel = 1.5 * (!isNaN(voRepresentation.fragmentDuration) ? 
voRepresentation.fragmentDuration : 1); const request = fragmentModel.getRequests({ state: FragmentModel.FRAGMENT_MODEL_EXECUTED, time: time + safeBufferLevel, threshold: 0 })[0]; - if (request && !getIsTextTrack()) { + if (request && !currentMediaInfo.isText) { const bufferLevel = bufferController.getBufferLevel(); const abandonmentState = abrController.getAbandonmentStateFor(streamInfo.id, type); // The new quality is higher than the one we originally requested - if (request.quality < representationInfo.quality && bufferLevel >= safeBufferLevel && abandonmentState === MetricsConstants.ALLOW_LOAD) { - bufferController.updateBufferTimestampOffset(representationInfo) + if (request.bandwidth < voRepresentation.bandwidth && bufferLevel >= safeBufferLevel && abandonmentState === MetricsConstants.ALLOW_LOAD) { + bufferController.updateBufferTimestampOffset(voRepresentation) .then(() => { // Abort the current request to avoid inconsistencies. A quality switch can also be triggered manually by the application. // If we update the buffer values now, or initialize a request to the new init segment, the currently downloading media segment might "work" with wrong values. @@ -739,54 +817,43 @@ function StreamProcessor(config) { // If we have buffered a higher quality we do not replace anything. We might cancel the current request due to abandon request rule else { - _prepareForDefaultQualitySwitch(representationInfo, e); + _prepareForDefaultQualitySwitch(voRepresentation); } } else { - scheduleController.startScheduleTimer(); - qualityChangeInProgress = false; + _prepareForDefaultQualitySwitch(voRepresentation); } } - function _prepareForDefaultQualitySwitch(representationInfo, e) { - - // Check if we need to abandon the request caused by the AbandonRequestRule - if (e && e.reason && e.reason.forceAbandon) { - _handleAbandonQualitySwitch() + function _prepareForDefaultQualitySwitch(voRepresentation) { + // We are not canceling the current request. 
Check if there is still an ongoing request. If so we wait for the request to be finished and the media to be appended + const ongoingRequests = fragmentModel.getRequests({ state: FragmentModel.FRAGMENT_MODEL_LOADING }) + if (ongoingRequests && ongoingRequests.length > 0) { + logger.debug('Preparing for default quality switch: Waiting for ongoing segment request to be finished before applying switch.') + pendingSwitchToVoRepresentation = voRepresentation; return; } - // We are not canceling the current request. Check if there is still an ongoing request. If so we wait for the request to be finished and the media to be appended - else { - const ongoingRequests = fragmentModel.getRequests({ state: FragmentModel.FRAGMENT_MODEL_LOADING }) - if (ongoingRequests && ongoingRequests.length > 0) { - logger.debug('Preparing for default quality switch: Waiting for ongoing segment request to be finished before applying switch.') - pendingSwitchToRepresentationInfo = representationInfo; - return; - } - } - bufferController.updateBufferTimestampOffset(representationInfo) + bufferController.updateBufferTimestampOffset(voRepresentation) .then(() => { - if (mediaInfo.segmentAlignment || mediaInfo.subSegmentAlignment) { + scheduleController.setCheckPlaybackQuality(false); + if (currentMediaInfo.segmentAlignment || currentMediaInfo.subSegmentAlignment) { scheduleController.startScheduleTimer(); } else { _bufferClearedForNonReplacement() } - pendingSwitchToRepresentationInfo = null; + pendingSwitchToVoRepresentation = null; qualityChangeInProgress = false; }) .catch(() => { - pendingSwitchToRepresentationInfo = null; + pendingSwitchToVoRepresentation = null; qualityChangeInProgress = false; }) } - function _handleAbandonQualitySwitch(representationInfo) { - bufferController.updateBufferTimestampOffset(representationInfo) + function _prepareForAbandonQualitySwitch(voRepresentation) { + bufferController.updateBufferTimestampOffset(voRepresentation) .then(() => { - // Abort the current 
request to avoid inconsistencies. A quality switch can also be triggered manually by the application. - // If we update the buffer values now, or initialize a request to the new init segment, the currently downloading media segment might "work" with wrong values. - // Everything that is already in the buffer queue is ok fragmentModel.abortRequests(); shouldRepeatRequest = true; scheduleController.setCheckPlaybackQuality(false); @@ -831,7 +898,7 @@ function StreamProcessor(config) { function _onFragmentLoadingCompleted(e) { logger.info('OnFragmentLoadingCompleted for stream id ' + streamInfo.id + ' and media type ' + type + ' - Url:', e.request ? e.request.url : 'undefined', e.request.range ? ', Range:' + e.request.range : ''); - if (getIsTextTrack()) { + if (currentMediaInfo.isText) { scheduleController.startScheduleTimer(0); } @@ -943,59 +1010,12 @@ function StreamProcessor(config) { return streamInfo; } - /** - * Called once the StreamProcessor is initialized and when the track is switched. We only have one StreamProcessor per media type. So we need to adjust the mediaInfo once we switch/select a track. 
- * @param {object} newMediaInfo - */ - function selectMediaInfo(newMediaInfo) { - if (newMediaInfo !== mediaInfo && (!newMediaInfo || !mediaInfo || (newMediaInfo.type === mediaInfo.type))) { - mediaInfo = newMediaInfo; - } - - const newRealAdaptation = adapter.getRealAdaptation(streamInfo, mediaInfo); - const voRepresentations = adapter.getVoRepresentations(mediaInfo); - - if (representationController) { - const realAdaptation = representationController.getData(); - const maxQuality = abrController.getMaxAllowedIndexFor(type, streamInfo.id); - const minIdx = abrController.getMinAllowedIndexFor(type, streamInfo.id); - - let quality, - averageThroughput; - let bitrate = null; - - if ((realAdaptation === null || (realAdaptation.id !== newRealAdaptation.id)) && type !== Constants.TEXT) { - averageThroughput = throughputController.getAverageThroughput(type); - bitrate = averageThroughput || abrController.getInitialBitrateFor(type, streamInfo.id); - quality = abrController.getQualityForBitrate(mediaInfo, bitrate, streamInfo.id); - } else { - quality = abrController.getQualityFor(type, streamInfo.id); - } - - if (minIdx !== undefined && quality < minIdx) { - quality = minIdx; - } - if (quality > maxQuality) { - quality = maxQuality; - } - return representationController.updateData(newRealAdaptation, voRepresentations, type, mediaInfo.isFragmented, quality); - } else { - return Promise.resolve(); - } - } - - function addMediaInfo(newMediaInfo) { - if (mediaInfoArr.indexOf(newMediaInfo) === -1) { - mediaInfoArr.push(newMediaInfo); - } - } - - function clearMediaInfoArray() { - mediaInfoArr = []; + function getMediaInfo() { + return currentMediaInfo; } - function getMediaInfo() { - return mediaInfo; + function getAllMediaInfos() { + return mediaInfoArr; } function getMediaSource() { @@ -1003,7 +1023,7 @@ function StreamProcessor(config) { } function setMediaSource(mediaSource) { - return bufferController.setMediaSource(mediaSource, mediaInfo); + return 
bufferController.setMediaSource(mediaSource, currentMediaInfo); } function getScheduleController() { @@ -1013,19 +1033,20 @@ function StreamProcessor(config) { /** * Get a specific voRepresentation. If quality parameter is defined, this function will return the voRepresentation for this quality. * Otherwise, this function will return the current voRepresentation used by the representationController. - * @param {number} quality - quality index of the voRepresentaion expected. + * @param {number} id - id of the voRepresentation. + * @return {object} voRepresentation */ - function getRepresentationInfo(quality) { + function getRepresentation(id = NaN) { let voRepresentation; - if (quality !== undefined) { - checkInteger(quality); - voRepresentation = representationController ? representationController.getRepresentationForQuality(quality) : null; + if (!isNaN(id)) { + checkInteger(id); + voRepresentation = representationController ? representationController.getRepresentationById(id) : null; } else { voRepresentation = representationController ? representationController.getCurrentRepresentation() : null; } - return adapter.convertRepresentationToRepresentationInfo(voRepresentation); + return voRepresentation } function isBufferingCompleted() { @@ -1041,14 +1062,11 @@ function StreamProcessor(config) { * @return {FragmentRequest|null} */ function probeNextRequest() { - const representationInfo = getRepresentationInfo(); - - const representation = representationController && representationInfo ? 
- representationController.getRepresentationForQuality(representationInfo.quality) : null; + const voRepresentation = getRepresentation(); return dashHandler.getNextSegmentRequestIdempotent( - mediaInfo, - representation + currentMediaInfo, + voRepresentation ); } @@ -1058,32 +1076,28 @@ function StreamProcessor(config) { } const chunk = e.chunk; const bytes = chunk.bytes; - const quality = chunk.quality; - const currentRepresentation = getRepresentationInfo(quality); - const voRepresentation = representationController && currentRepresentation ? representationController.getRepresentationForQuality(currentRepresentation.quality) : null; - if (currentRepresentation && voRepresentation) { - voRepresentation.timescale = boxParser.getMediaTimescaleFromMoov(bytes); + const representationId = chunk.representation.id; + const currentRepresentation = getRepresentation(representationId); + if (currentRepresentation) { + currentRepresentation.timescale = boxParser.getMediaTimescaleFromMoov(bytes); } } function _onMediaFragmentLoaded(e) { const chunk = e.chunk; - const bytes = chunk.bytes; - const quality = chunk.quality; - const currentRepresentation = getRepresentationInfo(quality); - const voRepresentation = representationController && currentRepresentation ? representationController.getRepresentationForQuality(currentRepresentation.quality) : null; - + const representationId = chunk.representation.id; + const currentRepresentation = getRepresentation(representationId); // If we switch tracks this event might be fired after the representations in the RepresentationController have been updated according to the new MediaInfo. 
// In this case there will be no currentRepresentation and voRepresentation matching the "old" quality - if (currentRepresentation && voRepresentation) { + if (currentRepresentation) { let isoFile; // Check for inband prft on media segment (if enabled) if (settings.get().streaming.parseInbandPrft && e.request.type === HTTPRequest.MEDIA_SEGMENT_TYPE) { isoFile = isoFile ? isoFile : boxParser.parse(bytes); - const timescale = voRepresentation.timescale; + const timescale = currentRepresentation.timescale; const prfts = _handleInbandPrfts(isoFile, timescale); if (prfts && prfts.length) { eventBus.trigger(MediaPlayerEvents.INBAND_PRFT, @@ -1094,14 +1108,10 @@ function StreamProcessor(config) { } const eventStreamMedia = adapter.getEventsFor(currentRepresentation.mediaInfo, null, streamInfo); - const eventStreamTrack = adapter.getEventsFor(currentRepresentation, voRepresentation, streamInfo); + const eventStreamTrack = adapter.getEventsFor(currentRepresentation, currentRepresentation, streamInfo); if (eventStreamMedia && eventStreamMedia.length > 0 || eventStreamTrack && eventStreamTrack.length > 0) { - const request = fragmentModel.getRequests({ - state: FragmentModel.FRAGMENT_MODEL_EXECUTED, - quality: quality, - index: chunk.index - })[0]; + const request = e.request; isoFile = isoFile ? isoFile : boxParser.parse(bytes); const events = _handleInbandEvents(isoFile, request, eventStreamMedia, eventStreamTrack); @@ -1197,7 +1207,7 @@ function StreamProcessor(config) { return Promise.resolve(buffer); } - return bufferController ? bufferController.createBufferSink(mediaInfo, previousBufferSinks) : Promise.resolve(null); + return bufferController ? bufferController.createBufferSink(currentMediaInfo, previousBufferSinks) : Promise.resolve(null); } function prepareTrackSwitch() { @@ -1208,7 +1218,8 @@ function StreamProcessor(config) { // when buffering is completed and we are not supposed to replace anything do nothing. 
// Still we need to trigger preloading again and call change type in case user seeks back before transitioning to next period if (bufferController.getIsBufferingCompleted() && !shouldReplace) { - bufferController.prepareForNonReplacementTrackSwitch(mediaInfo.codec) + const representation = representationController.getCurrentRepresentation() + bufferController.prepareForNonReplacementTrackSwitch(representation) .then(() => { eventBus.trigger(Events.BUFFERING_COMPLETED, {}, { streamId: streamInfo.id, mediaType: type }) }) @@ -1235,11 +1246,11 @@ function StreamProcessor(config) { fragmentModel.abortRequests(); // Abort appending segments to the buffer. Also adjust the appendWindow as we might have been in the progress of prebuffering stuff. - bufferController.prepareForReplacementTrackSwitch(mediaInfo.codec) + const representation = getRepresentation() + bufferController.prepareForReplacementTrackSwitch(representation) .then(() => { // Timestamp offset couldve been changed by preloading period - const representationInfo = getRepresentationInfo(); - return bufferController.updateBufferTimestampOffset(representationInfo); + return bufferController.updateBufferTimestampOffset(representation); }) .then(() => { _bufferClearedForReplacement(); @@ -1251,7 +1262,8 @@ function StreamProcessor(config) { }); } else { // We do not replace anything that is already in the buffer. 
Still we need to prepare the buffer for the track switch - bufferController.prepareForNonReplacementTrackSwitch(mediaInfo.codec) + const representation = getRepresentation() + bufferController.prepareForNonReplacementTrackSwitch(representation) .then(() => { _bufferClearedForNonReplacement(); resolve(); @@ -1319,7 +1331,6 @@ function StreamProcessor(config) { errHandler, mediaController, representationController, - adapter, textController, abrController, playbackController, @@ -1347,11 +1358,10 @@ function StreamProcessor(config) { } instance = { - addMediaInfo, - clearMediaInfoArray, createBufferSinks, dischargePreBuffer, finalisePlayList, + getAllMediaInfos, getBuffer, getBufferController, getBufferLevel, @@ -1359,20 +1369,20 @@ function StreamProcessor(config) { getMediaInfo, getMediaSource, getRepresentationController, - getRepresentationInfo, + getRepresentation, getScheduleController, getStreamId, getStreamInfo, getType, initialize, isBufferingCompleted, - isUpdating, prepareInnerPeriodPlaybackSeeking, prepareOuterPeriodPlaybackSeeking, prepareQualityChange, prepareTrackSwitch, probeNextRequest, reset, + setMediaInfoArray, selectMediaInfo, setExplicitBufferingTime, setMediaSource, diff --git a/src/streaming/controllers/AbrController.js b/src/streaming/controllers/AbrController.js index 22ef95c037..23af2ee547 100644 --- a/src/streaming/controllers/AbrController.js +++ b/src/streaming/controllers/AbrController.js @@ -32,7 +32,6 @@ import ABRRulesCollection from '../rules/abr/ABRRulesCollection.js'; import Constants from '../constants/Constants.js'; import MetricsConstants from '../constants/MetricsConstants.js'; -import BitrateInfo from '../vo/BitrateInfo.js'; import FragmentModel from '../models/FragmentModel.js'; import EventBus from '../../core/EventBus.js'; import Events from '../../core/events/Events.js'; @@ -42,12 +41,10 @@ import SwitchRequest from '../rules/SwitchRequest.js'; import SwitchRequestHistory from '../rules/SwitchRequestHistory.js'; import 
DroppedFramesHistory from '../rules/DroppedFramesHistory.js'; import Debug from '../../core/Debug.js'; -import {checkInteger} from '../utils/SupervisorTools.js'; import MediaPlayerEvents from '../MediaPlayerEvents.js'; const DEFAULT_VIDEO_BITRATE = 1000; -const DEFAULT_AUDIO_BITRATE = 100; -const QUALITY_DEFAULT = 0; +const DEFAULT_BITRATE = 100; function AbrController() { @@ -59,21 +56,17 @@ function AbrController() { logger, abrRulesCollection, streamController, - topQualities, - qualityDict, streamProcessorDict, abandonmentStateDict, abandonmentTimeout, windowResizeEventCalled, - elementWidth, - elementHeight, adapter, videoModel, mediaPlayerModel, customParametersModel, cmsdModel, domStorage, - playbackIndex, + playbackRepresentationId, switchHistoryDict, droppedFramesHistory, throughputController, @@ -86,7 +79,7 @@ function AbrController() { } /** - * Initialize everything that is not Stream specific. We only have one instance of the ABR Controller for all periods. + * Initialize everything that is not period specific. We only have one instance of the ABR Controller for all periods. 
*/ function initialize() { droppedFramesHistory = DroppedFramesHistory(context).create(); @@ -137,11 +130,13 @@ function AbrController() { abrRulesCollection.setBolaState(type, settings.get().streaming.abr.activeRules.bolaRule && !_shouldApplyDynamicAbrStrategy()); } - if (type === Constants.VIDEO) { - setElementSize(); - } } + /** + * Remove all parameters that belong to a specific period + * @param {string} streamId + * @param {string} type + */ function unRegisterStreamType(streamId, type) { try { if (streamProcessorDict[streamId] && streamProcessorDict[streamId][type]) { @@ -162,8 +157,6 @@ function AbrController() { } function resetInitialSettings() { - topQualities = {}; - qualityDict = {}; abandonmentStateDict = {}; streamProcessorDict = {}; switchHistoryDict = {}; @@ -175,7 +168,7 @@ function AbrController() { droppedFramesHistory.reset(); } - playbackIndex = undefined; + playbackRepresentationId = undefined; droppedFramesHistory = undefined; clearTimeout(abandonmentTimeout); abandonmentTimeout = null; @@ -229,345 +222,365 @@ function AbrController() { } } - function checkConfig() { - if (!domStorage || !domStorage.hasOwnProperty('getSavedBitrateSettings')) { - throw new Error(Constants.MISSING_CONFIG_ERROR); - } - } + function getOptimalRepresentationForBitrate(mediaInfo, bitrate, includeCompatibleMediaInfos = true) { + const possibleVoRepresentations = getPossibleVoRepresentations(mediaInfo, includeCompatibleMediaInfos); - /** - * While fragment loading is in progress we check if we might need to abort the request - * @param {object} e - * @private - */ - function _onFragmentLoadProgress(e) { - const type = e.request.mediaType; - const streamId = e.streamId; - - if (!type || !streamId || !streamProcessorDict[streamId] || !settings.get().streaming.abr.autoSwitchBitrate[type]) { - return; + if (!possibleVoRepresentations || possibleVoRepresentations.length === 0) { + return null; } - const streamProcessor = streamProcessorDict[streamId][type]; - if 
(!streamProcessor) { - return; + // If bitrate should be as small as possible return the Representation with the lowest bitrate + if (bitrate <= 0) { + return possibleVoRepresentations.sort((a, b) => { + return a.bandwidth - b.bandwidth; + })[0] } - const rulesContext = RulesContext(context).create({ - abrController: instance, - streamProcessor, - currentRequest: e.request, - throughputController, - videoModel + // Get all Representations that have lower or equal bitrate than our target bitrate + const targetRepresentations = possibleVoRepresentations.filter((rep) => { + return rep.bitrateInKbit <= bitrate }); - const switchRequest = abrRulesCollection.shouldAbandonFragment(rulesContext, streamId); - - if (switchRequest.quality > SwitchRequest.NO_CHANGE) { - const fragmentModel = streamProcessor.getFragmentModel(); - const request = fragmentModel.getRequests({ - state: FragmentModel.FRAGMENT_MODEL_LOADING, - index: e.request.index - })[0]; - if (request) { - abandonmentStateDict[streamId][type].state = MetricsConstants.ABANDON_LOAD; - switchHistoryDict[streamId][type].reset(); - switchHistoryDict[streamId][type].push({ - oldValue: getQualityFor(type, streamId), - newValue: switchRequest.quality, - confidence: 1, - reason: switchRequest.reason - }); - setPlaybackQuality(type, streamController.getActiveStreamInfo(), switchRequest.quality, switchRequest.reason); - - clearTimeout(abandonmentTimeout); - abandonmentTimeout = setTimeout( - () => { - abandonmentStateDict[streamId][type].state = MetricsConstants.ALLOW_LOAD; - abandonmentTimeout = null; - }, - settings.get().streaming.abandonLoadTimeout - ); - } + + if (!targetRepresentations || targetRepresentations.length === 0) { + return possibleVoRepresentations[0]; } + + return targetRepresentations[targetRepresentations.length - 1]; } - /** - * Update dropped frames history when the quality was changed - * @param {object} e - * @private - */ - function _onQualityChangeRendered(e) { - if (e.mediaType === 
Constants.VIDEO) { - if (playbackIndex !== undefined) { - droppedFramesHistory.push(e.streamId, playbackIndex, videoModel.getPlaybackQuality()); - } - playbackIndex = e.newQuality; + function getRepresentationByAbsoluteIndex(absoluteIndex, mediaInfo, includeCompatibleMediaInfos = true) { + if (isNaN(absoluteIndex) || absoluteIndex < 0) { + return null; } + + const possibleVoRepresentations = getPossibleVoRepresentations(mediaInfo, includeCompatibleMediaInfos); + + return possibleVoRepresentations.find((rep) => { + return rep.absoluteIndex === absoluteIndex + }) } - /** - * When the buffer level is updated we check if we need to change the ABR strategy - * @param e - * @private - */ - function _onMetricAdded(e) { - if (_shouldApplyDynamicAbrStrategy() - && e.metric === MetricsConstants.BUFFER_LEVEL - && (e.mediaType === Constants.AUDIO || e.mediaType === Constants.VIDEO)) { - _updateDynamicAbrStrategy(e.mediaType, 0.001 * e.value.level); + function getPossibleVoRepresentations(mediaInfo, includeCompatibleMediaInfos = true) { + let voRepresentations = []; + if (!mediaInfo) { + return voRepresentations; } + + const mediaInfos = _getPossibleMediaInfos(mediaInfo) + mediaInfos.forEach((mediaInfo) => { + let currentVoRepresentations = adapter.getVoRepresentations(mediaInfo); + + if (currentVoRepresentations && currentVoRepresentations.length > 0) { + voRepresentations = voRepresentations.concat(currentVoRepresentations) + } + }) + + // Now sort by quality (usually simply by bitrate) + voRepresentations = _sortByCalculatedQualityRank(voRepresentations); + + // Filter the list of options based on the provided settings + // We can not apply the filter before otherwise the absolute index would be wrong + voRepresentations = _filterByAllowedSettings(voRepresentations) + + // Add an absolute index after filtering + voRepresentations.forEach((rep, index) => { + rep.absoluteIndex = index + }) + + // Filter the Representations in case we do not want to include compatible Media 
Infos + // We can not apply the filter before otherwise the absolute index would be wrong + if (!includeCompatibleMediaInfos) { + voRepresentations = voRepresentations.filter((rep) => { + return adapter.areMediaInfosEqual(rep.mediaInfo, mediaInfo); + }) + } + + return voRepresentations; } - /** - * Returns the highest possible index taking limitations like maxBitrate, representationRatio and portal size into account. - * @param {string} type - * @param {string} streamId - * @return {undefined|number} - */ - function getMaxAllowedIndexFor(type, streamId) { + function _getPossibleMediaInfos(mediaInfo) { try { - let idx; - topQualities[streamId] = topQualities[streamId] || {}; + const possibleMediaInfos = []; - if (!topQualities[streamId].hasOwnProperty(type)) { - topQualities[streamId][type] = 0; + if (mediaInfo) { + possibleMediaInfos.push(mediaInfo); } - idx = _checkMaxBitrate(type, streamId); - idx = _checkMaxRepresentationRatio(idx, type, streamId); - idx = _checkPortalSize(idx, type, streamId); - // Apply maximum suggested bitrate from CMSD headers if enabled - if (settings.get().streaming.cmsd.enabled && settings.get().streaming.cmsd.abr.applyMb) { - idx = _checkCmsdMaxBitrate(idx, type, streamId); + // If AS switching is disabled return only the current MediaInfo + if (!settings.get().streaming.abr.enableSupplementalPropertyAdaptationSetSwitching + || !mediaInfo.adaptationSetSwitchingCompatibleIds + || mediaInfo.adaptationSetSwitchingCompatibleIds.length === 0) { + return possibleMediaInfos } - return idx; + + // Otherwise add everything that is compatible + const mediaInfoArr = streamProcessorDict[mediaInfo.streamInfo.id][mediaInfo.type].getAllMediaInfos() + const compatibleMediaInfos = mediaInfoArr.filter((entry) => { + return mediaInfo.adaptationSetSwitchingCompatibleIds.includes(entry.id) + }) + + return possibleMediaInfos.concat(compatibleMediaInfos); } catch (e) { - return undefined + return [mediaInfo] } } /** - * Returns the minimum allowed index. 
We consider thresholds defined in the settings, i.e. minBitrate for the corresponding media type. - * @param {string} type - * @param {string} streamId - * @return {undefined|number} + * @param {Representation[]} voRepresentations + * @return {Representation[]} */ - function getMinAllowedIndexFor(type, streamId) { + function _filterByAllowedSettings(voRepresentations) { try { - return _getMinIndexBasedOnBitrateFor(type, streamId); + voRepresentations = _filterByPossibleBitrate(voRepresentations); + voRepresentations = _filterByPortalSize(voRepresentations); + voRepresentations = _filterByCmsdMaxBitrate(voRepresentations); + + return voRepresentations; } catch (e) { - return undefined + logger.error(e); + return voRepresentations } } /** - * Returns the maximum allowed index. - * @param {string} type - * @param {string} streamId - * @return {undefined|number} + * Returns all RepresentationInfo objects that have at least one bitrate that fulfills the constraint + * @param {Representation[]} voRepresentations + * @return {Representation[]} */ - function _getMaxIndexBasedOnBitrateFor(type, streamId) { + function _filterByPossibleBitrate(voRepresentations) { try { - const maxBitrate = mediaPlayerModel.getAbrBitrateParameter('maxBitrate', type); - if (maxBitrate > -1) { - return getQualityForBitrate(streamProcessorDict[streamId][type].getMediaInfo(), maxBitrate, streamId); - } else { - return undefined; + const filteredArray = voRepresentations.filter((voRepresentation) => { + const type = voRepresentation.mediaInfo.type; + const representationBitrate = voRepresentation.bitrateInKbit; + const maxBitrate = mediaPlayerModel.getAbrBitrateParameter('maxBitrate', type); + const minBitrate = mediaPlayerModel.getAbrBitrateParameter('minBitrate', type); + + if (maxBitrate > -1 && representationBitrate > maxBitrate) { + return false; + } + + return !(minBitrate > -1 && representationBitrate < minBitrate); + }) + + if (filteredArray.length > 0) { + return filteredArray } + + 
return voRepresentations } catch (e) { - return undefined + logger.error(e); + return voRepresentations } } /** - * Returns the minimum allowed index. - * @param {string} type - * @param {string} streamId - * @return {undefined|number} + * @param {Representation[]} voRepresentations + * @return {Representation[]} + * @private */ - function _getMinIndexBasedOnBitrateFor(type, streamId) { + function _filterByPortalSize(voRepresentations) { try { - const minBitrate = mediaPlayerModel.getAbrBitrateParameter('minBitrate', type); - - if (minBitrate > -1) { - const mediaInfo = streamProcessorDict[streamId][type].getMediaInfo(); - const bitrateList = getBitrateList(mediaInfo); - // This returns the quality index <= for the given bitrate - let minIdx = getQualityForBitrate(mediaInfo, minBitrate, streamId); - if (bitrateList[minIdx] && minIdx < bitrateList.length - 1 && bitrateList[minIdx].bitrate < minBitrate * 1000) { - minIdx++; // Go to the next bitrate - } - return minIdx; - } else { - return undefined; + if (!settings.get().streaming.abr.limitBitrateByPortal) { + return voRepresentations; } + + const { elementWidth } = videoModel.getVideoElementSize(); + + const filteredArray = voRepresentations.filter((voRepresentation) => { + return voRepresentation.mediaInfo.type !== Constants.VIDEO || voRepresentation.width <= elementWidth; + }) + + if (filteredArray.length > 0) { + return filteredArray + } + + return voRepresentations } catch (e) { - return undefined; + logger.error(e); + return voRepresentations } } /** - * Returns the maximum possible index - * @param type - * @param streamId - * @return {number|*} + * @param {Representation[]} voRepresentations + * @return {Representation[]} */ - function _checkMaxBitrate(type, streamId) { - let idx = topQualities[streamId][type]; - let newIdx = idx; + function _filterByCmsdMaxBitrate(voRepresentations) { + try { + // Check CMSD max suggested bitrate only for video segments + if (!settings.get().streaming.cmsd.enabled || 
!settings.get().streaming.cmsd.abr.applyMb) { + return voRepresentations + } - if (!streamProcessorDict[streamId] || !streamProcessorDict[streamId][type]) { - return newIdx; - } + const filteredArray = voRepresentations.filter((voRepresentation) => { + const type = voRepresentation.mediaInfo.type; + let maxCmsdBitrate = cmsdModel.getMaxBitrate(type); - const minIdx = getMinAllowedIndexFor(type, streamId); - if (minIdx !== undefined) { - newIdx = Math.max(idx, minIdx); - } + if (type !== Constants.VIDEO || maxCmsdBitrate < 0) { + return true + } + // Subtract audio bitrate + const streamId = voRepresentation.mediaInfo.streamInfo.id; + const streamProcessor = streamProcessorDict[streamId][Constants.AUDIO]; + const representation = streamProcessor.getRepresentation(); + const audioBitrate = representation.bitrateInKbit; + maxCmsdBitrate -= audioBitrate ? audioBitrate : 0; + return voRepresentation.bitrateInKbit <= maxCmsdBitrate + }) + + if (filteredArray.length > 0) { + return filteredArray + } - const maxIdx = _getMaxIndexBasedOnBitrateFor(type, streamId); - if (maxIdx !== undefined) { - newIdx = Math.min(newIdx, maxIdx); + return voRepresentations + } catch (e) { + logger.error(e); + return voRepresentations } - - return newIdx; - } - - /** - * Returns the maximum possible index from CMSD model - * @param type - * @param streamId - * @return {number|*} - */ - function _checkCmsdMaxBitrate(idx, type, streamId) { - // Check CMSD max suggested bitrate only for video segments - if (type !== 'video') { - return idx; - } - // Get max suggested bitrate - let maxCmsdBitrate = cmsdModel.getMaxBitrate(type); - if (maxCmsdBitrate < 0) { - return idx; - } - // Substract audio bitrate - const audioBitrate = _getBitrateInfoForQuality(streamId, 'audio', getQualityFor('audio', streamId)); - maxCmsdBitrate -= audioBitrate ? 
(audioBitrate.bitrate / 1000) : 0; - const maxIdx = getQualityForBitrate(streamProcessorDict[streamId][type].getMediaInfo(), maxCmsdBitrate, streamId); - logger.debug('Stream ID: ' + streamId + ' [' + type + '] Apply max bit rate from CMSD: ' + maxCmsdBitrate); - return Math.min(idx, maxIdx); } /** - * Returns the maximum index according to maximum representation ratio - * @param idx - * @param type - * @param streamId - * @return {number|*} + * Calculate a quality rank based on bandwidth, codec and qualityRanking. Lower value means better quality. + * @param voRepresentations * @private */ - function _checkMaxRepresentationRatio(idx, type, streamId) { - let maxIdx = topQualities[streamId][type] - const maxRepresentationRatio = settings.get().streaming.abr.maxRepresentationRatio[type]; + function _sortByCalculatedQualityRank(voRepresentations) { + + // All Representations must have a qualityRanking otherwise we ignore it + // QualityRanking only applies to Representations within one AS. 
If we merged multiple AS based on the adaptation-set-switching-2016 supplemental property we can not apply this logic + let firstMediaInfo = null; + const filteredRepresentations = voRepresentations.filter((rep) => { + if (!firstMediaInfo) { + firstMediaInfo = rep.mediaInfo; + } + return !isNaN(rep.qualityRanking) && adapter.areMediaInfosEqual(firstMediaInfo, rep.mediaInfo); + }) - if (isNaN(maxRepresentationRatio) || maxRepresentationRatio >= 1 || maxRepresentationRatio < 0) { - return idx; + if (filteredRepresentations.length === voRepresentations.length) { + voRepresentations.sort((a, b) => { + return b.qualityRanking - a.qualityRanking; + }) + } else { + voRepresentations.sort((a, b) => { + return a.bandwidth - b.bandwidth; + }) } - return Math.min(idx, Math.round(maxIdx * maxRepresentationRatio)); + + return voRepresentations } /** - * Returns the maximum index according to the portal size - * @param idx - * @param type - * @param streamId - * @return {number|*} + * While fragment loading is in progress we check if we might need to abort the request + * @param {object} e * @private */ - function _checkPortalSize(idx, type, streamId) { - if (type !== Constants.VIDEO || !settings.get().streaming.abr.limitBitrateByPortal || !streamProcessorDict[streamId] || !streamProcessorDict[streamId][type]) { - return idx; + function _onFragmentLoadProgress(e) { + const type = e.request.mediaType; + const streamId = e.streamId; + + if (!type || !streamId || !streamProcessorDict[streamId] || !settings.get().streaming.abr.autoSwitchBitrate[type]) { + return; } - if (!windowResizeEventCalled) { - setElementSize(); + const streamProcessor = streamProcessorDict[streamId][type]; + if (!streamProcessor) { + return; } - const streamInfo = streamProcessorDict[streamId][type].getStreamInfo(); - const representation = adapter.getAdaptationForType(streamInfo.index, type, streamInfo).Representation; - let newIdx = idx; - if (elementWidth > 0 && elementHeight > 0) { - while ( - newIdx > 0 
&& - representation[newIdx] && - elementWidth < representation[newIdx].width && - elementWidth - representation[newIdx - 1].width < representation[newIdx].width - elementWidth) { - newIdx = newIdx - 1; - } + const rulesContext = RulesContext(context).create({ + abrController: instance, + streamProcessor, + currentRequest: e.request, + throughputController, + videoModel + }); + const switchRequest = abrRulesCollection.shouldAbandonFragment(rulesContext); - // Make sure that in case of multiple representation elements have same - // resolution, every such element is included - while (newIdx < representation.length - 1 && representation[newIdx].width === representation[newIdx + 1].width) { - newIdx = newIdx + 1; - } + if (switchRequest && switchRequest.representation !== SwitchRequest.NO_CHANGE) { + _onSegmentDownloadShouldBeAbandoned(e, streamId, type, streamProcessor, switchRequest); } + } - return newIdx; + function _onSegmentDownloadShouldBeAbandoned(e, streamId, type, streamProcessor, switchRequest) { + const fragmentModel = streamProcessor.getFragmentModel(); + const request = fragmentModel.getRequests({ + state: FragmentModel.FRAGMENT_MODEL_LOADING, + index: e.request.index + })[0]; + if (request) { + abandonmentStateDict[streamId][type].state = MetricsConstants.ABANDON_LOAD; + switchHistoryDict[streamId][type].reset(); + setPlaybackQuality(type, streamController.getActiveStreamInfo(), switchRequest.representation, switchRequest.reason, switchRequest.rule); + + clearTimeout(abandonmentTimeout); + abandonmentTimeout = setTimeout( + () => { + abandonmentStateDict[streamId][type].state = MetricsConstants.ALLOW_LOAD; + abandonmentTimeout = null; + }, + settings.get().streaming.abandonLoadTimeout + ); + } } /** - * Gets top BitrateInfo for the player - * @param {string} type - 'video' or 'audio' are the type options. 
- * @param {string} streamId - Id of the stream - * @returns {BitrateInfo | null} + * Update dropped frames history when the quality was changed + * @param {object} e + * @private */ - function getTopBitrateInfoFor(type, streamId = null) { - if (!streamId) { - streamId = streamController.getActiveStreamInfo().id; + function _onQualityChangeRendered(e) { + if (e.mediaType === Constants.VIDEO) { + if (playbackRepresentationId !== undefined) { + droppedFramesHistory.push(e.streamId, playbackRepresentationId, videoModel.getPlaybackQuality()); + } + playbackRepresentationId = e.newRepresentation.id; } - if (type && streamProcessorDict && streamProcessorDict[streamId] && streamProcessorDict[streamId][type]) { - const idx = getMaxAllowedIndexFor(type, streamId); - const bitrates = getBitrateList(streamProcessorDict[streamId][type].getMediaInfo()); - return bitrates[idx] ? bitrates[idx] : null; + } + + /** + * When the buffer level is updated we check if we need to change the ABR strategy + * @param e + * @private + */ + function _onMetricAdded(e) { + if (_shouldApplyDynamicAbrStrategy() + && e.metric === MetricsConstants.BUFFER_LEVEL + && (e.mediaType === Constants.AUDIO || e.mediaType === Constants.VIDEO)) { + _updateDynamicAbrStrategy(e.mediaType, 0.001 * e.value.level); } - return null; } /** - * Returns the initial bitrate for a specific media type and stream id + * Returns the initial bitrate for a specific media type * @param {string} type - * @param {string} streamId * @returns {number} A value of the initial bitrate, kbps * @memberof AbrController# */ - function getInitialBitrateFor(type, streamId) { - checkConfig(); + function getInitialBitrateFor(type) { if (type === Constants.TEXT) { return NaN; } - const savedBitrate = domStorage.getSavedBitrateSettings(type); let configBitrate = mediaPlayerModel.getAbrBitrateParameter('initialBitrate', type); - let configRatio = settings.get().streaming.abr.initialRepresentationRatio[type]; - - if (configBitrate === -1) { - 
if (configRatio > -1) { - const streamInfo = streamProcessorDict[streamId][type].getStreamInfo(); - const representation = adapter.getAdaptationForType(streamInfo.index, type, streamInfo).Representation; - if (Array.isArray(representation)) { - const repIdx = Math.max(Math.round(representation.length * configRatio) - 1, 0); - configBitrate = representation[repIdx].bandwidth / 1000; - } else { - configBitrate = 0; - } - } else if (!isNaN(savedBitrate)) { - configBitrate = savedBitrate; - } else { - configBitrate = (type === Constants.VIDEO) ? DEFAULT_VIDEO_BITRATE : DEFAULT_AUDIO_BITRATE; - } + if (configBitrate > 0) { + return configBitrate; } - return configBitrate; + let savedBitrate = NaN; + if (domStorage && domStorage.hasOwnProperty('getSavedBitrateSettings')) { + savedBitrate = domStorage.getSavedBitrateSettings(type); + } + if (!isNaN(savedBitrate)) { + return savedBitrate + } + + const averageThroughput = throughputController.getAverageThroughput(type); + if (!isNaN(averageThroughput) && averageThroughput > 0) { + return averageThroughput + } + + return (type === Constants.VIDEO) ? 
DEFAULT_VIDEO_BITRATE : DEFAULT_BITRATE; } /** @@ -585,7 +598,7 @@ function AbrController() { if (droppedFramesHistory) { const playbackQuality = videoModel.getPlaybackQuality(); if (playbackQuality) { - droppedFramesHistory.push(streamId, playbackIndex, playbackQuality); + droppedFramesHistory.push(streamId, playbackRepresentationId, playbackQuality); } } @@ -594,94 +607,62 @@ function AbrController() { return false; } - const oldQuality = getQualityFor(type, streamId); + const streamProcessor = streamProcessorDict[streamId][type]; + const currentRepresentation = streamProcessor.getRepresentation(); const rulesContext = RulesContext(context).create({ abrController: instance, throughputController, switchHistory: switchHistoryDict[streamId][type], droppedFramesHistory, - streamProcessor: streamProcessorDict[streamId][type], - currentValue: oldQuality, + streamProcessor, videoModel }); - const minIdx = getMinAllowedIndexFor(type, streamId); - const maxIdx = getMaxAllowedIndexFor(type, streamId); - const switchRequest = abrRulesCollection.getMaxQuality(rulesContext); - let newQuality = switchRequest.quality; + const switchRequest = abrRulesCollection.getBestPossibleSwitchRequest(rulesContext); - if (minIdx !== undefined && ((newQuality > SwitchRequest.NO_CHANGE) ? 
newQuality : oldQuality) < minIdx) { - newQuality = minIdx; - } - if (newQuality > maxIdx) { - newQuality = maxIdx; + if (!switchRequest || !switchRequest.representation) { + return false; } - switchHistoryDict[streamId][type].push({ oldValue: oldQuality, newValue: newQuality }); + let newRepresentation = switchRequest.representation; + switchHistoryDict[streamId][type].push({ + oldRepresentation: currentRepresentation, + newRepresentation + }); - if (newQuality > SwitchRequest.NO_CHANGE && newQuality !== oldQuality && (abandonmentStateDict[streamId][type].state === MetricsConstants.ALLOW_LOAD || newQuality < oldQuality)) { - _changeQuality(type, oldQuality, newQuality, maxIdx, switchRequest.reason, streamId); + if (newRepresentation.id !== currentRepresentation.id && (abandonmentStateDict[streamId][type].state === MetricsConstants.ALLOW_LOAD || newRepresentation.absoluteIndex < currentRepresentation.absoluteIndex)) { + _changeQuality(streamId, type, currentRepresentation, newRepresentation, switchRequest.reason, switchRequest.rule); return true; } return false; } catch (e) { + logger.error(e); return false; } } - /** - * Returns the current quality for a specific media type and a specific streamId - * @param {string} type - * @param {string} streamId - * @return {number|*} - */ - function getQualityFor(type, streamId = null) { - try { - if (!streamId) { - streamId = streamController.getActiveStreamInfo().id; - } - if (type && streamProcessorDict[streamId] && streamProcessorDict[streamId][type]) { - let quality; - - if (streamId) { - qualityDict[streamId] = qualityDict[streamId] || {}; - - if (!qualityDict[streamId].hasOwnProperty(type)) { - qualityDict[streamId][type] = QUALITY_DEFAULT; - } - - quality = qualityDict[streamId][type]; - return quality; - } - } - return QUALITY_DEFAULT; - } catch (e) { - return QUALITY_DEFAULT; - } - } - /** * Sets the new playback quality. Starts from index 0. 
* If the index of the new quality is the same as the old one changeQuality will not be called. * @param {string} type * @param {object} streamInfo - * @param {number} newQuality + * @param {Representation} representation * @param {string} reason + * @param {string} rule */ - function setPlaybackQuality(type, streamInfo, newQuality, reason = null) { - if (!streamInfo || !streamInfo.id || !type) { + function setPlaybackQuality(type, streamInfo, representation, reason = null, rule = null) { + if (!streamInfo || !streamInfo.id || !type || !streamProcessorDict || !streamProcessorDict[streamInfo.id] || !streamProcessorDict[streamInfo.id][type] || !representation) { return; } - const streamId = streamInfo.id; - const oldQuality = getQualityFor(type, streamId); - checkInteger(newQuality); + const streamProcessor = streamProcessorDict[streamInfo.id][type]; + const streamId = streamInfo.id; + const currentRepresentation = streamProcessor.getRepresentation(); - const topQualityIdx = getMaxAllowedIndexFor(type, streamId); - if (newQuality !== oldQuality && newQuality >= 0 && newQuality <= topQualityIdx) { - _changeQuality(type, oldQuality, newQuality, topQualityIdx, reason, streamId); + if (!currentRepresentation || representation.id !== currentRepresentation.id) { + _changeQuality(streamId, type, currentRepresentation, representation, reason, rule); } } @@ -698,32 +679,30 @@ function AbrController() { /** * Changes the internal qualityDict values according to the new quality + * @param {string} streamId * @param {string} type - * @param {number} oldQuality - * @param {number} newQuality - * @param {number} maxIdx + * @param {Representation} oldRepresentation + * @param {Representation} newRepresentation * @param {string} reason - * @param {object} streamId * @private */ - function _changeQuality(type, oldQuality, newQuality, maxIdx, reason, streamId) { + function _changeQuality(streamId, type, oldRepresentation, newRepresentation, reason, rule) { if (type && 
streamProcessorDict[streamId] && streamProcessorDict[streamId][type]) { const streamInfo = streamProcessorDict[streamId][type].getStreamInfo(); const bufferLevel = dashMetrics.getCurrentBufferLevel(type); - logger.info('Stream ID: ' + streamId + ' [' + type + '] switch from ' + oldQuality + ' to ' + newQuality + '/' + maxIdx + ' (buffer: ' + bufferLevel + ') ' + (reason ? JSON.stringify(reason) : '.')); + const isAdaptationSetSwitch = oldRepresentation !== null && !adapter.areMediaInfosEqual(oldRepresentation.mediaInfo, newRepresentation.mediaInfo); + + const oldBitrate = oldRepresentation ? oldRepresentation.bitrateInKbit : 0; + logger.info('Stream ID: ' + streamId + ' [' + type + '],' + (rule ? rule : '') + ' switch from bitrate ' + oldBitrate + ' to bitrate ' + newRepresentation.bitrateInKbit + ' (buffer: ' + bufferLevel + ') ' + (reason ? JSON.stringify(reason) : '.')); - qualityDict[streamId] = qualityDict[streamId] || {}; - qualityDict[streamId][type] = newQuality; - const bitrateInfo = _getBitrateInfoForQuality(streamId, type, newQuality); eventBus.trigger(Events.QUALITY_CHANGE_REQUESTED, { - oldQuality, - newQuality, + oldRepresentation: oldRepresentation, + newRepresentation: newRepresentation, reason, streamInfo, - bitrateInfo, - maxIdx, - mediaType: type + mediaType: type, + isAdaptationSetSwitch }, { streamId: streamInfo.id, mediaType: type } ); @@ -734,74 +713,6 @@ function AbrController() { } } - function _getBitrateInfoForQuality(streamId, type, idx) { - if (type && streamProcessorDict && streamProcessorDict[streamId] && streamProcessorDict[streamId][type]) { - const bitrates = getBitrateList(streamProcessorDict[streamId][type].getMediaInfo()); - return bitrates[idx] ? 
bitrates[idx] : null; - } - return null; - } - - /** - * @param {MediaInfo} mediaInfo - * @param {number} bitrate A bitrate value, kbps - * @param {String} streamId Period ID - * @param {number|null} latency Expected latency of connection, ms - * @returns {number} A quality index <= for the given bitrate - * @memberof AbrController# - */ - function getQualityForBitrate(mediaInfo, bitrate, streamId, latency = null) { - const voRepresentation = mediaInfo && mediaInfo.type ? streamProcessorDict[streamId][mediaInfo.type].getRepresentationInfo() : null; - - if (settings.get().streaming.abr.throughput.useDeadTimeLatency && latency && voRepresentation && voRepresentation.fragmentDuration) { - latency = latency / 1000; - const fragmentDuration = voRepresentation.fragmentDuration; - if (latency > fragmentDuration) { - return 0; - } else { - const deadTimeRatio = latency / fragmentDuration; - bitrate = bitrate * (1 - deadTimeRatio); - } - } - - const bitrateList = getBitrateList(mediaInfo); - - for (let i = bitrateList.length - 1; i >= 0; i--) { - const bitrateInfo = bitrateList[i]; - if (bitrate * 1000 >= bitrateInfo.bitrate) { - return i; - } - } - return QUALITY_DEFAULT; - } - - /** - * @param {MediaInfo} mediaInfo - * @returns {Array|null} A list of {@link BitrateInfo} objects - * @memberof AbrController# - */ - function getBitrateList(mediaInfo) { - const infoList = []; - if (!mediaInfo || !mediaInfo.bitrateList) return infoList; - - const bitrateList = mediaInfo.bitrateList; - const type = mediaInfo.type; - - let bitrateInfo; - - for (let i = 0, ln = bitrateList.length; i < ln; i++) { - bitrateInfo = new BitrateInfo(); - bitrateInfo.mediaType = type; - bitrateInfo.qualityIndex = i; - bitrateInfo.bitrate = bitrateList[i].bandwidth; - bitrateInfo.width = bitrateList[i].width; - bitrateInfo.height = bitrateList[i].height; - bitrateInfo.scanType = bitrateList[i].scanType; - infoList.push(bitrateInfo); - } - - return infoList; - } /** * If both BOLA and Throughput Rule are 
active we switch dynamically between both of them @@ -812,6 +723,12 @@ function AbrController() { return settings.get().streaming.abr.activeRules.bolaRule && settings.get().streaming.abr.activeRules.throughputRule } + /** + * Switch between BOLA and ThroughputRule + * @param mediaType + * @param bufferLevel + * @private + */ function _updateDynamicAbrStrategy(mediaType, bufferLevel) { try { const bufferTimeDefault = mediaPlayerModel.getBufferTimeDefault(); @@ -834,39 +751,35 @@ function AbrController() { } } - function updateTopQualityIndex(mediaInfo) { - const type = mediaInfo.type; - const streamId = mediaInfo.streamInfo.id; - const max = mediaInfo.representationCount - 1; - - topQualities[streamId] = topQualities[streamId] || {}; - topQualities[streamId][type] = max; + /** + * Checks if the provided Representation has the lowest possible quality + * @param representation + * @returns {boolean} + */ + function isPlayingAtLowestQuality(representation) { + const voRepresentations = getPossibleVoRepresentations(representation.mediaInfo, true); - return max; + return voRepresentations[0].id === representation.id } - function isPlayingAtTopQuality(streamInfo) { - const streamId = streamInfo ? 
streamInfo.id : null; - const audioQuality = getQualityFor(Constants.AUDIO, streamId); - const videoQuality = getQualityFor(Constants.VIDEO, streamId); + /** + * Checks if the provided Representation has the highest possible quality + * @param representation + * @returns {boolean} + */ + function isPlayingAtTopQuality(representation) { + if (!representation) { + return true; + } + const voRepresentations = getPossibleVoRepresentations(representation.mediaInfo, true); - return (audioQuality === getMaxAllowedIndexFor(Constants.AUDIO, streamId)) && - (videoQuality === getMaxAllowedIndexFor(Constants.VIDEO, streamId)); + return voRepresentations[voRepresentations.length - 1].id === representation.id; } function setWindowResizeEventCalled(value) { windowResizeEventCalled = value; } - function setElementSize() { - if (videoModel) { - const hasPixelRatio = settings.get().streaming.abr.usePixelRatioInLimitBitrateByPortal && window.hasOwnProperty('devicePixelRatio'); - const pixelRatio = hasPixelRatio ? 
window.devicePixelRatio : 1; - elementWidth = videoModel.getClientWidth() * pixelRatio; - elementHeight = videoModel.getClientHeight() * pixelRatio; - } - } - function clearDataForStream(streamId) { if (droppedFramesHistory) { droppedFramesHistory.clearForStream(streamId); @@ -884,26 +797,22 @@ function AbrController() { } instance = { - initialize, - isPlayingAtTopQuality, - updateTopQualityIndex, + checkPlaybackQuality, clearDataForStream, - getBitrateList, - getQualityForBitrate, - getTopBitrateInfoFor, - getMinAllowedIndexFor, - getMaxAllowedIndexFor, - getInitialBitrateFor, - getQualityFor, getAbandonmentStateFor, + getInitialBitrateFor, + getOptimalRepresentationForBitrate, + getPossibleVoRepresentations, + getRepresentationByAbsoluteIndex, + initialize, + isPlayingAtLowestQuality, + isPlayingAtTopQuality, + registerStreamType, + reset, + setConfig, setPlaybackQuality, - checkPlaybackQuality, - setElementSize, setWindowResizeEventCalled, - registerStreamType, unRegisterStreamType, - setConfig, - reset }; setup(); @@ -913,6 +822,5 @@ function AbrController() { AbrController.__dashjs_factory_name = 'AbrController'; const factory = FactoryMaker.getSingletonFactory(AbrController); -factory.QUALITY_DEFAULT = QUALITY_DEFAULT; FactoryMaker.updateSingletonFactory(AbrController.__dashjs_factory_name, factory); export default factory; diff --git a/src/streaming/controllers/BufferController.js b/src/streaming/controllers/BufferController.js index 0301e3b49d..02bfdd3381 100644 --- a/src/streaming/controllers/BufferController.js +++ b/src/streaming/controllers/BufferController.js @@ -57,9 +57,7 @@ function BufferController(config) { const errHandler = config.errHandler; const fragmentModel = config.fragmentModel; const representationController = config.representationController; - const adapter = config.adapter; const textController = config.textController; - const abrController = config.abrController; const playbackController = config.playbackController; const streamInfo 
= config.streamInfo; const type = config.type; @@ -162,16 +160,6 @@ function BufferController(config) { } - /** - * Get the RepresentationInfo for a certain quality. - * @param {number} quality - * @return {object} - * @private - */ - function _getRepresentationInfo(quality) { - return adapter.convertRepresentationToRepresentationInfo(representationController.getRepresentationForQuality(quality)); - } - /** * Creates a SourceBufferSink object * @param {object} mediaInfo @@ -208,9 +196,8 @@ function BufferController(config) { function _initializeSinkForPrebuffering() { return new Promise((resolve, reject) => { - const requiredQuality = abrController.getQualityFor(type, streamInfo.id); sourceBufferSink = PreBufferSink(context).create(_onAppended.bind(this)); - updateBufferTimestampOffset(_getRepresentationInfo(requiredQuality)) + updateBufferTimestampOffset(representationController.getCurrentRepresentation()) .then(() => { resolve(sourceBufferSink); }) @@ -222,15 +209,14 @@ function BufferController(config) { function _initializeSinkForMseBuffering(mediaInfo, oldBufferSinks) { return new Promise((resolve, reject) => { - const requiredQuality = abrController.getQualityFor(type, streamInfo.id); sourceBufferSink = SourceBufferSink(context).create({ mediaSource, textController, eventBus }); - _initializeSink(mediaInfo, oldBufferSinks, requiredQuality) + _initializeSink(mediaInfo, oldBufferSinks) .then(() => { - return updateBufferTimestampOffset(_getRepresentationInfo(requiredQuality)); + return updateBufferTimestampOffset(representationController.getCurrentRepresentation()); }) .then(() => { resolve(sourceBufferSink); @@ -243,13 +229,13 @@ function BufferController(config) { }) } - function _initializeSink(mediaInfo, oldBufferSinks, requiredQuality) { - const selectedRepresentation = _getRepresentationInfo(requiredQuality); + function _initializeSink(mediaInfo, oldBufferSinks) { + const selectedVoRepresentation = representationController.getCurrentRepresentation(); if 
(oldBufferSinks && oldBufferSinks[type] && (type === Constants.VIDEO || type === Constants.AUDIO)) { - return sourceBufferSink.initializeForStreamSwitch(mediaInfo, selectedRepresentation, oldBufferSinks[type]); + return sourceBufferSink.initializeForStreamSwitch(mediaInfo, selectedVoRepresentation, oldBufferSinks[type]); } else { - return sourceBufferSink.initializeForFirstUse(streamInfo, mediaInfo, selectedRepresentation); + return sourceBufferSink.initializeForFirstUse(streamInfo, mediaInfo, selectedVoRepresentation); } } @@ -274,7 +260,7 @@ function BufferController(config) { for (let j = 0; j < chunks.length; j++) { const chunk = chunks[j]; if (chunk.segmentType !== HTTPRequest.INIT_SEGMENT_TYPE) { - const initChunk = initCache.extract(chunk.streamId, chunk.representationId); + const initChunk = initCache.extract(chunk.streamId, chunk.representation.id); if (initChunk) { if (lastInit !== initChunk) { dischargeFragments.push(initChunk); @@ -303,7 +289,7 @@ function BufferController(config) { logger.info('Init fragment finished loading saving to', type + '\'s init cache'); initCache.save(e.chunk); } - logger.debug('Append Init fragment', type, ' with representationId:', e.chunk.representationId, ' and quality:', e.chunk.quality, ', data size:', e.chunk.bytes.byteLength); + logger.debug('Append Init fragment', type, ' with representationId:', e.chunk.representation.id, ' and quality:', e.chunk.quality, ', data size:', e.chunk.bytes.byteLength); _appendToBuffer(e.chunk); } @@ -322,7 +308,7 @@ function BufferController(config) { } // Append init segment into buffer - logger.info('Append Init fragment', type, ' with representationId:', chunk.representationId, ' and quality:', chunk.quality, ', data size:', chunk.bytes.byteLength); + logger.info('Append Init fragment', type, ' with representationId:', chunk.representation.id, ' and quality:', chunk.quality, ', data size:', chunk.bytes.byteLength); _appendToBuffer(chunk); return true; @@ -351,7 +337,7 @@ function 
BufferController(config) { _onAppended(e); }); - if (chunk.mediaInfo.type === Constants.VIDEO) { + if (chunk.representation.mediaInfo.type === Constants.VIDEO) { _triggerEvent(Events.VIDEO_CHUNK_RECEIVED, { chunk: chunk }); } } @@ -416,12 +402,12 @@ function BufferController(config) { if (appendedBytesInfo && !suppressAppendedEvent) { _triggerEvent(Events.BYTES_APPENDED_END_FRAGMENT, { - quality: appendedBytesInfo.quality, startTime: appendedBytesInfo.start, index: appendedBytesInfo.index, bufferedRanges: ranges, segmentType: appendedBytesInfo.segmentType, - mediaType: type + mediaType: type, + representationId: appendedBytesInfo.representation.id }); } } @@ -492,9 +478,6 @@ function BufferController(config) { } } - //********************************************************************** - // START Buffer Level, State & Sufficiency Handling. - //********************************************************************** function prepareForPlaybackSeek() { if (isBufferingCompleted) { setIsBufferingCompleted(false); @@ -504,21 +487,18 @@ function BufferController(config) { return sourceBufferSink.abort(); } - function prepareForReplacementTrackSwitch(codec) { + function prepareForForceReplacementQualitySwitch(voRepresentation) { return new Promise((resolve, reject) => { sourceBufferSink.abort() .then(() => { return updateAppendWindow(); }) .then(() => { - if (settings.get().streaming.buffer.useChangeTypeForTrackSwitch) { - return sourceBufferSink.changeType(codec); - } - - return Promise.resolve(); + return pruneAllSafely(); }) .then(() => { - return pruneAllSafely(); + // In any case we need to update the MSE.timeOffset + return updateBufferTimestampOffset(voRepresentation) }) .then(() => { setIsBufferingCompleted(false); @@ -530,18 +510,21 @@ function BufferController(config) { }); } - function prepareForForceReplacementQualitySwitch(representationInfo) { + function prepareForReplacementTrackSwitch(representation) { return new Promise((resolve, reject) => { 
sourceBufferSink.abort() .then(() => { return updateAppendWindow(); }) .then(() => { - return pruneAllSafely(); + if (settings.get().streaming.buffer.useChangeTypeForTrackSwitch) { + return sourceBufferSink.changeType(representation); + } + + return Promise.resolve(); }) .then(() => { - // In any case we need to update the MSE.timeOffset - return updateBufferTimestampOffset(representationInfo) + return pruneAllSafely(); }) .then(() => { setIsBufferingCompleted(false); @@ -553,12 +536,12 @@ function BufferController(config) { }); } - function prepareForNonReplacementTrackSwitch(codec) { + function prepareForNonReplacementTrackSwitch(selectedRepresentation) { return new Promise((resolve, reject) => { updateAppendWindow() .then(() => { if (settings.get().streaming.buffer.useChangeTypeForTrackSwitch) { - return sourceBufferSink.changeType(codec); + return sourceBufferSink.changeType(selectedRepresentation); } return Promise.resolve(); @@ -1048,15 +1031,15 @@ function BufferController(config) { } } - function updateBufferTimestampOffset(representationInfo) { + function updateBufferTimestampOffset(voRepresentation) { return new Promise((resolve) => { - if (!representationInfo || representationInfo.MSETimeOffset === undefined || !sourceBufferSink || !sourceBufferSink.updateTimestampOffset) { + if (!voRepresentation || voRepresentation.mseTimeOffset === undefined || !sourceBufferSink || !sourceBufferSink.updateTimestampOffset) { resolve(); return; } // Each track can have its own @presentationTimeOffset, so we should set the offset - // if it has changed after switching the quality or updating an mpd - sourceBufferSink.updateTimestampOffset(representationInfo.MSETimeOffset) + // if it has changed after switching the quality or updating an MPD + sourceBufferSink.updateTimestampOffset(voRepresentation.mseTimeOffset) .then(() => { resolve(); }) @@ -1237,36 +1220,36 @@ function BufferController(config) { } instance = { - initialize, - getStreamId, - getType, - 
getBufferControllerType, + appendInitSegmentFromCache, + clearBuffers, createBufferSink, dischargePreBuffer, + getAllRangesWithSafetyFactor, getBuffer, + getBufferControllerType, getBufferLevel, - getRangeAt, - hasBufferAtTime, - pruneBuffer, - setMediaSource, - getMediaSource, - appendInitSegmentFromCache, + getContinuousBufferTimeForTargetTime, getIsBufferingCompleted, - setIsBufferingCompleted, getIsPruningInProgress, - reset, + getMediaSource, + getRangeAt, + getStreamId, + getType, + hasBufferAtTime, + initialize, + prepareForForceReplacementQualitySwitch, + prepareForNonReplacementTrackSwitch, prepareForPlaybackSeek, prepareForReplacementTrackSwitch, - prepareForNonReplacementTrackSwitch, - prepareForForceReplacementQualitySwitch, - updateAppendWindow, - getAllRangesWithSafetyFactor, - getContinuousBufferTimeForTargetTime, - clearBuffers, pruneAllSafely, - updateBufferTimestampOffset, + pruneBuffer, + reset, + segmentRequestingCompleted, + setIsBufferingCompleted, + setMediaSource, setSeekTarget, - segmentRequestingCompleted + updateAppendWindow, + updateBufferTimestampOffset, }; setup(); diff --git a/src/streaming/controllers/FragmentController.js b/src/streaming/controllers/FragmentController.js index b4a9cd7cbc..c07fa2e671 100644 --- a/src/streaming/controllers/FragmentController.js +++ b/src/streaming/controllers/FragmentController.js @@ -118,7 +118,6 @@ function FragmentController(config) { const chunk = new DataChunk(); chunk.streamId = streamId; - chunk.mediaInfo = request.mediaInfo; chunk.segmentType = request.type; chunk.start = request.startTime; chunk.duration = request.duration; @@ -126,7 +125,7 @@ function FragmentController(config) { chunk.bytes = bytes; chunk.index = request.index; chunk.quality = request.quality; - chunk.representationId = request.representationId; + chunk.representation = request.representation; chunk.endFragment = endFragment; return chunk; @@ -139,10 +138,10 @@ function FragmentController(config) { const request = 
e.request; const bytes = e.response; const isInit = request.isInitializationRequest(); - const strInfo = request.mediaInfo.streamInfo; + const strInfo = request.representation.mediaInfo.streamInfo; if (e.error) { - if (request.mediaType === Constants.AUDIO || request.mediaType === Constants.VIDEO || (request.mediaType === Constants.TEXT && request.mediaInfo.isFragmented)) { + if (request.mediaType === Constants.AUDIO || request.mediaType === Constants.VIDEO || (request.mediaType === Constants.TEXT && request.representation.mediaInfo.isFragmented)) { // add service location to blacklist controller - only for audio or video. text should not set errors eventBus.trigger(Events.SERVICE_LOCATION_BASE_URL_BLACKLIST_ADD, { entry: e.request.serviceLocation }); } diff --git a/src/streaming/controllers/MediaController.js b/src/streaming/controllers/MediaController.js index 7f9eaa8898..7269c15fd7 100644 --- a/src/streaming/controllers/MediaController.js +++ b/src/streaming/controllers/MediaController.js @@ -35,6 +35,7 @@ import FactoryMaker from '../../core/FactoryMaker.js'; import Debug from '../../core/Debug.js'; import {bcp47Normalize} from 'bcp-47-normalize'; import {extendedFilter} from 'bcp-47-match'; +import MediaPlayerEvents from '../MediaPlayerEvents.js'; function MediaController() { @@ -47,7 +48,10 @@ function MediaController() { settings, initialSettings, lastSelectedTracks, + lastSelectedRepresentations, customParametersModel, + mediaPlayerModel, + videoModel, domStorage; function setup() { @@ -55,6 +59,56 @@ function MediaController() { reset(); } + function setConfig(config) { + if (!config) return; + + if (config.domStorage) { + domStorage = config.domStorage; + } + + if (config.settings) { + settings = config.settings; + } + + if (config.customParametersModel) { + customParametersModel = config.customParametersModel; + } + + if (config.mediaPlayerModel) { + mediaPlayerModel = config.mediaPlayerModel; + } + + if (config.videoModel) { + videoModel = 
config.videoModel; + } + } + + function initialize() { + _registerEvents(); + } + + function _registerEvents() { + eventBus.on(MediaPlayerEvents.REPRESENTATION_SWITCH, _onRepresentationSwitched, instance); + } + + function _unRegisterEvents() { + eventBus.off(MediaPlayerEvents.REPRESENTATION_SWITCH, _onRepresentationSwitched, instance); + } + + /** + * Save the last selected bitrate for each media type. In case we transition to a new period and have multiple AdaptationSets that we can choose + * from we choose the one with a bitrate closest to the current one. + * @param e + * @private + */ + function _onRepresentationSwitched(e) { + if (!e || !e.currentRepresentation || !e.currentRepresentation.mediaInfo || !e.currentRepresentation.mediaInfo.type) { + return + } + const type = e.currentRepresentation.mediaInfo.type; + lastSelectedRepresentations[type] = e.currentRepresentation; + } + /** * @param {string} type * @param {StreamInfo} streamInfo @@ -62,38 +116,45 @@ function MediaController() { */ function setInitialMediaSettingsForType(type, streamInfo) { let settings = lastSelectedTracks[type] || getInitialSettings(type); - const tracksForType = getTracksFor(type, streamInfo.id); - let tracks = []; + const possibleTracks = getTracksFor(type, streamInfo.id); + let filteredTracks = []; if (!settings) { settings = domStorage.getSavedMediaSettings(type); setInitialSettings(type, settings); } - if (!tracksForType || (tracksForType.length === 0)) return; + if (!possibleTracks || (possibleTracks.length === 0)) return; if (settings) { - tracks = Array.from(tracksForType); - logger.info('Filtering '+tracks.length+' '+type+' tracks based on settings'); + filteredTracks = Array.from(possibleTracks); + logger.info('Filtering ' + filteredTracks.length + ' ' + type + ' tracks based on settings'); - tracks = filterTracksBySettings(tracks, matchSettingsLang, settings); - tracks = filterTracksBySettings(tracks, matchSettingsIndex, settings); - tracks = 
filterTracksBySettings(tracks, matchSettingsViewPoint, settings); + filteredTracks = filterTracksBySettings(filteredTracks, matchSettingsLang, settings); + filteredTracks = filterTracksBySettings(filteredTracks, matchSettingsIndex, settings); + filteredTracks = filterTracksBySettings(filteredTracks, matchSettingsViewPoint, settings); if (!(type === Constants.AUDIO && !!lastSelectedTracks[type])) { - tracks = filterTracksBySettings(tracks, matchSettingsRole, settings); + filteredTracks = filterTracksBySettings(filteredTracks, matchSettingsRole, settings); } - tracks = filterTracksBySettings(tracks, matchSettingsAccessibility, settings); - tracks = filterTracksBySettings(tracks, matchSettingsAudioChannelConfig, settings); - logger.info('Filtering '+type+' tracks ended, found '+tracks.length+' matching track(s).'); + filteredTracks = filterTracksBySettings(filteredTracks, matchSettingsAccessibility, settings); + filteredTracks = filterTracksBySettings(filteredTracks, matchSettingsAudioChannelConfig, settings); + logger.info('Filtering ' + type + ' tracks ended, found ' + filteredTracks.length + ' matching track(s).'); } - if (tracks.length === 0) { - setTrack(selectInitialTrack(type, tracksForType), true); - } else { - if (tracks.length > 1) { - setTrack(selectInitialTrack(type, tracks)); - } else { - setTrack(tracks[0]); + // We did not apply any filter. We can select from all possible tracks + if (filteredTracks.length === 0) { + setTrack(selectInitialTrack(type, possibleTracks)); + } + + // We have some tracks based on the filtering we did. + else { + // More than one possibility + if (filteredTracks.length > 1) { + setTrack(selectInitialTrack(type, filteredTracks)); + } + // Only one possibility use this one + else { + setTrack(filteredTracks[0]); } } } @@ -116,7 +177,7 @@ function MediaController() { const mediaTracks = tracks[streamId][mediaType].list; for (let i = 0, len = mediaTracks.length; i < len; ++i) { //track is already set. 
- if (isTracksEqual(mediaTracks[i], track)) { + if (areTracksEqual(mediaTracks[i], track)) { return; } } @@ -161,7 +222,7 @@ function MediaController() { const type = track.type; const id = track.streamInfo.id; - return (tracks[id] && tracks[id][type] && isTracksEqual(tracks[id][type].current, track)); + return (tracks[id] && tracks[id][type] && areTracksEqual(tracks[id][type].current, track)); } /** @@ -181,7 +242,7 @@ function MediaController() { tracks[id][type].current = track; - if (tracks[id][type].current && ((type !== Constants.TEXT && !isTracksEqual(track, current)) || (type === Constants.TEXT && track.isFragmented))) { + if (tracks[id][type].current && ((type !== Constants.TEXT && !areTracksEqual(track, current)) || (type === Constants.TEXT && track.isFragmented))) { eventBus.trigger(Events.CURRENT_TRACK_CHANGED, { oldMediaInfo: current, newMediaInfo: track, @@ -257,7 +318,7 @@ function MediaController() { * @returns {boolean} * @memberof MediaController# */ - function isTracksEqual(t1, t2) { + function areTracksEqual(t1, t2) { if (!t1 && !t2) { return true; } @@ -277,22 +338,6 @@ function MediaController() { return (sameId && sameCodec && sameViewpoint && sameLang && sameRoles && sameAccessibility && sameAudioChannelConfiguration); } - function setConfig(config) { - if (!config) return; - - if (config.domStorage) { - domStorage = config.domStorage; - } - - if (config.settings) { - settings = config.settings; - } - - if (config.customParametersModel) { - customParametersModel = config.customParametersModel; - } - } - /** * @memberof MediaController# @@ -300,7 +345,9 @@ function MediaController() { function reset() { tracks = {}; lastSelectedTracks = {}; + lastSelectedRepresentations = {}; resetInitialSettings(); + _unRegisterEvents(); } function extractSettings(mediaInfo) { @@ -327,7 +374,7 @@ function MediaController() { if (tracksAfterMatcher.length !== 0) { return tracksAfterMatcher; } else { - logger.info('Filter-Function ('+filterFn.name+') resulted 
in no tracks; setting ignored'); + logger.info('Filter-Function (' + filterFn.name + ') resulted in no tracks; setting ignored'); } return tracks; } @@ -515,42 +562,166 @@ function MediaController() { return result; } - function selectInitialTrack(type, tracks) { - if (type === Constants.TEXT) return tracks[0]; + function selectInitialTrack(type, mediaInfos) { + if (type === Constants.TEXT) return mediaInfos[0]; - let mode = settings.get().streaming.selectionModeForInitialTrack; let tmpArr; const customInitialTrackSelectionFunction = customParametersModel.getCustomInitialTrackSelectionFunction(); + tmpArr = _initialFilterMediaInfosByAllowedSettings(mediaInfos); + + // If we have a custom function that selects the track we use this one if (customInitialTrackSelectionFunction && typeof customInitialTrackSelectionFunction === 'function') { - tmpArr = customInitialTrackSelectionFunction(tracks); - } else { + tmpArr = customInitialTrackSelectionFunction(tmpArr); + } + + // If we know the current selected bitrate for the media type we select the AdaptationSet that comes closest to this. This should only be relevant for multiperiod when we transition to the next period. 
+ else if (lastSelectedRepresentations[type]) { + tmpArr = _trackSelectionModeClosestBitrate(tmpArr, type) + } + + // Use the track selection function that is defined in the settings + else { + let mode = settings.get().streaming.selectionModeForInitialTrack; switch (mode) { case Constants.TRACK_SELECTION_MODE_HIGHEST_SELECTION_PRIORITY: - tmpArr = _trackSelectionModeHighestSelectionPriority(tracks); + tmpArr = _trackSelectionModeHighestSelectionPriority(tmpArr); break; case Constants.TRACK_SELECTION_MODE_HIGHEST_BITRATE: - tmpArr = _trackSelectionModeHighestBitrate(tracks); + tmpArr = _trackSelectionModeHighestBitrate(tmpArr); break; case Constants.TRACK_SELECTION_MODE_FIRST_TRACK: - tmpArr = _trackSelectionModeFirstTrack(tracks); + tmpArr = _trackSelectionModeFirstTrack(tmpArr); break; case Constants.TRACK_SELECTION_MODE_HIGHEST_EFFICIENCY: - tmpArr = _trackSelectionModeHighestEfficiency(tracks); + tmpArr = _trackSelectionModeHighestEfficiency(tmpArr); break; case Constants.TRACK_SELECTION_MODE_WIDEST_RANGE: - tmpArr = _trackSelectionModeWidestRange(tracks); + tmpArr = _trackSelectionModeWidestRange(tmpArr); break; default: logger.warn(`Track selection mode ${mode} is not supported. Falling back to TRACK_SELECTION_MODE_FIRST_TRACK`); - tmpArr = _trackSelectionModeFirstTrack(tracks); + tmpArr = _trackSelectionModeFirstTrack(tmpArr); break; } } - return tmpArr.length > 0 ? tmpArr[0] : tracks[0]; + return tmpArr.length > 0 ? tmpArr[0] : mediaInfos[0]; } + /** + * @param {MediaInfo[]} mediaInfos + * @return {MediaInfo[]} + */ + function _initialFilterMediaInfosByAllowedSettings(mediaInfos) { + try { + let tmpArr; + + tmpArr = _filterMediaInfosByPossibleBitrate(mediaInfos); + tmpArr = _filterMediaInfosByPortalSize(tmpArr); + + return tmpArr; + } catch (e) { + logger.error(e); + return mediaInfos + } + } + + /** + * Returns all MediaInfo objects that have at least one bitrate that fulfills the constraint. + * If all fail the constraint we return the original array. 
+ * @param {MediaInfo[]} mediaInfos + * @return {MediaInfo[]} + */ + function _filterMediaInfosByPossibleBitrate(mediaInfos) { + try { + const filteredArray = mediaInfos.filter((mediaInfo) => { + const type = mediaInfo.type; + + return mediaInfo.bitrateList.some((bitrateInfo) => { + const maxBitrate = mediaPlayerModel.getAbrBitrateParameter('maxBitrate', type); + const minBitrate = mediaPlayerModel.getAbrBitrateParameter('minBitrate', type); + + if (maxBitrate > -1 && bitrateInfo.bandwidth > maxBitrate * 1000) { + return false; + } + + return !(minBitrate > -1 && bitrateInfo.bandwidth < minBitrate * 1000); + }) + + }) + + if (filteredArray.length > 0) { + return filteredArray + } + + return mediaInfos + } catch (e) { + logger.error(e); + return mediaInfos + } + } + + /** + * @param {MediaInfo[]} mediaInfos + * @return {MediaInfo[]} + * @private + */ + function _filterMediaInfosByPortalSize(mediaInfos) { + try { + if (!settings.get().streaming.abr.limitBitrateByPortal) { + return mediaInfos; + } + + const { elementWidth } = videoModel.getVideoElementSize(); + + const filteredArray = mediaInfos.filter((mediaInfo) => { + return mediaInfo.type !== Constants.VIDEO || mediaInfo.bitrateList.some((bitrateInfo) => { + return bitrateInfo.width <= elementWidth + }); + }) + + if (filteredArray.length > 0) { + return filteredArray + } + + return mediaInfos + } catch (e) { + logger.error(e); + return mediaInfos + } + } + + /** + * Find the track that has a bitrate that matches the currenly selected one + * @param tracks + * @param type + * @returns {*} + * @private + */ + function _trackSelectionModeClosestBitrate(tracks, type) { + if (!tracks || tracks.length === 0 || !type || !lastSelectedRepresentations[type]) { + return tracks + } + + const targetBitrate = lastSelectedRepresentations[type].bandwidth; + if (!targetBitrate || isNaN(targetBitrate)) { + return tracks; + } + + let current = { min: NaN, track: null }; + tracks.forEach((track) => { + 
track.bitrateList.forEach((entry) => { + const diff = Math.abs(entry.bandwidth - targetBitrate); + if (isNaN(current.min) || diff < current.min) { + current.min = diff; + current.track = track; + } + }) + }) + + return current.track ? [current.track] : tracks + } function _trackSelectionModeHighestSelectionPriority(tracks) { let tmpArr = getTracksWithHighestSelectionPriority(tracks); @@ -600,7 +771,7 @@ function MediaController() { return tmpArr; } - function _compareDescriptorType(v1,v2) { + function _compareDescriptorType(v1, v2) { if (v1 && v2) { let t1 = JSON.stringify({ schemeIdUri: v1.schemeIdUri, @@ -643,29 +814,30 @@ function MediaController() { } instance = { - setInitialMediaSettingsForType, addTrack, - getTracksFor, + areTracksEqual, getCurrentTrackFor, - isCurrentTrack, - setTrack, - selectInitialTrack, - setInitialSettings, getInitialSettings, + getTracksFor, getTracksWithHighestBitrate, getTracksWithHighestEfficiency, getTracksWithWidestRange, - isTracksEqual, + initialize, + isCurrentTrack, matchSettings, - matchSettingsLang, - matchSettingsIndex, - matchSettingsViewPoint, - matchSettingsRole, matchSettingsAccessibility, matchSettingsAudioChannelConfig, + matchSettingsIndex, + matchSettingsLang, + matchSettingsRole, + matchSettingsViewPoint, + reset, saveTextSettingsDisabled, + selectInitialTrack, setConfig, - reset + setInitialMediaSettingsForType, + setInitialSettings, + setTrack, }; setup(); diff --git a/src/streaming/controllers/PlaybackController.js b/src/streaming/controllers/PlaybackController.js index b6c7195e93..8863838987 100644 --- a/src/streaming/controllers/PlaybackController.js +++ b/src/streaming/controllers/PlaybackController.js @@ -570,8 +570,8 @@ function PlaybackController() { } function _onDataUpdateCompleted(e) { - const representationInfo = adapter.convertRepresentationToRepresentationInfo(e.currentRepresentation); - const info = representationInfo ? 
representationInfo.mediaInfo.streamInfo : null; + const voRepresentation = e.currentRepresentation; + const info = voRepresentation ? voRepresentation.mediaInfo.streamInfo : null; if (info === null || streamInfo.id !== info.id) return; streamInfo = info; diff --git a/src/streaming/controllers/ScheduleController.js b/src/streaming/controllers/ScheduleController.js index fb6c9866d3..5ffc5ed5cf 100644 --- a/src/streaming/controllers/ScheduleController.js +++ b/src/streaming/controllers/ScheduleController.js @@ -60,8 +60,7 @@ function ScheduleController(config) { scheduleTimeout, hasVideoTrack, lastFragmentRequest, - topQualityIndex, - lastInitializedQuality, + lastInitializedRepresentationId, switchTrack, initSegmentRequired, checkPlaybackQuality; @@ -108,19 +107,6 @@ function ScheduleController(config) { } } - function hasTopQualityChanged() { - const streamId = streamInfo.id; - const newTopQualityIndex = abrController.getMaxAllowedIndexFor(type, streamId); - - if (isNaN(topQualityIndex) || topQualityIndex != newTopQualityIndex) { - logger.info('Top quality ' + type + ' index has changed from ' + topQualityIndex + ' to ' + newTopQualityIndex); - topQualityIndex = newTopQualityIndex; - return true; - } - return false; - - } - /** * Schedule the request for an init or a media segment */ @@ -156,18 +142,18 @@ function ScheduleController(config) { * @private */ function _getNextFragment() { - const currentRepresentationInfo = representationController.getCurrentRepresentationInfo(); + const currentRepresentation = representationController.getCurrentRepresentation(); // A quality changed occured or we are switching the AdaptationSet. 
In that case we need to load a new init segment - if (initSegmentRequired || currentRepresentationInfo.quality !== lastInitializedQuality || switchTrack) { + if (initSegmentRequired || currentRepresentation.id !== lastInitializedRepresentationId || switchTrack) { if (switchTrack) { - logger.debug('Switch track for ' + type + ', representation id = ' + currentRepresentationInfo.id); + logger.debug('Switch track for ' + type + ', representation id = ' + currentRepresentation.id); switchTrack = false; } else { - logger.debug('Quality has changed, get init request for representationid = ' + currentRepresentationInfo.id); + logger.debug('Quality has changed, get init request for representationid = ' + currentRepresentation.id); } eventBus.trigger(Events.INIT_FRAGMENT_NEEDED, - { representationId: currentRepresentationInfo.id, sender: instance }, + { representationId: currentRepresentation.id, sender: instance }, { streamId: streamInfo.id, mediaType: type } ); checkPlaybackQuality = false; @@ -207,8 +193,8 @@ function ScheduleController(config) { */ function _shouldScheduleNextRequest() { try { - const currentRepresentationInfo = representationController.getCurrentRepresentationInfo(); - return currentRepresentationInfo && (isNaN(lastInitializedQuality) || switchTrack || hasTopQualityChanged() || _shouldBuffer()); + const currentRepresentation = representationController.getCurrentRepresentation(); + return currentRepresentation && (lastInitializedRepresentationId == null || switchTrack || _shouldBuffer()); } catch (e) { return false; } @@ -220,8 +206,8 @@ function ScheduleController(config) { * @private */ function _shouldBuffer() { - const currentRepresentationInfo = representationController.getCurrentRepresentationInfo(); - if (!type || !currentRepresentationInfo) { + const currentRepresentation = representationController.getCurrentRepresentation(); + if (!type || !currentRepresentation) { return true; } const bufferLevel = dashMetrics.getCurrentBufferLevel(type); @@ 
-234,9 +220,9 @@ function ScheduleController(config) { */ function getBufferTarget() { let bufferTarget = NaN; - const currentRepresentationInfo = representationController.getCurrentRepresentationInfo(); + const currentRepresentation = representationController.getCurrentRepresentation(); - if (!type || !currentRepresentationInfo) { + if (!type || !currentRepresentation) { return bufferTarget; } @@ -259,14 +245,14 @@ function ScheduleController(config) { function _getBufferTargetForFragmentedText() { try { if (textController.isTextEnabled()) { - const currentRepresentationInfo = representationController.getCurrentRepresentationInfo(); - if (isNaN(currentRepresentationInfo.fragmentDuration)) { //fragmentDuration of currentRepresentationInfo is not defined, + const currentRepresentation = representationController.getCurrentRepresentation(); + if (isNaN(currentRepresentation.fragmentDuration)) { // call metrics function to have data in the latest scheduling info... // if no metric, returns 0. In this case, rule will return false. const schedulingInfo = dashMetrics.getCurrentSchedulingInfo(MetricsConstants.SCHEDULING_INFO); return schedulingInfo ? schedulingInfo.duration : 0; } else { - return currentRepresentationInfo.fragmentDuration; + return currentRepresentation.fragmentDuration; } } else { // text is disabled, rule will return false return 0; @@ -284,15 +270,15 @@ function ScheduleController(config) { function _getBufferTargetForAudio() { try { const videoBufferLevel = dashMetrics.getCurrentBufferLevel(Constants.VIDEO); - const currentRepresentationInfo = representationController.getCurrentRepresentationInfo(); + const currentRepresentation = representationController.getCurrentRepresentation(); // For multiperiod we need to consider that audio and video segments might have different durations. // This can lead to scenarios in which we completely buffered the video segments and the video buffer level for the current period is not changing anymore. 
However we might still need a small audio segment to finish buffering audio as well. // If we set the buffer time of audio equal to the video buffer time scheduling for the remaining audio segment will only be triggered when audio fragmentDuration > videoBufferLevel. That will delay preloading of the upcoming period. // Should find a better solution than just adding 1 - if (isNaN(currentRepresentationInfo.fragmentDuration)) { + if (isNaN(currentRepresentation.fragmentDuration)) { return videoBufferLevel + 1; } else { - return Math.max(videoBufferLevel + 1, currentRepresentationInfo.fragmentDuration); + return Math.max(videoBufferLevel + 1, currentRepresentation.fragmentDuration); } } catch (e) { return 0; @@ -300,15 +286,15 @@ function ScheduleController(config) { } /** - * Determines the generic buffer target, for instance for video tracks + * Determines the generic buffer target, for instance for video tracks or when we got an audio only stream * @return {number} * @private */ function _getGenericBufferTarget() { try { - const currentRepresentationInfo = representationController.getCurrentRepresentationInfo(); - const streamInfo = currentRepresentationInfo.mediaInfo.streamInfo; - if (abrController.isPlayingAtTopQuality(streamInfo)) { + const currentRepresentation = representationController.getCurrentRepresentation(); + const streamInfo = currentRepresentation.mediaInfo.streamInfo; + if (abrController.isPlayingAtTopQuality(currentRepresentation)) { const isLongFormContent = streamInfo.manifestInfo.duration >= settings.get().streaming.buffer.longFormContentDurationThreshold; return isLongFormContent ? 
settings.get().streaming.buffer.bufferTimeAtTopQualityLongForm : settings.get().streaming.buffer.bufferTimeAtTopQuality; } else { @@ -340,34 +326,30 @@ function ScheduleController(config) { })[0]; if (item && playbackController.getTime() >= item.startTime) { - if ((!lastFragmentRequest.mediaInfo || (item.mediaInfo.type === lastFragmentRequest.mediaInfo.type && item.mediaInfo.index !== lastFragmentRequest.mediaInfo.index)) && trigger) { + if ((!lastFragmentRequest.representation || (item.representation.mediaInfo.type === lastFragmentRequest.representation.mediaInfo.type && item.representation.mediaInfo.index !== lastFragmentRequest.representation.mediaInfo.index)) && trigger) { + logger.debug(`Track change rendered for streamId ${streamInfo.id} and type ${type}`); eventBus.trigger(Events.TRACK_CHANGE_RENDERED, { mediaType: type, - oldMediaInfo: lastFragmentRequest.mediaInfo, - newMediaInfo: item.mediaInfo, + oldMediaInfo: lastFragmentRequest && lastFragmentRequest.representation && lastFragmentRequest.representation.mediaInfo ? lastFragmentRequest.representation.mediaInfo : null, + newMediaInfo: item.representation.mediaInfo, streamId: streamInfo.id }); } - if ((item.quality !== lastFragmentRequest.quality || item.adaptationIndex !== lastFragmentRequest.adaptationIndex) && trigger) { + if ((!lastFragmentRequest.representation || (item.representation.id !== lastFragmentRequest.representation.id)) && trigger) { logger.debug(`Quality change rendered for streamId ${streamInfo.id} and type ${type}`); eventBus.trigger(Events.QUALITY_CHANGE_RENDERED, { mediaType: type, - oldQuality: lastFragmentRequest.quality, - newQuality: item.quality, + oldRepresentation: lastFragmentRequest.representation ? 
lastFragmentRequest.representation : null, + newRepresentation: item.representation, streamId: streamInfo.id }); } - lastFragmentRequest = { - mediaInfo: item.mediaInfo, - quality: item.quality, - adaptationIndex: item.adaptationIndex - }; + lastFragmentRequest.representation = item.representation } } } - function _onURLResolutionFailed() { fragmentModel.abortRequests(); clearScheduleTimer(); @@ -399,20 +381,17 @@ function ScheduleController(config) { initSegmentRequired = value; } - function setLastInitializedQuality(value) { - lastInitializedQuality = value; + function setLastInitializedRepresentationId(value) { + lastInitializedRepresentationId = value; } function resetInitialSettings() { checkPlaybackQuality = true; timeToLoadDelay = 0; - lastInitializedQuality = NaN; + lastInitializedRepresentationId = null; lastFragmentRequest = { - mediaInfo: undefined, - quality: NaN, - adaptationIndex: NaN + representation: null, }; - topQualityIndex = NaN; switchTrack = false; initSegmentRequired = false; } @@ -448,7 +427,7 @@ function ScheduleController(config) { getPlaybackController, setCheckPlaybackQuality, setInitSegmentRequired, - setLastInitializedQuality, + setLastInitializedRepresentationId, }; setup(); diff --git a/src/streaming/controllers/StreamController.js b/src/streaming/controllers/StreamController.js index 9fc8e4e434..d3db560123 100644 --- a/src/streaming/controllers/StreamController.js +++ b/src/streaming/controllers/StreamController.js @@ -204,7 +204,7 @@ function StreamController() { * @private */ function _onTimeSyncCompleted( /*e*/) { - _composeStreams(); + _composePeriods(); } /** @@ -219,7 +219,7 @@ function StreamController() { * Setup the stream objects after the stream start and each MPD reload. 
This function is called after the UTC sync has been done (TIME_SYNCHRONIZATION_COMPLETED) * @private */ - function _composeStreams() { + function _composePeriods() { try { const streamsInfo = adapter.getStreamsInfo(); @@ -652,8 +652,10 @@ function StreamController() { try { // Seamless period switch allowed only if: // - none of the periods uses contentProtection. - // - AND changeType method implemented by browser or periods use the same codec. - return (settings.get().streaming.buffer.reuseExistingSourceBuffers && (previousStream.isProtectionCompatible(nextStream) || firstLicenseIsFetched) && (supportsChangeType && settings.get().streaming.buffer.useChangeTypeForTrackSwitch || previousStream.isMediaCodecCompatible(nextStream, previousStream))); + // - AND changeType method is implemented + return (settings.get().streaming.buffer.reuseExistingSourceBuffers + && (capabilities.isProtectionCompatible(previousStream, nextStream) || firstLicenseIsFetched) + && (supportsChangeType && settings.get().streaming.buffer.useChangeTypeForTrackSwitch)); } catch (e) { return false; } @@ -756,7 +758,7 @@ function StreamController() { } /** - * When the quality is changed in the currently active stream and we do an aggressive replacement we must stop prebuffering. This is similar to a replacing track switch + * When the quality is changed in the currently active stream, and we do an aggressive replacement we must stop prebuffering. This is similar to a replacing track switch * Otherwise preloading can go on. 
* @param e * @private @@ -1219,56 +1221,14 @@ function StreamController() { */ function _getFragmentDurationForLiveDelayCalculation(streamInfos, manifestInfo) { try { - let fragmentDuration = NaN; + let segmentDuration = NaN; // We use the maxFragmentDuration attribute if present if (manifestInfo && !isNaN(manifestInfo.maxFragmentDuration) && isFinite(manifestInfo.maxFragmentDuration)) { return manifestInfo.maxFragmentDuration; } - // For single period manifests we can iterate over all AS and use the maximum segment length - if (streamInfos && streamInfos.length === 1) { - const streamInfo = streamInfos[0]; - const mediaTypes = [Constants.VIDEO, Constants.AUDIO, Constants.TEXT]; - - - const fragmentDurations = mediaTypes - .reduce((acc, mediaType) => { - const mediaInfo = adapter.getMediaInfoForType(streamInfo, mediaType); - - if (mediaInfo && mediaInfo.isFragmented !== false) { - acc.push(mediaInfo); - } - - return acc; - }, []) - .reduce((acc, mediaInfo) => { - const voRepresentations = adapter.getVoRepresentations(mediaInfo); - - if (voRepresentations && voRepresentations.length > 0) { - voRepresentations.forEach((voRepresentation) => { - if (voRepresentation) { - acc.push(voRepresentation); - } - }); - } - - return acc; - }, []) - .reduce((acc, voRepresentation) => { - const representation = adapter.convertRepresentationToRepresentationInfo(voRepresentation); - - if (representation && representation.fragmentDuration && !isNaN(representation.fragmentDuration)) { - acc.push(representation.fragmentDuration); - } - - return acc; - }, []); - - fragmentDuration = Math.max(...fragmentDurations); - } - - return isFinite(fragmentDuration) ? fragmentDuration : NaN; + return isFinite(segmentDuration) ? segmentDuration : NaN; } catch (e) { return NaN; } @@ -1276,33 +1236,33 @@ function StreamController() { /** * Callback handler after the manifest has been updated. Trigger an update in the adapter and filter unsupported stuff. 
- * Finally attempt UTC sync + * Finally, attempt UTC sync * @param {object} e * @private */ function _onManifestUpdated(e) { if (!e.error) { logger.info('Manifest updated... updating data system wide.'); + //Since streams are not composed yet , need to manually look up useCalculatedLiveEdgeTime to detect if stream //is SegmentTimeline to avoid using time source const manifest = e.manifest; adapter.updatePeriods(manifest); - let manifestUTCTimingSources = adapter.getUTCTimingSources(); - - if (adapter.getIsDynamic() && (!manifestUTCTimingSources || manifestUTCTimingSources.length === 0)) { - eventBus.trigger(MediaPlayerEvents.CONFORMANCE_VIOLATION, { - level: ConformanceViolationConstants.LEVELS.WARNING, - event: ConformanceViolationConstants.EVENTS.NO_UTC_TIMING_ELEMENT - }); - } - - let allUTCTimingSources = (!adapter.getIsDynamic()) ? manifestUTCTimingSources : manifestUTCTimingSources.concat(customParametersModel.getUTCTimingSources()); - // It is important to filter before initializing the baseUrlController. Otherwise we might end up with wrong references in case we remove AdaptationSets. capabilitiesFilter.filterUnsupportedFeatures(manifest) .then(() => { baseURLController.initialize(manifest); + + let manifestUTCTimingSources = adapter.getUTCTimingSources(); + if (adapter.getIsDynamic() && (!manifestUTCTimingSources || manifestUTCTimingSources.length === 0)) { + eventBus.trigger(MediaPlayerEvents.CONFORMANCE_VIOLATION, { + level: ConformanceViolationConstants.LEVELS.WARNING, + event: ConformanceViolationConstants.EVENTS.NO_UTC_TIMING_ELEMENT + }); + } + + let allUTCTimingSources = (!adapter.getIsDynamic()) ? 
manifestUTCTimingSources : manifestUTCTimingSources.concat(customParametersModel.getUTCTimingSources()); timeSyncController.attemptSync(allUTCTimingSources, adapter.getIsDynamic()); }); } else { @@ -1603,27 +1563,27 @@ function StreamController() { } instance = { - initialize, - getActiveStreamInfo, addDVRMetric, - hasVideoTrack, - hasAudioTrack, + getActiveStream, + getActiveStreamInfo, + getActiveStreamProcessors, + getAutoPlay, + getHasMediaOrInitialisationError, + getInitialPlayback, + getIsStreamSwitchInProgress, getStreamById, getStreamForTime, + getStreams, getTimeRelativeToStreamId, + hasAudioTrack, + hasVideoTrack, + initialize, load, loadWithManifest, - getActiveStreamProcessors, + reset, setConfig, setProtectionData, - getIsStreamSwitchInProgress, switchToVideoElement, - getHasMediaOrInitialisationError, - getStreams, - getActiveStream, - getInitialPlayback, - getAutoPlay, - reset }; setup(); diff --git a/src/streaming/models/AastLowLatencyThroughputModel.js b/src/streaming/models/AastLowLatencyThroughputModel.js index bab75ae7db..0ccd1b5a26 100644 --- a/src/streaming/models/AastLowLatencyThroughputModel.js +++ b/src/streaming/models/AastLowLatencyThroughputModel.js @@ -228,7 +228,7 @@ function AastLowLatencyThroughputModel() { if (request && request.mediaType && !measurements[request.mediaType]) { measurements[request.mediaType] = []; } - const bitrateEntry = request.mediaInfo.bitrateList.find(item => item.id === request.representationId); + const bitrateEntry = request.representation.mediaInfo.bitrateList.find(item => item.id === request.representation.id); measurements[request.mediaType].push({ index: request.index, repId: request.representationId, @@ -239,7 +239,7 @@ function AastLowLatencyThroughputModel() { chunksDurationMS: chunkMeasurements.reduce((prev, curr) => prev + curr.chunkDownloadDurationMS, 0), segmentBytes: chunkMeasurements.reduce((prev, curr) => prev + curr.chunkBytes, 0), bitrate: bitrateEntry && bitrateEntry.bandwidth, - 
bitrateList: request.mediaInfo.bitrateList, + bitrateList: request.representation.mediaInfo.bitrateList, chunkMeasurements, fetchDownloadDurationMS, throughputCapacityDelayMS, diff --git a/src/streaming/models/CmcdModel.js b/src/streaming/models/CmcdModel.js index d01727c687..065260e481 100644 --- a/src/streaming/models/CmcdModel.js +++ b/src/streaming/models/CmcdModel.js @@ -194,7 +194,6 @@ function CmcdModel() { function _getCmcdData(request) { try { let cmcdData = null; - if (request.type === HTTPRequest.MPD_TYPE) { return _getCmcdDataForMpd(request); } else if (request.type === HTTPRequest.MEDIA_SEGMENT_TYPE) { @@ -237,7 +236,7 @@ function CmcdModel() { const mtp = _getMeasuredThroughputByType(request.mediaType); const dl = _getDeadlineByType(request.mediaType); const bl = _getBufferLevelByType(request.mediaType); - const tb = _getTopBitrateByType(request.mediaType); + const tb = _getTopBitrateByType(request.representation.mediaInfo); const pr = internalData.pr; const nextRequest = _probeNextRequest(request.mediaType); @@ -246,7 +245,7 @@ function CmcdModel() { if (request.mediaType === Constants.VIDEO) ot = CmcdObjectType.VIDEO; if (request.mediaType === Constants.AUDIO) ot = CmcdObjectType.AUDIO; if (request.mediaType === Constants.TEXT) { - if (request.mediaInfo.mimeType === 'application/mp4') { + if (request.representation.mediaInfo.mimeType === 'application/mp4') { ot = CmcdObjectType.TIMED_TEXT; } else { ot = CmcdObjectType.CAPTION; @@ -379,19 +378,18 @@ function CmcdModel() { function _getBitrateByRequest(request) { try { - const quality = request.quality; - const bitrateList = request.mediaInfo.bitrateList; - - return parseInt(bitrateList[quality].bandwidth / 1000); + return parseInt(request.bandwidth / 1000); } catch (e) { return null; } } - function _getTopBitrateByType(mediaType) { + function _getTopBitrateByType(mediaInfo) { try { - const info = abrController.getTopBitrateInfoFor(mediaType); - return Math.round(info.bitrate / 1000); + const bitrates 
= abrController.getPossibleVoRepresentations(mediaInfo).map((rep) => { + return rep.bitrateInKbit + }); + return Math.max(...bitrates) } catch (e) { return null; } @@ -508,17 +506,17 @@ function CmcdModel() { // Get the values we need let playbackRate = playbackController.getPlaybackRate(); if (!playbackRate) playbackRate = 1; - let {quality, mediaType, mediaInfo, duration} = request; + let { bandwidth, mediaType, representation, duration } = request; + const mediaInfo = representation.mediaInfo if (!mediaInfo) { return NaN; } let currentBufferLevel = _getBufferLevelByType(mediaType); if (currentBufferLevel === 0) currentBufferLevel = 500; - let bitrate = mediaInfo.bitrateList[quality].bandwidth; // Calculate RTP - let segmentSize = (bitrate * duration) / 1000; // Calculate file size in kilobits + let segmentSize = (bandwidth * duration) / 1000; // Calculate file size in kilobits let timeToLoad = (currentBufferLevel / playbackRate) / 1000; // Calculate time available to load file in seconds let minBandwidth = segmentSize / timeToLoad; // Calculate the exact bandwidth required let rtpSafetyFactor = settings.get().streaming.cmcd.rtpSafetyFactor && !isNaN(settings.get().streaming.cmcd.rtpSafetyFactor) ? 
settings.get().streaming.cmcd.rtpSafetyFactor : RTP_SAFETY_FACTOR; diff --git a/src/streaming/models/FragmentModel.js b/src/streaming/models/FragmentModel.js index f7262810cb..efdc1b728b 100644 --- a/src/streaming/models/FragmentModel.js +++ b/src/streaming/models/FragmentModel.js @@ -79,7 +79,7 @@ function FragmentModel(config) { }; const isEqualInit = function (req1, req2) { - return isNaN(req1.index) && isNaN(req2.index) && (req1.quality === req2.quality); + return req1.representation.id === req2.representation.id; }; const check = function (requests) { @@ -331,24 +331,19 @@ function FragmentModel(config) { resetInitialSettings(); } - function addExecutedRequest(request) { - executedRequests.push(request); - } - instance = { + abortRequests, + executeRequest, + getRequests, getStreamId, getType, - getRequests, isFragmentLoaded, isFragmentLoadedOrPending, - removeExecutedRequestsBeforeTime, removeExecutedRequestsAfterTime, - syncExecutedRequestsWithBufferedRange, - abortRequests, - executeRequest, + removeExecutedRequestsBeforeTime, reset, resetInitialSettings, - addExecutedRequest + syncExecutedRequestsWithBufferedRange, }; setup(); diff --git a/src/streaming/models/MetricsModel.js b/src/streaming/models/MetricsModel.js index 3a739041ef..1d8b5a2229 100644 --- a/src/streaming/models/MetricsModel.js +++ b/src/streaming/models/MetricsModel.js @@ -266,7 +266,7 @@ function MetricsModel(config) { pushAndNotify(mediaType, MetricsConstants.DROPPED_FRAMES, vo); } - function addSchedulingInfo(mediaType, t, type, startTime, availabilityStartTime, duration, quality, range, state) { + function addSchedulingInfo(mediaType, t, type, startTime, availabilityStartTime, duration, bandwidth, range, state) { let vo = new SchedulingInfo(); vo.mediaType = mediaType; @@ -276,7 +276,7 @@ function MetricsModel(config) { vo.startTime = startTime; vo.availabilityStartTime = availabilityStartTime; vo.duration = duration; - vo.quality = quality; + vo.bandwidth = bandwidth; vo.range = range; 
vo.state = state; @@ -294,19 +294,13 @@ function MetricsModel(config) { metricAdded(mediaType, MetricsConstants.REQUESTS_QUEUE, vo); } - function addManifestUpdate(mediaType, type, requestTime, fetchTime, availabilityStartTime, presentationStartTime, clientTimeOffset, currentTime, buffered, latency) { + function addManifestUpdate(mediaType, type, requestTime, fetchTime) { let vo = new ManifestUpdate(); vo.mediaType = mediaType; vo.type = type; vo.requestTime = requestTime; // when this manifest update was requested vo.fetchTime = fetchTime; // when this manifest update was received - vo.availabilityStartTime = availabilityStartTime; - vo.presentationStartTime = presentationStartTime; // the seek point (liveEdge for dynamic, Stream[0].startTime for static) - vo.clientTimeOffset = clientTimeOffset; // the calculated difference between the server and client wall clock time - vo.currentTime = currentTime; // actual element.currentTime - vo.buffered = buffered; // actual element.ranges - vo.latency = latency; // (static is fixed value of zero. dynamic should be ((Now-@availabilityStartTime) - currentTime) pushMetrics(Constants.STREAM, MetricsConstants.MANIFEST_UPDATE, vo); metricAdded(mediaType, MetricsConstants.MANIFEST_UPDATE, vo); @@ -336,17 +330,15 @@ function MetricsModel(config) { } } - function addManifestUpdateRepresentationInfo(manifestUpdate, id, index, streamIndex, mediaType, presentationTimeOffset, startNumber, fragmentInfoType) { + function addManifestUpdateRepresentationInfo(manifestUpdate, representation, mediaType) { if (manifestUpdate && manifestUpdate.representationInfo) { const vo = new ManifestUpdateRepresentationInfo(); - vo.id = id; - vo.index = index; - vo.streamIndex = streamIndex; + vo.id = representation ? representation.id : null; + vo.index = representation ? 
representation.index : null; vo.mediaType = mediaType; - vo.startNumber = startNumber; - vo.fragmentInfoType = fragmentInfoType; - vo.presentationTimeOffset = presentationTimeOffset; + vo.startNumber = representation ? representation.startNumber : null; + vo.presentationTimeOffset = representation ? representation.presentationTimeOffset : null; manifestUpdate.representationInfo.push(vo); metricUpdated(manifestUpdate.mediaType, MetricsConstants.MANIFEST_UPDATE_TRACK_INFO, manifestUpdate); diff --git a/src/streaming/models/VideoModel.js b/src/streaming/models/VideoModel.js index 7f8b54352e..70542af902 100644 --- a/src/streaming/models/VideoModel.js +++ b/src/streaming/models/VideoModel.js @@ -34,6 +34,7 @@ import EventBus from '../../core/EventBus.js'; import Events from '../../core/events/Events.js'; import Debug from '../../core/Debug.js'; import Constants from '../constants/Constants.js'; +import Settings from '../../core/Settings.js'; const READY_STATES_TO_EVENT_NAMES = new Map([ @@ -58,6 +59,7 @@ function VideoModel() { const context = this.context; const eventBus = EventBus(context).getInstance(); + const settings = Settings(context).getInstance(); const stalledStreams = []; function setup() { @@ -449,46 +451,59 @@ function VideoModel() { addEventListener(event, func); } + function getVideoElementSize() { + const hasPixelRatio = settings.get().streaming.abr.usePixelRatioInLimitBitrateByPortal && window.hasOwnProperty('devicePixelRatio'); + const pixelRatio = hasPixelRatio ? 
window.devicePixelRatio : 1; + const elementWidth = getClientWidth() * pixelRatio; + const elementHeight = getClientHeight() * pixelRatio; + + return { + elementWidth, + elementHeight + } + } + instance = { - initialize, - setCurrentTime, - play, - isPaused, - pause, - isStalled, - isSeeking, - getTime, + addEventListener, + addTextTrack, + appendChild, + getBufferRange, + getClientHeight, + getClientWidth, + getElement, + getEnded, + getPlaybackQuality, getPlaybackRate, - setPlaybackRate, getPlayedRanges, - getEnded, - setStallState, - getElement, - setElement, - setSource, + getReadyState, getSource, getTTMLRenderingDiv, - setTTMLRenderingDiv, - getVttRenderingDiv, - setVttRenderingDiv, - getPlaybackQuality, - addEventListener, - removeEventListener, - getReadyState, - getBufferRange, - getClientWidth, - getClientHeight, - getTextTracks, getTextTrack, - addTextTrack, - appendChild, - removeChild, - getVideoWidth, + getTextTracks, + getTime, + getVideoElementSize, getVideoHeight, - getVideoRelativeOffsetTop, getVideoRelativeOffsetLeft, + getVideoRelativeOffsetTop, + getVideoWidth, + getVttRenderingDiv, + initialize, + isPaused, + isSeeking, + isStalled, + pause, + play, + removeChild, + removeEventListener, + reset, + setCurrentTime, + setElement, + setPlaybackRate, + setSource, + setStallState, + setTTMLRenderingDiv, + setVttRenderingDiv, waitForReadyState, - reset }; setup(); diff --git a/src/streaming/rules/DroppedFramesHistory.js b/src/streaming/rules/DroppedFramesHistory.js index c554c0bc4d..e35ae4ef86 100644 --- a/src/streaming/rules/DroppedFramesHistory.js +++ b/src/streaming/rules/DroppedFramesHistory.js @@ -6,9 +6,9 @@ function DroppedFramesHistory() { let lastDroppedFrames = {}; let lastTotalFrames = {}; - function push(streamId, index, playbackQuality) { + function push(streamId, representationId, playbackQuality) { - if (!index) { + if (!representationId) { return; } @@ -28,14 +28,16 @@ function DroppedFramesHistory() { lastTotalFrames[streamId] = 
totalVideoFrames; const current = values[streamId]; - if (!isNaN(index)) { - if (!current[index]) { - current[index] = { droppedVideoFrames: intervalDroppedFrames, totalVideoFrames: intervalTotalFrames }; - } else { - current[index].droppedVideoFrames += intervalDroppedFrames; - current[index].totalVideoFrames += intervalTotalFrames; - } + if (!current[representationId]) { + current[representationId] = { + droppedVideoFrames: intervalDroppedFrames, + totalVideoFrames: intervalTotalFrames + }; + } else { + current[representationId].droppedVideoFrames += intervalDroppedFrames; + current[representationId].totalVideoFrames += intervalTotalFrames; } + } function getFrameHistory(streamId) { diff --git a/src/streaming/rules/RulesContext.js b/src/streaming/rules/RulesContext.js index cfbf96418b..09ded55895 100644 --- a/src/streaming/rules/RulesContext.js +++ b/src/streaming/rules/RulesContext.js @@ -41,7 +41,7 @@ function RulesContext(config) { const droppedFramesHistory = config.droppedFramesHistory; const currentRequest = config.currentRequest; const scheduleController = config.streamProcessor ? config.streamProcessor.getScheduleController() : null; - const representationInfo = config.streamProcessor ? config.streamProcessor.getRepresentationInfo() : null; + const voRepresentation = config.streamProcessor ? config.streamProcessor.getRepresentation() : null; const videoModel = config.videoModel ? config.videoModel : null; function getMediaType() { @@ -55,11 +55,11 @@ function RulesContext(config) { } function getMediaInfo() { - return representationInfo ? representationInfo.mediaInfo : null; + return voRepresentation ? 
voRepresentation.mediaInfo : null; } - function getRepresentationInfo() { - return representationInfo; + function getRepresentation() { + return voRepresentation; } function getScheduleController() { @@ -91,17 +91,17 @@ function RulesContext(config) { } instance = { - getMediaType, - getMediaInfo, - getDroppedFramesHistory, + getAbrController, getCurrentRequest, - getSwitchHistory, - getStreamInfo, + getDroppedFramesHistory, + getMediaInfo, + getMediaType, + getRepresentation, getScheduleController, + getStreamInfo, + getSwitchHistory, getThroughputController, - getAbrController, - getRepresentationInfo, - getVideoModel + getVideoModel, }; return instance; diff --git a/src/streaming/rules/SwitchRequest.js b/src/streaming/rules/SwitchRequest.js index bd9b035b50..fb3760bf2d 100644 --- a/src/streaming/rules/SwitchRequest.js +++ b/src/streaming/rules/SwitchRequest.js @@ -31,19 +31,20 @@ import FactoryMaker from '../../core/FactoryMaker.js'; -const NO_CHANGE = -1; +const NO_CHANGE = null; const PRIORITY = { DEFAULT: 0.5, STRONG: 1, WEAK: 0 }; -function SwitchRequest(q, r, p) { - //TODO refactor all the calls to this to use config to be like everything else. +function SwitchRequest(rep, reas, prio, r) { + let instance, - quality, + representation, priority, - reason; + reason, + rule; // check priority value function getPriority(p) { @@ -57,14 +58,16 @@ function SwitchRequest(q, r, p) { } // init attributes - quality = (q === undefined) ? NO_CHANGE : q; - priority = getPriority(p); - reason = (r === undefined) ? null : r; + representation = (rep === undefined) ? NO_CHANGE : rep; + priority = getPriority(prio); + reason = (reas === undefined) ? null : reas; + rule = r === undefined ? 
null : r; instance = { - quality: quality, - reason: reason, - priority: priority + representation, + reason, + rule, + priority }; return instance; diff --git a/src/streaming/rules/SwitchRequestHistory.js b/src/streaming/rules/SwitchRequestHistory.js index 4fcaa32d02..4242e2c4bc 100644 --- a/src/streaming/rules/SwitchRequestHistory.js +++ b/src/streaming/rules/SwitchRequestHistory.js @@ -30,42 +30,47 @@ */ import FactoryMaker from '../../core/FactoryMaker.js'; -import SwitchRequest from './SwitchRequest.js'; const SWITCH_REQUEST_HISTORY_DEPTH = 8; // must be > SwitchHistoryRule SAMPLE_SIZE to enable rule function SwitchRequestHistory() { - let switchRequests = []; // running total + let switchRequests = {}; // running total let srHistory = []; // history of each switch function push(switchRequest) { - if (switchRequest.newValue === SwitchRequest.NO_CHANGE) { - switchRequest.newValue = switchRequest.oldValue; - } - if (!switchRequests[switchRequest.oldValue]) { - switchRequests[switchRequest.oldValue] = {noDrops: 0, drops: 0, dropSize: 0}; + if (!switchRequests[switchRequest.oldRepresentation.id]) { + switchRequests[switchRequest.oldRepresentation.id] = { + noDrops: 0, + drops: 0, + dropSize: 0 + }; } // Set switch details - let indexDiff = switchRequest.newValue - switchRequest.oldValue; + let indexDiff = switchRequest.newRepresentation.absoluteIndex - switchRequest.oldRepresentation.absoluteIndex; let drop = (indexDiff < 0) ? 1 : 0; let dropSize = drop ? -indexDiff : 0; let noDrop = drop ? 
0 : 1; // Update running totals - switchRequests[switchRequest.oldValue].drops += drop; - switchRequests[switchRequest.oldValue].dropSize += dropSize; - switchRequests[switchRequest.oldValue].noDrops += noDrop; + switchRequests[switchRequest.oldRepresentation.id].drops += drop; + switchRequests[switchRequest.oldRepresentation.id].dropSize += dropSize; + switchRequests[switchRequest.oldRepresentation.id].noDrops += noDrop; // Save to history - srHistory.push({idx: switchRequest.oldValue, noDrop: noDrop, drop: drop, dropSize: dropSize}); + srHistory.push({ + id: switchRequest.oldRepresentation.id, + noDrop: noDrop, + drop: drop, + dropSize: dropSize + }); - // Shift earliest switch off srHistory and readjust to keep depth of running totals constant - if ( srHistory.length > SWITCH_REQUEST_HISTORY_DEPTH ) { + // Shift the earliest switch off srHistory and readjust to keep depth of running totals constant + if (srHistory.length > SWITCH_REQUEST_HISTORY_DEPTH) { let srHistoryFirst = srHistory.shift(); - switchRequests[srHistoryFirst.idx].drops -= srHistoryFirst.drop; - switchRequests[srHistoryFirst.idx].dropSize -= srHistoryFirst.dropSize; - switchRequests[srHistoryFirst.idx].noDrops -= srHistoryFirst.noDrop; + switchRequests[srHistoryFirst.id].drops -= srHistoryFirst.drop; + switchRequests[srHistoryFirst.id].dropSize -= srHistoryFirst.dropSize; + switchRequests[srHistoryFirst.id].noDrops -= srHistoryFirst.noDrop; } } @@ -79,9 +84,9 @@ function SwitchRequestHistory() { } return { - push: push, - getSwitchRequests: getSwitchRequests, - reset: reset + push, + getSwitchRequests, + reset }; } diff --git a/src/streaming/rules/abr/ABRRulesCollection.js b/src/streaming/rules/abr/ABRRulesCollection.js index cae208ecd6..6870aff918 100644 --- a/src/streaming/rules/abr/ABRRulesCollection.js +++ b/src/streaming/rules/abr/ABRRulesCollection.js @@ -181,7 +181,7 @@ function ABRRulesCollection(config) { } function _getRulesWithChange(srArray) { - return srArray.filter(sr => sr.quality 
> SwitchRequest.NO_CHANGE); + return srArray.filter(sr => sr.representation !== SwitchRequest.NO_CHANGE); } /** @@ -194,50 +194,43 @@ function ABRRulesCollection(config) { let newSwitchReq = null; let i, len, - req, - quality, - reason; + currentSwitchRequest; if (srArray.length === 0) { return; } - values[SwitchRequest.PRIORITY.STRONG] = { quality: SwitchRequest.NO_CHANGE, reason: null }; - values[SwitchRequest.PRIORITY.WEAK] = { quality: SwitchRequest.NO_CHANGE, reason: null }; - values[SwitchRequest.PRIORITY.DEFAULT] = { quality: SwitchRequest.NO_CHANGE, reason: null }; + values[SwitchRequest.PRIORITY.STRONG] = null; + values[SwitchRequest.PRIORITY.WEAK] = null; + values[SwitchRequest.PRIORITY.DEFAULT] = null; for (i = 0, len = srArray.length; i < len; i += 1) { - req = srArray[i]; - if (req.quality !== SwitchRequest.NO_CHANGE) { - // We only use the new quality in case it is lower than the already saved one or if no new quality has been selected for the respective priority - if (values[req.priority].quality === SwitchRequest.NO_CHANGE || values[req.priority].quality > req.quality) { - values[req.priority].quality = req.quality; - values[req.priority].reason = req.reason || null; + currentSwitchRequest = srArray[i]; + if (currentSwitchRequest.representation !== SwitchRequest.NO_CHANGE) { + // We only use the new quality in case the bitrate is lower than the already saved one or if no new quality has been selected for the respective priority + if (values[currentSwitchRequest.priority] === null || + (values[currentSwitchRequest.priority].representation !== SwitchRequest.NO_CHANGE && currentSwitchRequest.representation.bitrateInKbit < values[currentSwitchRequest.priority].representation.bitrateInKbit)) { + values[currentSwitchRequest.priority] = currentSwitchRequest; } } } - if (values[SwitchRequest.PRIORITY.WEAK].quality !== SwitchRequest.NO_CHANGE) { + if (values[SwitchRequest.PRIORITY.WEAK] && values[SwitchRequest.PRIORITY.WEAK].representation !== 
SwitchRequest.NO_CHANGE) { newSwitchReq = values[SwitchRequest.PRIORITY.WEAK]; } - if (values[SwitchRequest.PRIORITY.DEFAULT].quality !== SwitchRequest.NO_CHANGE) { + if (values[SwitchRequest.PRIORITY.DEFAULT] && values[SwitchRequest.PRIORITY.DEFAULT].representation !== SwitchRequest.NO_CHANGE) { newSwitchReq = values[SwitchRequest.PRIORITY.DEFAULT]; } - if (values[SwitchRequest.PRIORITY.STRONG].quality !== SwitchRequest.NO_CHANGE) { + if (values[SwitchRequest.PRIORITY.STRONG] && values[SwitchRequest.PRIORITY.STRONG].representation !== SwitchRequest.NO_CHANGE) { newSwitchReq = values[SwitchRequest.PRIORITY.STRONG]; } - if (newSwitchReq) { - quality = newSwitchReq.quality; - reason = newSwitchReq.reason; - } - - return SwitchRequest(context).create(quality, reason); + return newSwitchReq } - function getMaxQuality(rulesContext) { + function getBestPossibleSwitchRequest(rulesContext) { if (!rulesContext) { return SwitchRequest(context).create() } @@ -252,18 +245,25 @@ function ABRRulesCollection(config) { return (shouldUseBolaRuleByMediaType[mediaType] && ruleName === Constants.QUALITY_SWITCH_RULES.BOLA_RULE) || (!shouldUseBolaRuleByMediaType[mediaType] && ruleName === Constants.QUALITY_SWITCH_RULES.THROUGHPUT_RULE) }) - const switchRequestArray = activeQualitySwitchRules.map(rule => rule.getMaxIndex(rulesContext)); + const switchRequestArray = activeQualitySwitchRules.map(rule => rule.getSwitchRequest(rulesContext)); const activeRules = _getRulesWithChange(switchRequestArray); const maxQuality = getMinSwitchRequest(activeRules); return maxQuality || SwitchRequest(context).create(); } - function shouldAbandonFragment(rulesContext, streamId) { - const abandonRequestArray = abandonFragmentRules.map(rule => rule.shouldAbandon(rulesContext, streamId)); + function shouldAbandonFragment(rulesContext) { + if (!rulesContext) { + return SwitchRequest(context).create() + } + const abandonRequestArray = abandonFragmentRules.map(rule => rule.shouldAbandon(rulesContext)); const 
activeRules = _getRulesWithChange(abandonRequestArray); const shouldAbandon = getMinSwitchRequest(activeRules); + if (shouldAbandon) { + shouldAbandon.reason.forceAbandon = true + } + return shouldAbandon || SwitchRequest(context).create(); } @@ -300,15 +300,15 @@ function ABRRulesCollection(config) { } instance = { - initialize, - reset, - getMaxQuality, + getAbandonFragmentRules, + getBestPossibleSwitchRequest, + getBolaState, getMinSwitchRequest, - shouldAbandonFragment, getQualitySwitchRules, - getAbandonFragmentRules, + initialize, + reset, setBolaState, - getBolaState + shouldAbandonFragment, }; return instance; diff --git a/src/streaming/rules/abr/AbandonRequestsRule.js b/src/streaming/rules/abr/AbandonRequestsRule.js index 1eba54f112..b317d73b9c 100644 --- a/src/streaming/rules/abr/AbandonRequestsRule.js +++ b/src/streaming/rules/abr/AbandonRequestsRule.js @@ -35,14 +35,13 @@ import Debug from '../../../core/Debug.js'; function AbandonRequestsRule(config) { config = config || {}; + const mediaPlayerModel = config.mediaPlayerModel; + const dashMetrics = config.dashMetrics; const ABANDON_MULTIPLIER = 1.8; const GRACE_TIME_THRESHOLD = 500; const MIN_LENGTH_TO_AVERAGE = 5; const context = this.context; - const mediaPlayerModel = config.mediaPlayerModel; - const dashMetrics = config.dashMetrics; - const settings = config.settings; let instance, logger, @@ -55,19 +54,21 @@ function AbandonRequestsRule(config) { reset(); } - function setFragmentRequestDict(type, id) { + function _setFragmentRequestDict(type, id) { fragmentDict[type] = fragmentDict[type] || {}; fragmentDict[type][id] = fragmentDict[type][id] || {}; } - function storeLastRequestThroughputByType(type, throughput) { + function _storeLastRequestThroughputByType(type, throughput) { throughputArray[type] = throughputArray[type] || []; throughputArray[type].push(throughput); } function shouldAbandon(rulesContext) { + const switchRequest = SwitchRequest(context).create(); + switchRequest.rule = 
this.getClassName(); + try { - const switchRequest = SwitchRequest(context).create(SwitchRequest.NO_CHANGE, { name: AbandonRequestsRule.__dashjs_factory_name }); if (!rulesContext) { return switchRequest @@ -75,16 +76,14 @@ function AbandonRequestsRule(config) { const mediaInfo = rulesContext.getMediaInfo(); const mediaType = rulesContext.getMediaType(); - const streamInfo = rulesContext.getStreamInfo(); - const streamId = streamInfo ? streamInfo.id : null; const req = rulesContext.getCurrentRequest(); if (!isNaN(req.index)) { - setFragmentRequestDict(mediaType, req.index); + _setFragmentRequestDict(mediaType, req.index); - const bufferTimeDefault = mediaPlayerModel.getBufferTimeDefault(); + const stableBufferTime = mediaPlayerModel.getBufferTimeDefault(); const bufferLevel = dashMetrics.getCurrentBufferLevel(mediaType); - if (bufferLevel > bufferTimeDefault) { + if ( bufferLevel > stableBufferTime ) { return switchRequest; } @@ -93,7 +92,7 @@ function AbandonRequestsRule(config) { return switchRequest; } - //setup some init info based on first progress event + // setup some init info based on first progress event if (fragmentInfo.firstByteTime === undefined) { throughputArray[mediaType] = []; fragmentInfo.firstByteTime = req.firstByteDate.getTime(); @@ -104,42 +103,49 @@ function AbandonRequestsRule(config) { fragmentInfo.bytesLoaded = req.bytesLoaded; fragmentInfo.elapsedTime = new Date().getTime() - fragmentInfo.firstByteTime; + // Store throughput for each progress event we are getting, see ABR controller if (fragmentInfo.bytesLoaded > 0 && fragmentInfo.elapsedTime > 0) { - storeLastRequestThroughputByType(mediaType, Math.round(fragmentInfo.bytesLoaded * 8 / fragmentInfo.elapsedTime)); + _storeLastRequestThroughputByType(mediaType, Math.round(fragmentInfo.bytesLoaded * 8 / fragmentInfo.elapsedTime)); } + // Activate rule once we have enough samples and initial startup time has elapsed if (throughputArray[mediaType].length >= MIN_LENGTH_TO_AVERAGE && 
fragmentInfo.elapsedTime > GRACE_TIME_THRESHOLD && fragmentInfo.bytesLoaded < fragmentInfo.bytesTotal) { + const requestedRepresentation = req.representation; const totalSampledValue = throughputArray[mediaType].reduce((a, b) => a + b, 0); fragmentInfo.measuredBandwidthInKbps = Math.round(totalSampledValue / throughputArray[mediaType].length); fragmentInfo.estimatedTimeOfDownload = +((fragmentInfo.bytesTotal * 8 / fragmentInfo.measuredBandwidthInKbps) / 1000).toFixed(2); - if (fragmentInfo.estimatedTimeOfDownload < fragmentInfo.segmentDuration * ABANDON_MULTIPLIER || rulesContext.getRepresentationInfo().quality === 0) { + // We do not abandon if the estimated download time is below a threshold, or we are on the lowest quality anyway. + const representation = rulesContext.getRepresentation(); + const abrController = rulesContext.getAbrController(); + if (fragmentInfo.estimatedTimeOfDownload < fragmentInfo.segmentDuration * ABANDON_MULTIPLIER || abrController.isPlayingAtLowestQuality(representation)) { return switchRequest; - } else if (!abandonDict.hasOwnProperty(fragmentInfo.id)) { + } + if (!abandonDict.hasOwnProperty(fragmentInfo.id)) { const abrController = rulesContext.getAbrController(); const bytesRemaining = fragmentInfo.bytesTotal - fragmentInfo.bytesLoaded; - const bitrateList = abrController.getBitrateList(mediaInfo); - const quality = abrController.getQualityForBitrate(mediaInfo, fragmentInfo.measuredBandwidthInKbps * settings.get().streaming.abr.throughput.bandwidthSafetyFactor, streamId); - const minQuality = abrController.getMinAllowedIndexFor(mediaType, streamId); - const newQuality = (minQuality !== undefined) ? 
Math.max(minQuality, quality) : quality; - const estimateOtherBytesTotal = fragmentInfo.bytesTotal * bitrateList[newQuality].bitrate / bitrateList[abrController.getQualityFor(mediaType, streamId)].bitrate; - - if (bytesRemaining > estimateOtherBytesTotal) { - switchRequest.quality = newQuality; - switchRequest.reason.throughput = fragmentInfo.measuredBandwidthInKbps; - switchRequest.reason.fragmentID = fragmentInfo.id; - switchRequest.reason.rule = this.getClassName(); - switchRequest.reason.forceAbandon = true + const newRepresentation = abrController.getOptimalRepresentationForBitrate(mediaInfo, fragmentInfo.measuredBandwidthInKbps, true); + const estimatedBytesForNewPresentation = fragmentInfo.bytesTotal * newRepresentation.bitrateInKbit / requestedRepresentation.bitrateInKbit; + + if (bytesRemaining > estimatedBytesForNewPresentation) { + switchRequest.representation = newRepresentation; + switchRequest.reason = { + throughput: fragmentInfo.measuredBandwidthInKbps, + fragmentID: fragmentInfo.id + } abandonDict[fragmentInfo.id] = fragmentInfo; - logger.debug('[' + mediaType + '] frag id', fragmentInfo.id, ' is asking to abandon and switch to quality to ', newQuality, ' measured bandwidth was', fragmentInfo.measuredBandwidthInKbps); + logger.debug('[' + mediaType + '] frag id', fragmentInfo.id, ' is asking to abandon and switch to quality to ', newRepresentation.absoluteIndex, ' measured bandwidth was', fragmentInfo.measuredBandwidthInKbps); delete fragmentDict[mediaType][fragmentInfo.id]; } } - } else if (fragmentInfo.bytesLoaded === fragmentInfo.bytesTotal) { + } + + // Done loading we can delete the fragment from the dict + else if (fragmentInfo.bytesLoaded === fragmentInfo.bytesTotal) { delete fragmentDict[mediaType][fragmentInfo.id]; } } @@ -147,7 +153,7 @@ function AbandonRequestsRule(config) { return switchRequest; } catch (e) { logger.error(e); - SwitchRequest(context).create(SwitchRequest.NO_CHANGE, { name: AbandonRequestsRule.__dashjs_factory_name }); + 
return switchRequest } } diff --git a/src/streaming/rules/abr/BolaRule.js b/src/streaming/rules/abr/BolaRule.js index 73dc40ba14..887f324a87 100644 --- a/src/streaming/rules/abr/BolaRule.js +++ b/src/streaming/rules/abr/BolaRule.js @@ -40,14 +40,15 @@ import Events from '../../../core/events/Events.js'; import Debug from '../../../core/Debug.js'; import MediaPlayerEvents from '../../MediaPlayerEvents.js'; import Constants from '../../constants/Constants.js'; +import AbrController from '../../controllers/AbrController.js'; // BOLA_STATE_ONE_BITRATE : If there is only one bitrate (or initialization failed), always return NO_CHANGE. // BOLA_STATE_STARTUP : Set placeholder buffer such that we download fragments at most recently measured throughput. // BOLA_STATE_STEADY : Buffer primed, we switch to steady operation. // TODO: add BOLA_STATE_SEEK and tune BOLA behavior on seeking -const BOLA_STATE_ONE_BITRATE = 0; -const BOLA_STATE_STARTUP = 1; -const BOLA_STATE_STEADY = 2; +const BOLA_STATE_ONE_BITRATE = 'BOLA_STATE_ONE_BITRATE'; +const BOLA_STATE_STARTUP = 'BOLA_STATE_STARTUP'; +const BOLA_STATE_STEADY = 'BOLA_STATE_STEADY'; const MINIMUM_BUFFER_S = 10; // BOLA should never add artificial delays if buffer is less than MINIMUM_BUFFER_S. 
const MINIMUM_BUFFER_PER_BITRATE_LEVEL_S = 2; @@ -64,6 +65,7 @@ function BolaRule(config) { const dashMetrics = config.dashMetrics; const mediaPlayerModel = config.mediaPlayerModel; const eventBus = EventBus(context).getInstance(); + const abrController = AbrController(context).getInstance(); let instance, logger, @@ -72,72 +74,149 @@ function BolaRule(config) { function setup() { logger = Debug(context).getInstance().getLogger(instance); resetInitialSettings(); - - eventBus.on(MediaPlayerEvents.BUFFER_EMPTY, onBufferEmpty, instance); - eventBus.on(MediaPlayerEvents.PLAYBACK_SEEKING, onPlaybackSeeking, instance); - eventBus.on(MediaPlayerEvents.METRIC_ADDED, onMetricAdded, instance); - eventBus.on(MediaPlayerEvents.QUALITY_CHANGE_REQUESTED, onQualityChangeRequested, instance); - eventBus.on(MediaPlayerEvents.FRAGMENT_LOADING_ABANDONED, onFragmentLoadingAbandoned, instance); - eventBus.on(Events.MEDIA_FRAGMENT_LOADED, onMediaFragmentLoaded, instance); + eventBus.on(MediaPlayerEvents.BUFFER_EMPTY, _onBufferEmpty, instance); + eventBus.on(MediaPlayerEvents.PLAYBACK_SEEKING, _onPlaybackSeeking, instance); + eventBus.on(MediaPlayerEvents.METRIC_ADDED, _onMetricAdded, instance); + eventBus.on(MediaPlayerEvents.QUALITY_CHANGE_REQUESTED, _onQualityChangeRequested, instance); + eventBus.on(MediaPlayerEvents.FRAGMENT_LOADING_ABANDONED, _onFragmentLoadingAbandoned, instance); + eventBus.on(Events.MEDIA_FRAGMENT_LOADED, _onMediaFragmentLoaded, instance); } - function utilitiesFromBitrates(bitrates) { - return bitrates.map(b => Math.log(b)); - // no need to worry about offset, utilities will be offset (uniformly) anyway later + /** + * If we rebuffer, we don't want the placeholder buffer to artificially raise BOLA quality + * @param {object} e + * @private + */ + function _onBufferEmpty(e) { + const mediaType = e.mediaType; + const streamId = e.streamId; + // if audio buffer runs empty (due to track switch for example) then reset placeholder buffer only for audio (to avoid 
decrease video BOLA quality) + const stateDict = mediaType === Constants.AUDIO ? [Constants.AUDIO] : bolaStateDict[streamId]; + for (const mediaType in stateDict) { + if (bolaStateDict[streamId] && bolaStateDict[streamId].hasOwnProperty(mediaType) && bolaStateDict[streamId][mediaType].state === BOLA_STATE_STEADY) { + bolaStateDict[streamId][mediaType].placeholderBuffer = 0; + } + } } - // NOTE: in live streaming, the real buffer level can drop below minimumBufferS, but bola should not stick to lowest bitrate by using a placeholder buffer level - function calculateBolaParameters(bufferTimeDefault, bitrates, utilities) { - const highestUtilityIndex = utilities.reduce((highestIndex, u, uIndex) => (u > utilities[highestIndex] ? uIndex : highestIndex), 0); - - if (highestUtilityIndex === 0) { - // if highestUtilityIndex === 0, then always use lowest bitrate - return null; + /** + * Clear BOLA parameters for each media type once we seek. By setting to BOLA_STATE_STARTUP we use the throughput to get the possible quality. + * @private + */ + function _onPlaybackSeeking(e) { + // TODO: 1. Verify what happens if we seek mid-fragment. + // TODO: 2. If e.g. we have 10s fragments and seek, we might want to download the first fragment at a lower quality to restart playback quickly. + const streamId = e.streamId + for (const mediaType in bolaStateDict[streamId]) { + if (bolaStateDict[streamId].hasOwnProperty(mediaType)) { + const bolaState = bolaStateDict[streamId][mediaType]; + if (bolaState.state !== BOLA_STATE_ONE_BITRATE) { + bolaState.state = BOLA_STATE_STARTUP; // TODO: BOLA_STATE_SEEK? + _clearBolaStateOnSeek(bolaState); + } + } } + } - const bufferTime = Math.max(bufferTimeDefault, MINIMUM_BUFFER_S + MINIMUM_BUFFER_PER_BITRATE_LEVEL_S * bitrates.length); - - // TODO: Investigate if following can be better if utilities are not the default Math.log utilities. 
- // If using Math.log utilities, we can choose Vp and gp to always prefer bitrates[0] at minimumBufferS and bitrates[max] at bufferTarget. - // (Vp * (utility + gp) - bufferLevel) / bitrate has the maxima described when: - // Vp * (utilities[0] + gp - 1) === minimumBufferS and Vp * (utilities[max] + gp - 1) === bufferTarget - // giving: - const gp = (utilities[highestUtilityIndex] - 1) / (bufferTime / MINIMUM_BUFFER_S - 1); - const Vp = MINIMUM_BUFFER_S / gp; - // note that expressions for gp and Vp assume utilities[0] === 1, which is true because of normalization + /** + * Handle situations in which the downloaded quality differs from what the BOLA algorithm recommended + * @param e + * @private + */ + function _onMetricAdded(e) { + if (e && e.metric === MetricsConstants.HTTP_REQUEST && e.value && e.value.type === HTTPRequest.MEDIA_SEGMENT_TYPE && e.value.trace && e.value.trace.length) { + const bolaState = bolaStateDict[e.streamId] && bolaStateDict[e.streamId][e.mediaType] ? bolaStateDict[e.streamId][e.mediaType] : null; + if (bolaState && bolaState.state !== BOLA_STATE_ONE_BITRATE) { + bolaState.lastSegmentRequestTimeMs = e.value.trequest.getTime(); + bolaState.lastSegmentFinishTimeMs = e.value._tfinish.getTime(); + _checkNewSegment(bolaState, e.mediaType); + } + } + } - return { gp: gp, Vp: Vp }; + /** + * Useful to store change requests when abandoning a download. 
+ * @param e + * @private + */ + function _onQualityChangeRequested(e) { + if (e && bolaStateDict[e.streamId] && bolaStateDict[e.streamId][e.mediaType]) { + const bolaState = bolaStateDict[e.streamId][e.mediaType]; + if (bolaState && bolaState.state !== BOLA_STATE_ONE_BITRATE) { + bolaState.currentRepresentation = e.newRepresentation; + } + } } - function getInitialBolaState(rulesContext) { + /** + * + * @param rulesContext + * @returns {{}} + * @private + */ + function _getInitialBolaState(rulesContext) { const initialState = {}; const mediaInfo = rulesContext.getMediaInfo(); - const bitrates = mediaInfo.bitrateList.map(b => b.bandwidth); - let utilities = utilitiesFromBitrates(bitrates); + const representations = abrController.getPossibleVoRepresentations(mediaInfo, true); + const bitrates = representations.map(r => r.bandwidth); + let utilities = bitrates.map(b => Math.log(b)); utilities = utilities.map(u => u - utilities[0] + 1); // normalize const bufferTimeDefault = mediaPlayerModel.getBufferTimeDefault(); - const params = calculateBolaParameters(bufferTimeDefault, bitrates, utilities); + const params = _calculateBolaParameters(bufferTimeDefault, representations, utilities); + // only happens when there is only one bitrate level if (!params) { - // only happens when there is only one bitrate level initialState.state = BOLA_STATE_ONE_BITRATE; } else { initialState.state = BOLA_STATE_STARTUP; - - initialState.bitrates = bitrates; + initialState.representations = representations; initialState.utilities = utilities; initialState.bufferTimeDefault = bufferTimeDefault; initialState.Vp = params.Vp; initialState.gp = params.gp; - - initialState.lastQuality = 0; - clearBolaStateOnSeek(initialState); + initialState.currentRepresentation = null; + _clearBolaStateOnSeek(initialState); } return initialState; } - function clearBolaStateOnSeek(bolaState) { + /** + * NOTE: in live streaming, the real buffer level can drop below minimumBufferS, but bola should not stick to 
lowest bitrate by using a placeholder buffer level + * @param bufferTimeDefault + * @param representations + * @param utilities + * @returns {{gp: number, Vp: number}|null} + * @private + */ + function _calculateBolaParameters(bufferTimeDefault, representations, utilities) { + const highestUtilityIndex = utilities.reduce((highestIndex, u, uIndex) => (u > utilities[highestIndex] ? uIndex : highestIndex), 0); + + // if highestUtilityIndex === 0, then always use lowest bitrate + if (highestUtilityIndex === 0) { + return null; + } + + const bufferTime = Math.max(bufferTimeDefault, MINIMUM_BUFFER_S + MINIMUM_BUFFER_PER_BITRATE_LEVEL_S * representations.length); + + // TODO: Investigate if following can be better if utilities are not the default Math.log utilities. + // If using Math.log utilities, we can choose Vp and gp to always prefer bitrates[0] at minimumBufferS and bitrates[max] at bufferTarget. + // (Vp * (utility + gp) - bufferLevel) / bitrate has the maxima described when: + // Vp * (utilities[0] + gp - 1) === minimumBufferS and Vp * (utilities[max] + gp - 1) === bufferTarget + // giving: + const gp = (utilities[highestUtilityIndex] - 1) / (bufferTime / MINIMUM_BUFFER_S - 1); + const Vp = MINIMUM_BUFFER_S / gp; + // note that expressions for gp and Vp assume utilities[0] === 1, which is true because of normalization + + return { gp: gp, Vp: Vp }; + } + + /** + * + * @param bolaState + * @private + */ + function _clearBolaStateOnSeek(bolaState) { bolaState.placeholderBuffer = 0; bolaState.mostAdvancedSegmentStart = NaN; bolaState.lastSegmentWasReplacement = false; @@ -147,11 +226,16 @@ function BolaRule(config) { bolaState.lastSegmentFinishTimeMs = NaN; } - // If the buffer target is changed (can this happen mid-stream?), then adjust BOLA parameters accordingly. - function checkBolaStateBufferTimeDefault(bolaState, mediaType) { + /** + * If the buffer target is changed (can this happen mid-stream?), then adjust BOLA parameters accordingly. 
+ * @param bolaState + * @param mediaType + * @private + */ + function _checkBolaStateBufferTimeDefault(bolaState, mediaType) { const bufferTimeDefault = mediaPlayerModel.getBufferTimeDefault(); if (bolaState.bufferTimeDefault !== bufferTimeDefault) { - const params = calculateBolaParameters(bufferTimeDefault, bolaState.bitrates, bolaState.utilities); + const params = _calculateBolaParameters(bufferTimeDefault, bolaState.representations, bolaState.utilities); if (params.Vp !== bolaState.Vp || params.gp !== bolaState.gp) { // correct placeholder buffer using two criteria: // 1. do not change effective buffer level at effectiveBufferLevel === MINIMUM_BUFFER_S ( === Vp * gp ) @@ -172,72 +256,57 @@ function BolaRule(config) { } } - function getBolaState(rulesContext) { - const mediaType = rulesContext.getMediaType(); - let bolaState = bolaStateDict[mediaType]; - if (!bolaState) { - bolaState = getInitialBolaState(rulesContext); - bolaStateDict[mediaType] = bolaState; - } else if (bolaState.state !== BOLA_STATE_ONE_BITRATE) { - checkBolaStateBufferTimeDefault(bolaState, mediaType); - } - return bolaState; - } - - // The core idea of BOLA. - function getQualityFromBufferLevel(bolaState, bufferLevel) { - const bitrateCount = bolaState.bitrates.length; + /** + * The core idea of BOLA. 
+ * @param bolaState + * @param bufferLevel + * @returns {Representation} + * @private + */ + function _getRepresentationFromBufferLevel(bolaState, bufferLevel) { + const bitrateCount = bolaState.representations.length; let quality = NaN; let score = NaN; for (let i = 0; i < bitrateCount; ++i) { - let s = (bolaState.Vp * (bolaState.utilities[i] + bolaState.gp) - bufferLevel) / bolaState.bitrates[i]; + let s = (bolaState.Vp * (bolaState.utilities[i] + bolaState.gp) - bufferLevel) / bolaState.representations[i].bandwidth; if (isNaN(score) || s >= score) { score = s; quality = i; } } - return quality; + return bolaState.representations[quality]; } - // maximum buffer level which prefers to download at quality rather than wait - function maxBufferLevelForQuality(bolaState, quality) { - return bolaState.Vp * (bolaState.utilities[quality] + bolaState.gp); + /** + * Maximum buffer level which prefers to download at quality rather than wait + * @param bolaState + * @param representation + * @returns {number} + * @private + */ + function _maxBufferLevelForRepresentation(bolaState, representation) { + return bolaState.Vp * (bolaState.utilities[representation.absoluteIndex] + bolaState.gp); } - // the minimum buffer level that would cause BOLA to choose quality rather than a lower bitrate - function minBufferLevelForQuality(bolaState, quality) { - const qBitrate = bolaState.bitrates[quality]; - const qUtility = bolaState.utilities[quality]; - - let min = 0; - for (let i = quality - 1; i >= 0; --i) { - // for each bitrate less than bitrates[quality], BOLA should prefer quality (unless other bitrate has higher utility) - if (bolaState.utilities[i] < bolaState.utilities[quality]) { - const iBitrate = bolaState.bitrates[i]; - const iUtility = bolaState.utilities[i]; - - const level = bolaState.Vp * (bolaState.gp + (qBitrate * iUtility - iBitrate * qUtility) / (qBitrate - iBitrate)); - min = Math.max(min, level); // we want min to be small but at least level(i) for all i - } - } - 
return min; - } - /* - * The placeholder buffer increases the effective buffer that is used to calculate the bitrate. - * There are two main reasons we might want to increase the placeholder buffer: + /** + * The placeholder buffer increases the effective buffer that is used to calculate the bitrate. + * There are two main reasons we might want to increase the placeholder buffer: * - * 1. When a segment finishes downloading, we would expect to get a call on getMaxIndex() regarding the quality for - * the next segment. However, there might be a delay before the next call. E.g. when streaming live content, the - * next segment might not be available yet. If the call to getMaxIndex() does happens after a delay, we don't - * want the delay to change the BOLA decision - we only want to factor download time to decide on bitrate level. + * 1. When a segment finishes downloading, we would expect to get a call on getSwitchRequest() regarding the quality for + * the next segment. However, there might be a delay before the next call. E.g. when streaming live content, the + * next segment might not be available yet. If the call to getSwitchRequest() does happens after a delay, we don't + * want the delay to change the BOLA decision - we only want to factor download time to decide on bitrate level. * - * 2. It is possible to get a call to getMaxIndex() without having a segment download. The buffer target in dash.js - * is different for top-quality segments and lower-quality segments. If getMaxIndex() returns a lower-than-top - * quality, then the buffer controller might decide not to download a segment. When dash.js is ready for the next - * segment, getMaxIndex() will be called again. We don't want this extra delay to factor in the bitrate decision. + * 2. It is possible to get a call to getSwitchRequest() without having a segment download. The buffer target in dash.js + * is different for top-quality segments and lower-quality segments. 
If getSwitchRequest() returns a lower-than-top + * quality, then the buffer controller might decide not to download a segment. When dash.js is ready for the next + * segment, getSwitchRequest() will be called again. We don't want this extra delay to factor in the bitrate decision. + * @param bolaState + * @param mediaType + * @private */ - function updatePlaceholderBuffer(bolaState, mediaType) { + function _updatePlaceholderBuffer(bolaState, mediaType) { const nowMs = Date.now(); if (!isNaN(bolaState.lastSegmentFinishTimeMs)) { @@ -255,38 +324,12 @@ function BolaRule(config) { bolaState.lastSegmentRequestTimeMs = NaN; bolaState.lastSegmentFinishTimeMs = NaN; - checkBolaStateBufferTimeDefault(bolaState, mediaType); + _checkBolaStateBufferTimeDefault(bolaState, mediaType); } - function onBufferEmpty(e) { - // if we rebuffer, we don't want the placeholder buffer to artificially raise BOLA quality - const mediaType = e.mediaType; - // if audio buffer runs empty (due to track switch for example) then reset placeholder buffer only for audio (to avoid decrease video BOLA quality) - const stateDict = mediaType === Constants.AUDIO ? [Constants.AUDIO] : bolaStateDict; - for (const mediaType in stateDict) { - if (bolaStateDict.hasOwnProperty(mediaType) && bolaStateDict[mediaType].state === BOLA_STATE_STEADY) { - bolaStateDict[mediaType].placeholderBuffer = 0; - } - } - } - - function onPlaybackSeeking() { - // TODO: 1. Verify what happens if we seek mid-fragment. - // TODO: 2. If e.g. we have 10s fragments and seek, we might want to download the first fragment at a lower quality to restart playback quickly. - for (const mediaType in bolaStateDict) { - if (bolaStateDict.hasOwnProperty(mediaType)) { - const bolaState = bolaStateDict[mediaType]; - if (bolaState.state !== BOLA_STATE_ONE_BITRATE) { - bolaState.state = BOLA_STATE_STARTUP; // TODO: BOLA_STATE_SEEK? 
- clearBolaStateOnSeek(bolaState); - } - } - } - } - - function onMediaFragmentLoaded(e) { - if (e && e.chunk && e.chunk.mediaInfo) { - const bolaState = bolaStateDict[e.chunk.mediaInfo.type]; + function _onMediaFragmentLoaded(e) { + if (e && e.chunk && e.chunk.representation.mediaInfo && bolaStateDict[e.streamId]) { + const bolaState = bolaStateDict[e.streamId][e.chunk.representation.mediaInfo.type]; if (bolaState && bolaState.state !== BOLA_STATE_ONE_BITRATE) { const start = e.chunk.start; if (isNaN(bolaState.mostAdvancedSegmentStart) || start > bolaState.mostAdvancedSegmentStart) { @@ -298,26 +341,13 @@ function BolaRule(config) { bolaState.lastSegmentStart = start; bolaState.lastSegmentDurationS = e.chunk.duration; - bolaState.lastQuality = e.chunk.quality; - - checkNewSegment(bolaState, e.chunk.mediaInfo.type); + bolaState.currentRepresentation = e.chunk.representation; + _checkNewSegment(bolaState, e.chunk.representation.mediaInfo.type); } } } - function onMetricAdded(e) { - if (e && e.metric === MetricsConstants.HTTP_REQUEST && e.value && e.value.type === HTTPRequest.MEDIA_SEGMENT_TYPE && e.value.trace && e.value.trace.length) { - const bolaState = bolaStateDict[e.mediaType]; - if (bolaState && bolaState.state !== BOLA_STATE_ONE_BITRATE) { - bolaState.lastSegmentRequestTimeMs = e.value.trequest.getTime(); - bolaState.lastSegmentFinishTimeMs = e.value._tfinish.getTime(); - - checkNewSegment(bolaState, e.mediaType); - } - } - } - - /* + /** * When a new segment is downloaded, we get two notifications: onMediaFragmentLoaded() and onMetricAdded(). It is * possible that the quality for the downloaded segment was lower (not higher) than the quality indicated by BOLA. * This might happen because of other rules such as the DroppedFramesRule. When this happens, we trim the @@ -327,8 +357,10 @@ function BolaRule(config) { * We should also check for replacement segments (fast switching). In this case, a segment is downloaded but does * not grow the actual buffer. 
Fast switching might cause the buffer to deplete, causing BOLA to drop the bitrate. * We avoid this by growing the placeholder buffer. + * @param bolaState + * @param mediaType */ - function checkNewSegment(bolaState, mediaType) { + function _checkNewSegment(bolaState, mediaType) { if (!isNaN(bolaState.lastSegmentStart) && !isNaN(bolaState.lastSegmentRequestTimeMs) && !isNaN(bolaState.placeholderBuffer)) { bolaState.placeholderBuffer *= PLACEHOLDER_BUFFER_DECAY; @@ -336,7 +368,7 @@ function BolaRule(config) { if (!isNaN(bolaState.lastSegmentFinishTimeMs)) { const bufferLevel = dashMetrics.getCurrentBufferLevel(mediaType); const bufferAtLastSegmentRequest = bufferLevel + 0.001 * (bolaState.lastSegmentFinishTimeMs - bolaState.lastSegmentRequestTimeMs); // estimate - const maxEffectiveBufferForLastSegment = maxBufferLevelForQuality(bolaState, bolaState.lastQuality); + const maxEffectiveBufferForLastSegment = _maxBufferLevelForRepresentation(bolaState, bolaState.currentRepresentation); const maxPlaceholderBuffer = Math.max(0, maxEffectiveBufferForLastSegment - bufferAtLastSegmentRequest); bolaState.placeholderBuffer = Math.min(maxPlaceholderBuffer, bolaState.placeholderBuffer); } @@ -353,26 +385,48 @@ function BolaRule(config) { } } - function onQualityChangeRequested(e) { - // Useful to store change requests when abandoning a download. 
- if (e) { - const bolaState = bolaStateDict[e.mediaType]; - if (bolaState && bolaState.state !== BOLA_STATE_ONE_BITRATE) { - bolaState.abrQuality = e.newQuality; + /** + * The minimum buffer level that would cause BOLA to choose target quality rather than a lower bitrate + * @param bolaState + * @param representation + * @returns {number} + * @private + */ + function _minBufferLevelForRepresentation(bolaState, representation) { + const absoluteIndex = representation.absoluteIndex + const qBitrate = representation.bandwidth; + const qUtility = bolaState.utilities[absoluteIndex]; + + let min = 0; + + // for each bitrate less than bitrates[absoluteIndex], BOLA should prefer quality (unless other bitrate has higher utility) + for (let i = absoluteIndex - 1; i >= 0; --i) { + if (bolaState.utilities[i] < bolaState.utilities[absoluteIndex]) { + const iBitrate = bolaState.representations[i].bandwidth; + const iUtility = bolaState.utilities[i]; + + const level = bolaState.Vp * (bolaState.gp + (qBitrate * iUtility - iBitrate * qUtility) / (qBitrate - iBitrate)); + min = Math.max(min, level); // we want min to be small but at least level(i) for all i } } + return min; } - function onFragmentLoadingAbandoned(e) { + /** + * + * @param e + * @private + */ + function _onFragmentLoadingAbandoned(e) { if (e) { - const bolaState = bolaStateDict[e.mediaType]; + const bolaState = bolaStateDict[e.streamId][e.mediaType]; if (bolaState && bolaState.state !== BOLA_STATE_ONE_BITRATE) { // deflate placeholderBuffer - note that we want to be conservative when abandoning const bufferLevel = dashMetrics.getCurrentBufferLevel(e.mediaType); let wantEffectiveBufferLevel; - if (bolaState.abrQuality > 0) { + if (bolaState.currentRepresentation.absoluteIndex > 0) { // deflate to point where BOLA just chooses newQuality over newQuality-1 - wantEffectiveBufferLevel = minBufferLevelForQuality(bolaState, bolaState.abrQuality); + wantEffectiveBufferLevel = _minBufferLevelForRepresentation(bolaState, 
bolaState.currentRepresentation); } else { wantEffectiveBufferLevel = MINIMUM_BUFFER_S; } @@ -382,120 +436,138 @@ function BolaRule(config) { } } - function getMaxIndex(rulesContext) { - try { - const switchRequest = SwitchRequest(context).create(); - const mediaInfo = rulesContext.getMediaInfo(); - const mediaType = rulesContext.getMediaType(); - const scheduleController = rulesContext.getScheduleController(); - const streamInfo = rulesContext.getStreamInfo(); - const abrController = rulesContext.getAbrController(); - const throughputController = rulesContext.getThroughputController(); - const streamId = streamInfo ? streamInfo.id : null; - switchRequest.reason = switchRequest.reason || {}; - - scheduleController.setTimeToLoadDelay(0); + /** + * At startup we decide on the best quality based on the throughput. The placeholderBuffer is adjusted accordingly. + * @param switchRequest + * @param rulesContext + * @param bolaState + * @private + */ + function _handleBolaStateStartup(switchRequest, rulesContext, bolaState) { + const mediaType = rulesContext.getMediaType(); + const throughputController = rulesContext.getThroughputController(); + const safeThroughput = throughputController.getSafeAverageThroughput(mediaType); - const bolaState = getBolaState(rulesContext); + if (isNaN(safeThroughput)) { + return + } - if (bolaState.state === BOLA_STATE_ONE_BITRATE) { - // shouldn't even have been called - return switchRequest; - } + const mediaInfo = rulesContext.getMediaInfo(); + const representation = abrController.getOptimalRepresentationForBitrate(mediaInfo, safeThroughput, true); + const bufferLevel = dashMetrics.getCurrentBufferLevel(mediaType); + switchRequest.representation = representation; + switchRequest.reason.throughput = safeThroughput; + bolaState.placeholderBuffer = Math.max(0, _minBufferLevelForRepresentation(bolaState, representation) - bufferLevel); + bolaState.currentRepresentation = representation; + + if (!isNaN(bolaState.lastSegmentDurationS) && 
bufferLevel >= bolaState.lastSegmentDurationS) { + bolaState.state = BOLA_STATE_STEADY; + } + } - const bufferLevel = dashMetrics.getCurrentBufferLevel(mediaType); - const throughput = throughputController.getAverageThroughput(mediaType); - const safeThroughput = throughputController.getSafeAverageThroughput(mediaType); - const latency = throughputController.getAverageLatency(mediaType); - let quality; + /** + * + * @param switchRequest + * @param rulesContext + * @param bolaState + * @private + */ + function _handleBolaStateSteady(switchRequest, rulesContext, bolaState) { + const mediaType = rulesContext.getMediaType(); + const throughputController = rulesContext.getThroughputController(); + const mediaInfo = rulesContext.getMediaInfo(); + const safeThroughput = throughputController.getSafeAverageThroughput(mediaType); + const scheduleController = rulesContext.getScheduleController(); + _updatePlaceholderBuffer(bolaState, mediaType); + + const bufferLevel = dashMetrics.getCurrentBufferLevel(mediaType); + // NB: The placeholder buffer is added to bufferLevel to come up with a bitrate. + // This might lead BOLA to be too optimistic and to choose a bitrate that would lead to rebuffering - + // if the real buffer bufferLevel runs out, the placeholder buffer cannot prevent rebuffering. + // However, the InsufficientBufferRule takes care of this scenario. + let representation = _getRepresentationFromBufferLevel(bolaState, bufferLevel + bolaState.placeholderBuffer); + + // we want to avoid oscillations + // We implement the "BOLA-O" variant: when network bandwidth lies between two encoded bitrate levels, stick to the lowest level. 
+ const representationForThroughput = abrController.getOptimalRepresentationForBitrate(mediaInfo, safeThroughput, true); + if (representation.absoluteIndex > bolaState.currentRepresentation.absoluteIndex && representation.absoluteIndex > representationForThroughput.absoluteIndex) { + // only intervene if we are trying to *increase* quality to an *unsustainable* level + // we are only avoid oscillations - do not drop below last quality + representation = representationForThroughput.absoluteIndex > bolaState.currentRepresentation.absoluteIndex ? representationForThroughput : bolaState.currentRepresentation; + } - switchRequest.reason.state = bolaState.state; - switchRequest.reason.throughput = throughput; - switchRequest.reason.latency = latency; + // We do not want to overfill buffer with low quality chunks. + // Note that there will be no delay if buffer level is below MINIMUM_BUFFER_S, probably even with some margin higher than MINIMUM_BUFFER_S. + let delayS = Math.max(0, bufferLevel + bolaState.placeholderBuffer - _maxBufferLevelForRepresentation(bolaState, representation)); - if (isNaN(throughput)) { // isNaN(throughput) === isNaN(safeThroughput) === isNaN(latency) - // still starting up - not enough information - return switchRequest; + // First reduce placeholder buffer, then tell schedule controller to pause. + if (delayS <= bolaState.placeholderBuffer) { + bolaState.placeholderBuffer -= delayS; + delayS = 0; + } else { + delayS -= bolaState.placeholderBuffer; + bolaState.placeholderBuffer = 0; + + if (!abrController.isPlayingAtTopQuality(representation)) { + // At top quality, allow schedule controller to decide how far to fill buffer. 
+ scheduleController.setTimeToLoadDelay(1000 * delayS); + } else { + delayS = 0; } + } - switch (bolaState.state) { - case BOLA_STATE_STARTUP: - quality = abrController.getQualityForBitrate(mediaInfo, safeThroughput, streamId, latency); - - switchRequest.quality = quality; - switchRequest.reason.throughput = safeThroughput; + switchRequest.representation = representation; + switchRequest.reason.throughput = safeThroughput; + switchRequest.reason.bufferLevel = bufferLevel; + switchRequest.reason.placeholderBuffer = bolaState.placeholderBuffer; + switchRequest.reason.delay = delayS; + bolaState.currentRepresentation = representation; + } - bolaState.placeholderBuffer = Math.max(0, minBufferLevelForQuality(bolaState, quality) - bufferLevel); - bolaState.lastQuality = quality; + /** + * Bad state we should not have arrived here. Try to recover. + * @param switchRequest + * @param rulesContext + * @param bolaState + */ + function _handleBolaStateBad(switchRequest, rulesContext, bolaState) { + logger.debug('BOLA ABR rule invoked in bad state.'); + const mediaInfo = rulesContext.getMediaInfo(); + const mediaType = rulesContext.getMediaType(); + const throughputController = rulesContext.getThroughputController(); + const safeThroughput = throughputController.getSafeAverageThroughput(mediaType); + switchRequest.representation = abrController.getOptimalRepresentationForBitrate(mediaInfo, safeThroughput, true); + switchRequest.reason.state = bolaState.state; + switchRequest.reason.throughput = safeThroughput; + bolaState.state = BOLA_STATE_STARTUP; + _clearBolaStateOnSeek(bolaState); + } - if (!isNaN(bolaState.lastSegmentDurationS) && bufferLevel >= bolaState.lastSegmentDurationS) { - bolaState.state = BOLA_STATE_STEADY; - } + function getSwitchRequest(rulesContext) { + try { + const switchRequest = SwitchRequest(context).create(); - break; // BOLA_STATE_STARTUP + const scheduleController = rulesContext.getScheduleController(); + scheduleController.setTimeToLoadDelay(0); - 
case BOLA_STATE_STEADY: + switchRequest.rule = this.getClassName(); + switchRequest.reason = switchRequest.reason || {}; - // NB: The placeholder buffer is added to bufferLevel to come up with a bitrate. - // This might lead BOLA to be too optimistic and to choose a bitrate that would lead to rebuffering - - // if the real buffer bufferLevel runs out, the placeholder buffer cannot prevent rebuffering. - // However, the InsufficientBufferRule takes care of this scenario. - - updatePlaceholderBuffer(bolaState, mediaType); - - quality = getQualityFromBufferLevel(bolaState, bufferLevel + bolaState.placeholderBuffer); - - // we want to avoid oscillations - // We implement the "BOLA-O" variant: when network bandwidth lies between two encoded bitrate levels, stick to the lowest level. - const qualityForThroughput = abrController.getQualityForBitrate(mediaInfo, safeThroughput, streamId, latency); - if (quality > bolaState.lastQuality && quality > qualityForThroughput) { - // only intervene if we are trying to *increase* quality to an *unsustainable* level - // we are only avoid oscillations - do not drop below last quality - - quality = Math.max(qualityForThroughput, bolaState.lastQuality); - } - - // We do not want to overfill buffer with low quality chunks. - // Note that there will be no delay if buffer level is below MINIMUM_BUFFER_S, probably even with some margin higher than MINIMUM_BUFFER_S. - let delayS = Math.max(0, bufferLevel + bolaState.placeholderBuffer - maxBufferLevelForQuality(bolaState, quality)); - - // First reduce placeholder buffer, then tell schedule controller to pause. - if (delayS <= bolaState.placeholderBuffer) { - bolaState.placeholderBuffer -= delayS; - delayS = 0; - } else { - delayS -= bolaState.placeholderBuffer; - bolaState.placeholderBuffer = 0; - - if (quality < abrController.getMaxAllowedIndexFor(mediaType, streamId)) { - // At top quality, allow schedule controller to decide how far to fill buffer. 
- scheduleController.setTimeToLoadDelay(1000 * delayS); - } else { - delayS = 0; - } - } - - switchRequest.quality = quality; - switchRequest.reason.throughput = throughput; - switchRequest.reason.latency = latency; - switchRequest.reason.bufferLevel = bufferLevel; - switchRequest.reason.placeholderBuffer = bolaState.placeholderBuffer; - switchRequest.reason.delay = delayS; - - bolaState.lastQuality = quality; - // keep bolaState.state === BOLA_STATE_STEADY - - break; // BOLA_STATE_STEADY + const bolaState = _getBolaState(rulesContext); + switchRequest.reason.state = bolaState.state; + switch (bolaState.state) { + case BOLA_STATE_ONE_BITRATE: + break; + case BOLA_STATE_STARTUP: + _handleBolaStateStartup(switchRequest, rulesContext, bolaState); + break; + case BOLA_STATE_STEADY: + _handleBolaStateSteady(switchRequest, rulesContext, bolaState) + break; default: - logger.debug('BOLA ABR rule invoked in bad state.'); - // should not arrive here, try to recover - switchRequest.quality = abrController.getQualityForBitrate(mediaInfo, safeThroughput, streamId, latency); - switchRequest.reason.state = bolaState.state; - switchRequest.reason.throughput = safeThroughput; - switchRequest.reason.latency = latency; - bolaState.state = BOLA_STATE_STARTUP; - clearBolaStateOnSeek(bolaState); + _handleBolaStateBad(switchRequest, rulesContext, bolaState) + break; } return switchRequest; @@ -505,6 +577,22 @@ function BolaRule(config) { } } + function _getBolaState(rulesContext) { + const mediaType = rulesContext.getMediaType(); + const streamId = rulesContext.getStreamInfo().id; + if (!bolaStateDict[streamId]) { + bolaStateDict[streamId] = {}; + } + let bolaState = bolaStateDict[streamId][mediaType]; + if (!bolaState) { + bolaState = _getInitialBolaState(rulesContext); + bolaStateDict[streamId][mediaType] = bolaState; + } else if (bolaState.state !== BOLA_STATE_ONE_BITRATE) { + _checkBolaStateBufferTimeDefault(bolaState, mediaType); + } + return bolaState; + } + function 
resetInitialSettings() { bolaStateDict = {}; } @@ -512,16 +600,16 @@ function BolaRule(config) { function reset() { resetInitialSettings(); - eventBus.off(MediaPlayerEvents.BUFFER_EMPTY, onBufferEmpty, instance); - eventBus.off(MediaPlayerEvents.PLAYBACK_SEEKING, onPlaybackSeeking, instance); - eventBus.off(MediaPlayerEvents.METRIC_ADDED, onMetricAdded, instance); - eventBus.off(MediaPlayerEvents.QUALITY_CHANGE_REQUESTED, onQualityChangeRequested, instance); - eventBus.off(MediaPlayerEvents.FRAGMENT_LOADING_ABANDONED, onFragmentLoadingAbandoned, instance); - eventBus.off(Events.MEDIA_FRAGMENT_LOADED, onMediaFragmentLoaded, instance); + eventBus.off(MediaPlayerEvents.BUFFER_EMPTY, _onBufferEmpty, instance); + eventBus.off(MediaPlayerEvents.PLAYBACK_SEEKING, _onPlaybackSeeking, instance); + eventBus.off(MediaPlayerEvents.METRIC_ADDED, _onMetricAdded, instance); + eventBus.off(MediaPlayerEvents.QUALITY_CHANGE_REQUESTED, _onQualityChangeRequested, instance); + eventBus.off(MediaPlayerEvents.FRAGMENT_LOADING_ABANDONED, _onFragmentLoadingAbandoned, instance); + eventBus.off(Events.MEDIA_FRAGMENT_LOADED, _onMediaFragmentLoaded, instance); } instance = { - getMaxIndex, + getSwitchRequest, reset }; diff --git a/src/streaming/rules/abr/DroppedFramesRule.js b/src/streaming/rules/abr/DroppedFramesRule.js index 44394618c7..b6c302e62a 100644 --- a/src/streaming/rules/abr/DroppedFramesRule.js +++ b/src/streaming/rules/abr/DroppedFramesRule.js @@ -15,8 +15,9 @@ function DroppedFramesRule() { logger = Debug(context).getInstance().getLogger(instance); } - function getMaxIndex(rulesContext) { + function getSwitchRequest(rulesContext) { const switchRequest = SwitchRequest(context).create(); + switchRequest.rule = this.getClassName(); if (!rulesContext || !rulesContext.hasOwnProperty('getDroppedFramesHistory')) { return switchRequest; @@ -24,39 +25,46 @@ function DroppedFramesRule() { const droppedFramesHistory = rulesContext.getDroppedFramesHistory(); const streamId = 
rulesContext.getStreamInfo().id; + const mediaInfo = rulesContext.getMediaInfo(); + const abrController = rulesContext.getAbrController(); if (droppedFramesHistory) { const dfh = droppedFramesHistory.getFrameHistory(streamId); - if (!dfh || dfh.length === 0) { + if (!dfh || Object.keys(dfh.length) === 0) { return switchRequest; } let droppedFrames = 0; let totalFrames = 0; - let maxIndex = SwitchRequest.NO_CHANGE; + const representations = abrController.getPossibleVoRepresentations(mediaInfo, true); + let newRepresentation = null; - //No point in measuring dropped frames for the zeroeth index. - for (let i = 1; i < dfh.length; i++) { - if (dfh[i]) { - droppedFrames = dfh[i].droppedVideoFrames; - totalFrames = dfh[i].totalVideoFrames; + //No point in measuring dropped frames for the first index. + for (let i = 1; i < representations.length; i++) { + const currentRepresentation = representations[i]; + if (currentRepresentation && dfh[currentRepresentation.id]) { + droppedFrames = dfh[currentRepresentation.id].droppedVideoFrames; + totalFrames = dfh[currentRepresentation.id].totalVideoFrames; if (totalFrames > GOOD_SAMPLE_SIZE && droppedFrames / totalFrames > DROPPED_PERCENTAGE_FORBID) { - maxIndex = i - 1; - logger.debug('index: ' + maxIndex + ' Dropped Frames: ' + droppedFrames + ' Total Frames: ' + totalFrames); + newRepresentation = representations[i - 1]; + logger.debug('index: ' + newRepresentation.absoluteIndex + ' Dropped Frames: ' + droppedFrames + ' Total Frames: ' + totalFrames); break; } } } - return SwitchRequest(context).create(maxIndex, { droppedFrames: droppedFrames }); + if (newRepresentation) { + switchRequest.representation = newRepresentation; + switchRequest.reason = { droppedFrames }; + } } return switchRequest; } instance = { - getMaxIndex + getSwitchRequest }; setup(); diff --git a/src/streaming/rules/abr/InsufficientBufferRule.js b/src/streaming/rules/abr/InsufficientBufferRule.js index 3dfe97f693..a6775d0b64 100644 --- 
a/src/streaming/rules/abr/InsufficientBufferRule.js +++ b/src/streaming/rules/abr/InsufficientBufferRule.js @@ -53,85 +53,74 @@ function InsufficientBufferRule(config) { function setup() { logger = Debug(context).getInstance().getLogger(instance); - resetInitialSettings(); + _resetInitialSettings(); eventBus.on(MediaPlayerEvents.PLAYBACK_SEEKING, _onPlaybackSeeking, instance); eventBus.on(Events.BYTES_APPENDED_END_FRAGMENT, _onBytesAppended, instance); } - function checkConfig() { - if (!dashMetrics || !dashMetrics.hasOwnProperty('getCurrentBufferLevel') || !dashMetrics.hasOwnProperty('getCurrentBufferState')) { - throw new Error(Constants.MISSING_CONFIG_ERROR); - } - } - /** - * If a BUFFER_EMPTY event happens, then InsufficientBufferRule returns switchRequest.quality=0 until BUFFER_LOADED happens. - * Otherwise InsufficientBufferRule gives a maximum bitrate depending on throughput and bufferLevel such that + * If a BUFFER_EMPTY event happens, then InsufficientBufferRule returns switchRequest. Quality=0 until BUFFER_LOADED happens. + * Otherwise, InsufficientBufferRule gives a maximum bitrate depending on throughput and bufferLevel such that * a whole fragment can be downloaded before the buffer runs out, subject to a conservative safety factor of 0.5. * If the bufferLevel is low, then InsufficientBufferRule avoids rebuffering risk. * If the bufferLevel is high, then InsufficientBufferRule give a high MaxIndex allowing other rules to take over. 
* @param rulesContext * @return {object} */ - function getMaxIndex(rulesContext) { + function getSwitchRequest(rulesContext) { const switchRequest = SwitchRequest(context).create(); + switchRequest.rule = this.getClassName(); if (!rulesContext || !rulesContext.hasOwnProperty('getMediaType')) { return switchRequest; } - checkConfig(); - const mediaType = rulesContext.getMediaType(); const currentBufferState = dashMetrics.getCurrentBufferState(mediaType); - const representationInfo = rulesContext.getRepresentationInfo(); - const fragmentDuration = representationInfo.fragmentDuration; - const streamInfo = rulesContext.getStreamInfo(); - const streamId = streamInfo ? streamInfo.id : null; + const voRepresentation = rulesContext.getRepresentation(); + const fragmentDuration = voRepresentation.fragmentDuration; const scheduleController = rulesContext.getScheduleController(); const playbackController = scheduleController.getPlaybackController(); - // Don't ask for a bitrate change if there is not info about buffer state or if fragmentDuration is not defined + // Don't ask for a bitrate change if there is no info about buffer state or if fragmentDuration is not defined const lowLatencyEnabled = playbackController.getLowLatencyModeEnabled(); - if (shouldIgnore(lowLatencyEnabled, mediaType) || !fragmentDuration) { + if (_shouldIgnore(lowLatencyEnabled, mediaType) || !fragmentDuration) { return switchRequest; } + const mediaInfo = rulesContext.getMediaInfo(); + const abrController = rulesContext.getAbrController(); if (currentBufferState && currentBufferState.state === MetricsConstants.BUFFER_EMPTY) { logger.debug('[' + mediaType + '] Switch to index 0; buffer is empty.'); - switchRequest.quality = 0; + switchRequest.representation = abrController.getOptimalRepresentationForBitrate(mediaInfo, 0, true); switchRequest.reason = 'InsufficientBufferRule: Buffer is empty'; } else { - const mediaInfo = rulesContext.getMediaInfo(); - const abrController = 
rulesContext.getAbrController(); const throughputController = rulesContext.getThroughputController(); const bufferLevel = dashMetrics.getCurrentBufferLevel(mediaType); - const throughput = throughputController.getAverageThroughput(mediaType); - const latency = throughputController.getAverageLatency(mediaType); + const throughput = throughputController.getAverageThroughput(mediaType, null, NaN); const bitrate = throughput * (bufferLevel / fragmentDuration) * INSUFFICIENT_BUFFER_SAFETY_FACTOR; - switchRequest.quality = abrController.getQualityForBitrate(mediaInfo, bitrate, streamId, latency); + switchRequest.representation = abrController.getOptimalRepresentationForBitrate(mediaInfo, bitrate, true); switchRequest.reason = 'InsufficientBufferRule: being conservative to avoid immediate rebuffering'; } return switchRequest; - } - function shouldIgnore(lowLatencyEnabled, mediaType) { + function _shouldIgnore(lowLatencyEnabled, mediaType) { return !lowLatencyEnabled && bufferStateDict[mediaType].ignoreCount > 0; } - function resetInitialSettings() { + function _resetInitialSettings() { bufferStateDict = {}; bufferStateDict[Constants.VIDEO] = { ignoreCount: SEGMENT_IGNORE_COUNT }; bufferStateDict[Constants.AUDIO] = { ignoreCount: SEGMENT_IGNORE_COUNT }; } function _onPlaybackSeeking() { - resetInitialSettings(); + _resetInitialSettings(); } function _onBytesAppended(e) { @@ -143,13 +132,13 @@ function InsufficientBufferRule(config) { } function reset() { - resetInitialSettings(); + _resetInitialSettings(); eventBus.off(MediaPlayerEvents.PLAYBACK_SEEKING, _onPlaybackSeeking, instance); eventBus.off(Events.BYTES_APPENDED_END_FRAGMENT, _onBytesAppended, instance); } instance = { - getMaxIndex, + getSwitchRequest, reset }; diff --git a/src/streaming/rules/abr/L2ARule.js b/src/streaming/rules/abr/L2ARule.js index 7139453fd4..09ab8db9ca 100644 --- a/src/streaming/rules/abr/L2ARule.js +++ b/src/streaming/rules/abr/L2ARule.js @@ -40,10 +40,12 @@ import Events from 
'../../../core/events/Events.js'; import Debug from '../../../core/Debug.js'; import Constants from '../../constants/Constants.js'; -const L2A_STATE_ONE_BITRATE = 0; // If there is only one bitrate (or initialization failed), always return NO_CHANGE. -const L2A_STATE_STARTUP = 1; // Set placeholder buffer such that we download fragments at most recently measured throughput. -const L2A_STATE_STEADY = 2; // Buffer primed, we switch to steady operation. - +const L2A_STATE_ONE_BITRATE = 'L2A_STATE_ONE_BITRATE'; // If there is only one bitrate (or initialization failed), always return NO_CHANGE. +const L2A_STATE_STARTUP = 'L2A_STATE_STARTUP'; // Set placeholder buffer such that we download fragments at most recently measured throughput. +const L2A_STATE_STEADY = 'L2A_STATE_STEADY'; // Buffer primed, we switch to steady operation. +const HORIZON = 4; // Optimization horizon (The amount of steps required to achieve convergence) +const VL = Math.pow(HORIZON, 0.99);// Cautiousness parameter, used to control aggressiveness of the bitrate decision process. 
+const REACT = 2; function L2ARule(config) { config = config || {}; @@ -67,7 +69,6 @@ function L2ARule(config) { eventBus.on(Events.PLAYBACK_SEEKING, _onPlaybackSeeking, instance); eventBus.on(Events.MEDIA_FRAGMENT_LOADED, _onMediaFragmentLoaded, instance); eventBus.on(Events.METRIC_ADDED, _onMetricAdded, instance); - eventBus.on(Events.QUALITY_CHANGE_REQUESTED, _onQualityChangeRequested, instance); } /** @@ -79,13 +80,9 @@ function L2ARule(config) { function _getInitialL2AState(rulesContext) { const initialState = {}; const mediaInfo = rulesContext.getMediaInfo(); - const bitrates = mediaInfo.bitrateList.map((b) => { - return b.bandwidth / 1000; - }); initialState.state = L2A_STATE_STARTUP; - initialState.bitrates = bitrates; - initialState.lastQuality = 0; + initialState.currentRepresentation = null; _initializeL2AParameters(mediaInfo); _clearL2AStateOnSeek(initialState); @@ -170,9 +167,9 @@ function L2ARule(config) { * @private */ function _onMediaFragmentLoaded(e) { - if (e && e.chunk && e.chunk.mediaInfo) { - const l2AState = l2AStateDict[e.chunk.mediaInfo.type]; - const l2AParameters = l2AParameterDict[e.chunk.mediaInfo.type]; + if (e && e.chunk && e.chunk.representation && e.chunk.representation.mediaInfo) { + const l2AState = l2AStateDict[e.chunk.representation.mediaInfo.type]; + const l2AParameters = l2AParameterDict[e.chunk.representation.mediaInfo.type]; if (l2AState && l2AState.state !== L2A_STATE_ONE_BITRATE) { const start = e.chunk.start; @@ -185,7 +182,7 @@ function L2ARule(config) { l2AState.lastSegmentStart = start; l2AState.lastSegmentDurationS = e.chunk.duration; - l2AState.lastQuality = e.chunk.quality; + l2AState.currentRepresentation = e.chunk.representation; _checkNewSegment(l2AState, l2AParameters); } @@ -225,20 +222,6 @@ function L2ARule(config) { } } - /** - * Event handler for the qualityChangeRequested event - * @param {object} e - * @private - */ - function _onQualityChangeRequested(e) { - // Useful to store change requests when 
abandoning a download. - if (e && e.mediaType) { - const L2AState = l2AStateDict[e.mediaType]; - if (L2AState && L2AState.state !== L2A_STATE_ONE_BITRATE) { - L2AState.abrQuality = e.newQuality; - } - } - } /** * Dot multiplication of two arrays @@ -296,174 +279,187 @@ function L2ARule(config) { return x; } + /** + * + * @param rulesContext + * @param switchRequest + * @param l2AState + * @private + */ + function _handleStartupState(rulesContext, switchRequest, l2AState) { + const mediaInfo = rulesContext.getMediaInfo(); + const mediaType = rulesContext.getMediaType(); + const throughputController = rulesContext.getThroughputController(); + const safeThroughput = throughputController.getSafeAverageThroughput(mediaType); + + if (isNaN(safeThroughput)) { + // still starting up - not enough information + return switchRequest; + } + + const abrController = rulesContext.getAbrController(); + const representation = abrController.getOptimalRepresentationForBitrate(mediaInfo, safeThroughput, true);//During strat-up phase abr.controller is responsible for bitrate decisions. 
+ const bufferLevel = dashMetrics.getCurrentBufferLevel(mediaType, true); + const l2AParameter = l2AParameterDict[mediaType]; + const possibleRepresentations = abrController.getPossibleVoRepresentations(mediaInfo, true); + + switchRequest.representation = representation; + switchRequest.reason.throughput = safeThroughput; + l2AState.currentRepresentation = representation; + + if (!isNaN(l2AState.lastSegmentDurationS) && bufferLevel >= l2AParameter.B_target) { + l2AState.state = L2A_STATE_STEADY; + l2AParameter.Q = VL;// Initialization of Q langrangian multiplier + // Update of probability vector w, to be used in main adaptation logic of L2A below (steady state) + for (let i = 0; i < possibleRepresentations.length; ++i) { + const rep = possibleRepresentations[i]; + if (rep.id === l2AState.currentRepresentation.id) { + l2AParameter.prev_w[i] = 1; + } else { + l2AParameter.prev_w[i] = 0; + } + } + } + } + + function _handleSteadyState(rulesContext, switchRequest, l2AState) { + let diff1 = []; //Used to calculate the difference between consecutive decisions (w-w_prev) + const throughputController = rulesContext.getThroughputController(); + const mediaType = rulesContext.getMediaType(); + let lastThroughput = throughputController.getAverageThroughput(mediaType, Constants.THROUGHPUT_CALCULATION_MODES.ARITHMETIC_MEAN, 1); + let currentHttpRequest = dashMetrics.getCurrentHttpRequest(mediaType); + let selectedRepresentation = null; + const l2AParameter = l2AParameterDict[mediaType]; + + //To avoid division with 0 (avoid infinity) in case of an absolute network outage + if (lastThroughput < 1) { + lastThroughput = 1; + } + + // Note that for SegmentBase addressing the request url does not change. 
+ // As this is not relevant for low latency streaming at this point the check below is sufficient + if (currentHttpRequest.url === l2AState.lastSegmentUrl || + currentHttpRequest.type === HTTPRequest.INIT_SEGMENT_TYPE) { + // No change to inputs or init segment so use previously calculated quality + selectedRepresentation = l2AState.currentRepresentation; + + } else { // Recalculate Q + let V = l2AState.lastSegmentDurationS; + let sign = 1; + + //Main adaptation logic of L2A-LL + const abrController = rulesContext.getAbrController(); + const mediaInfo = rulesContext.getMediaInfo(); + const possibleRepresentations = abrController.getPossibleVoRepresentations(mediaInfo, true); + const videoModel = rulesContext.getVideoModel(); + let currentPlaybackRate = videoModel.getPlaybackRate(); + const alpha = Math.max(Math.pow(HORIZON, 1), VL * Math.sqrt(HORIZON));// Step size, used for gradient descent exploration granularity + for (let i = 0; i < possibleRepresentations.length; ++i) { + const rep = possibleRepresentations[i]; + + // In this case buffer would deplete, leading to a stall, which increases latency and thus the particular probability of selection of bitrate[i] should be decreased. 
+ if (currentPlaybackRate * rep.bitrateInKbit > lastThroughput) { + sign = -1; + } + + // The objective of L2A is to minimize the overall latency=request-response time + buffer length after download+ potential stalling (if buffer less than chunk downlad time) + l2AParameter.w[i] = l2AParameter.prev_w[i] + sign * (V / (2 * alpha)) * ((l2AParameter.Q + VL) * (currentPlaybackRate * rep.bitrateInKbit / lastThroughput));//Lagrangian descent + } + + // Apply euclidean projection on w to ensure w expresses a probability distribution + l2AParameter.w = euclideanProjection(l2AParameter.w); + + for (let i = 0; i < possibleRepresentations.length; ++i) { + diff1[i] = l2AParameter.w[i] - l2AParameter.prev_w[i]; + l2AParameter.prev_w[i] = l2AParameter.w[i]; + } + + // Lagrangian multiplier Q calculation: + const bitrates = possibleRepresentations.map((rep) => { + return rep.bandwidth; + }) + l2AParameter.Q = Math.max(0, l2AParameter.Q - V + V * currentPlaybackRate * ((_dotmultiplication(bitrates, l2AParameter.prev_w) + _dotmultiplication(bitrates, diff1)) / lastThroughput)); + + // Quality is calculated as argmin of the absolute difference between available bitrates (bitrates[i]) and bitrate estimation (dotmultiplication(w,bitrates)). 
+ let temp = []; + for (let i = 0; i < bitrates.length; ++i) { + temp[i] = Math.abs(bitrates[i] - _dotmultiplication(l2AParameter.w, bitrates)); + } + + // Quality is calculated based on the probability distribution w (the output of L2A) + const absoluteIndex = temp.indexOf(Math.min(...temp)); + selectedRepresentation = abrController.getRepresentationByAbsoluteIndex(absoluteIndex, mediaInfo, true); + + // We employ a cautious -stepwise- ascent + if (selectedRepresentation.absoluteIndex > l2AState.currentRepresentation.absoluteIndex) { + if (bitrates[l2AState.currentRepresentation.absoluteIndex + 1] <= lastThroughput) { + selectedRepresentation = abrController.getRepresentationByAbsoluteIndex(l2AState.currentRepresentation.absoluteIndex + 1, mediaInfo, true); + } + } + + // Provision against bitrate over-estimation, by re-calibrating the Lagrangian multiplier Q, to be taken into account for the next chunk + if (selectedRepresentation.bitrateInKbit >= lastThroughput) { + l2AParameter.Q = REACT * Math.max(VL, l2AParameter.Q); + } + l2AState.lastSegmentUrl = currentHttpRequest.url; + } + switchRequest.representation = selectedRepresentation; + l2AState.currentRepresentation = switchRequest.representation; + } + + function _handleErrorState(rulesContext, switchRequest, l2AState) { + const abrController = rulesContext.getAbrController(); + const mediaInfo = rulesContext.getMediaInfo(); + const mediaType = rulesContext.getMediaType(); + const throughputController = rulesContext.getThroughputController(); + const safeThroughput = throughputController.getSafeAverageThroughput(mediaType); + + switchRequest.representation = abrController.getOptimalRepresentationForBitrate(mediaInfo, safeThroughput, true);//During strat-up phase abr.controller is responsible for bitrate decisions. 
+ switchRequest.reason.throughput = safeThroughput; + l2AState.state = L2A_STATE_STARTUP; + _clearL2AStateOnSeek(l2AState); + } + /** * Returns a switch request object indicating which quality is to be played * @param {object} rulesContext * @return {object} */ - function getMaxIndex(rulesContext) { + function getSwitchRequest(rulesContext) { try { const switchRequest = SwitchRequest(context).create(); - const horizon = 4; // Optimization horizon (The amount of steps required to achieve convergence) - const vl = Math.pow(horizon, 0.99);// Cautiousness parameter, used to control aggressiveness of the bitrate decision process. - const alpha = Math.max(Math.pow(horizon, 1), vl * Math.sqrt(horizon));// Step size, used for gradient descent exploration granularity - const mediaInfo = rulesContext.getMediaInfo(); + switchRequest.rule = this.getClassName(); const mediaType = rulesContext.getMediaType(); - const bitrates = mediaInfo.bitrateList.map(b => b.bandwidth); - const bitrateCount = bitrates.length; const scheduleController = rulesContext.getScheduleController(); - const streamInfo = rulesContext.getStreamInfo(); - const abrController = rulesContext.getAbrController(); - const throughputController = rulesContext.getThroughputController(); - const bufferLevel = dashMetrics.getCurrentBufferLevel(mediaType, true); - const safeThroughput = throughputController.getSafeAverageThroughput(mediaType); - const throughput = throughputController.getAverageThroughput(mediaType); // In kbits/s - const react = 2; // Reactiveness to volatility (abrupt throughput drops), used to re-calibrate Lagrangian multiplier Q - const latency = throughputController.getAverageLatency(mediaType); - const videoModel = rulesContext.getVideoModel(); - let quality; - let currentPlaybackRate = videoModel.getPlaybackRate(); - - if (!rulesContext || !rulesContext.hasOwnProperty('getMediaInfo') || !rulesContext.hasOwnProperty('getMediaType') || - !rulesContext.hasOwnProperty('getScheduleController') || 
!rulesContext.hasOwnProperty('getStreamInfo') || - !rulesContext.hasOwnProperty('getAbrController')) { - return switchRequest; - } switchRequest.reason = switchRequest.reason || {}; - if ((mediaType === Constants.AUDIO)) {// L2A decides bitrate only for video. Audio to be included in decision process in a later stage + // L2A decides bitrate only for video. Audio to be included in decision process in a later stage + if ((mediaType === Constants.AUDIO)) { return switchRequest; } scheduleController.setTimeToLoadDelay(0); const l2AState = _getL2AState(rulesContext); - - if (l2AState.state === L2A_STATE_ONE_BITRATE) { - // shouldn't even have been called - return switchRequest; - } - const l2AParameter = l2AParameterDict[mediaType]; - if (!l2AParameter) { return switchRequest; } switchRequest.reason.state = l2AState.state; - switchRequest.reason.throughput = throughput; - switchRequest.reason.latency = latency; - - if (isNaN(throughput)) { - // still starting up - not enough information - return switchRequest; - } switch (l2AState.state) { + case L2A_STATE_ONE_BITRATE: + break; case L2A_STATE_STARTUP: - quality = abrController.getQualityForBitrate(mediaInfo, safeThroughput, streamInfo.id, latency);//During strat-up phase abr.controller is responsible for bitrate decisions. 
- switchRequest.quality = quality; - switchRequest.reason.throughput = safeThroughput; - l2AState.lastQuality = quality; - - if (!isNaN(l2AState.lastSegmentDurationS) && bufferLevel >= l2AParameter.B_target) { - l2AState.state = L2A_STATE_STEADY; - l2AParameter.Q = vl;// Initialization of Q langrangian multiplier - // Update of probability vector w, to be used in main adaptation logic of L2A below (steady state) - for (let i = 0; i < bitrateCount; ++i) { - if (i === l2AState.lastQuality) { - l2AParameter.prev_w[i] = 1; - } else { - l2AParameter.prev_w[i] = 0; - } - } - } - - break; // L2A_STATE_STARTUP + _handleStartupState(rulesContext, switchRequest, l2AState); + break; case L2A_STATE_STEADY: - let diff1 = [];//Used to calculate the difference between consecutive decisions (w-w_prev) - - // Manual calculation of latency and throughput during previous request - let throughputMeasureTime = dashMetrics.getCurrentHttpRequest(mediaType).trace.reduce((a, b) => a + b.d, 0); - const downloadBytes = dashMetrics.getCurrentHttpRequest(mediaType).trace.reduce((a, b) => a + b.b[0], 0); - let lastthroughput = Math.round((8 * downloadBytes) / throughputMeasureTime); // bits/ms = kbits/s - let currentHttpRequest = dashMetrics.getCurrentHttpRequest(mediaType); - - if (lastthroughput < 1) { - lastthroughput = 1; - }//To avoid division with 0 (avoid infinity) in case of an absolute network outage - - // Note that for SegmentBase addressing the request url does not change. 
- // As this is not relevant for low latency streaming at this point the check below is sufficient - if (currentHttpRequest.url === l2AState.lastSegmentUrl || - currentHttpRequest.type === HTTPRequest.INIT_SEGMENT_TYPE) { - // No change to inputs or init segment so use previously calculated quality - quality = l2AState.lastQuality; - - } else { // Recalculate Q - - let V = l2AState.lastSegmentDurationS; - let sign = 1; - - //Main adaptation logic of L2A-LL - for (let i = 0; i < bitrateCount; ++i) { - bitrates[i] = bitrates[i] / 1000; // Originally in bps, now in Kbps - if (currentPlaybackRate * bitrates[i] > lastthroughput) {// In this case buffer would deplete, leading to a stall, which increases latency and thus the particular probability of selsection of bitrate[i] should be decreased. - sign = -1; - } - // The objective of L2A is to minimize the overall latency=request-response time + buffer length after download+ potential stalling (if buffer less than chunk downlad time) - l2AParameter.w[i] = l2AParameter.prev_w[i] + sign * (V / (2 * alpha)) * ((l2AParameter.Q + vl) * (currentPlaybackRate * bitrates[i] / lastthroughput));//Lagrangian descent - } - - // Apply euclidean projection on w to ensure w expresses a probability distribution - l2AParameter.w = euclideanProjection(l2AParameter.w); - - for (let i = 0; i < bitrateCount; ++i) { - diff1[i] = l2AParameter.w[i] - l2AParameter.prev_w[i]; - l2AParameter.prev_w[i] = l2AParameter.w[i]; - } - - // Lagrangian multiplier Q calculation: - l2AParameter.Q = Math.max(0, l2AParameter.Q - V + V * currentPlaybackRate * ((_dotmultiplication(bitrates, l2AParameter.prev_w) + _dotmultiplication(bitrates, diff1)) / lastthroughput)); - - // Quality is calculated as argmin of the absolute difference between available bitrates (bitrates[i]) and bitrate estimation (dotmultiplication(w,bitrates)). 
- let temp = []; - for (let i = 0; i < bitrateCount; ++i) { - temp[i] = Math.abs(bitrates[i] - _dotmultiplication(l2AParameter.w, bitrates)); - } - - // Quality is calculated based on the probability distribution w (the output of L2A) - quality = temp.indexOf(Math.min(...temp)); - - // We employ a cautious -stepwise- ascent - if (quality > l2AState.lastQuality) { - if (bitrates[l2AState.lastQuality + 1] <= lastthroughput) { - quality = l2AState.lastQuality + 1; - } - } - - // Provision against bitrate over-estimation, by re-calibrating the Lagrangian multiplier Q, to be taken into account for the next chunk - if (bitrates[quality] >= lastthroughput) { - l2AParameter.Q = react * Math.max(vl, l2AParameter.Q); - } - l2AState.lastSegmentUrl = currentHttpRequest.url; - } - switchRequest.quality = quality; - switchRequest.reason.throughput = throughput; - switchRequest.reason.latency = latency; - switchRequest.reason.bufferLevel = bufferLevel; - l2AState.lastQuality = switchRequest.quality; + _handleSteadyState(rulesContext, switchRequest, l2AState); break; default: - // should not arrive here, try to recover - logger.debug('L2A ABR rule invoked in bad state.'); - switchRequest.quality = abrController.getQualityForBitrate(mediaInfo, safeThroughput, streamInfo.id, latency); - switchRequest.reason.state = l2AState.state; - switchRequest.reason.throughput = safeThroughput; - switchRequest.reason.latency = latency; - l2AState.state = L2A_STATE_STARTUP; - _clearL2AStateOnSeek(l2AState); + _handleErrorState(rulesContext, switchRequest, l2AState) } return switchRequest; } catch (e) { @@ -489,12 +485,11 @@ function L2ARule(config) { eventBus.off(Events.PLAYBACK_SEEKING, _onPlaybackSeeking, instance); eventBus.off(Events.MEDIA_FRAGMENT_LOADED, _onMediaFragmentLoaded, instance); eventBus.off(Events.METRIC_ADDED, _onMetricAdded, instance); - eventBus.off(Events.QUALITY_CHANGE_REQUESTED, _onQualityChangeRequested, instance); } instance = { - getMaxIndex: getMaxIndex, - reset: reset 
+ getSwitchRequest, + reset }; setup(); diff --git a/src/streaming/rules/abr/SwitchHistoryRule.js b/src/streaming/rules/abr/SwitchHistoryRule.js index 465f360741..a2069363a9 100644 --- a/src/streaming/rules/abr/SwitchHistoryRule.js +++ b/src/streaming/rules/abr/SwitchHistoryRule.js @@ -1,4 +1,3 @@ - import FactoryMaker from '../../../core/FactoryMaker.js'; import Debug from '../../../core/Debug.js'; import SwitchRequest from '../SwitchRequest.js'; @@ -21,24 +20,42 @@ function SwitchHistoryRule() { logger = Debug(context).getInstance().getLogger(instance); } - function getMaxIndex(rulesContext) { + function getSwitchRequest(rulesContext) { + const switchRequest = SwitchRequest(context).create(); + switchRequest.rule = this.getClassName(); + + if (!rulesContext) { + return switchRequest; + } const switchRequestHistory = rulesContext ? rulesContext.getSwitchHistory() : null; const switchRequests = switchRequestHistory ? switchRequestHistory.getSwitchRequests() : []; + const abrController = rulesContext.getAbrController(); + const mediaInfo = rulesContext.getMediaInfo(); let drops = 0; let noDrops = 0; let dropSize = 0; - const switchRequest = SwitchRequest(context).create(); - for (let i = 0; i < switchRequests.length; i++) { - if (switchRequests[i] !== undefined) { - drops += switchRequests[i].drops; - noDrops += switchRequests[i].noDrops; - dropSize += switchRequests[i].dropSize; + switchRequest.rule = this.getClassName(); + + const representations = abrController.getPossibleVoRepresentations(mediaInfo, true); + + for (let i = 0; i < representations.length; i++) { + const currentRepresentation = representations[i]; + if (currentRepresentation && switchRequests[currentRepresentation.id]) { + drops += switchRequests[currentRepresentation.id].drops; + noDrops += switchRequests[currentRepresentation.id].noDrops; + dropSize += switchRequests[currentRepresentation.id].dropSize; if (drops + noDrops >= SAMPLE_SIZE && (drops / noDrops > MAX_SWITCH)) { - switchRequest.quality 
= (i > 0 && switchRequests[i].drops > 0) ? i - 1 : i; - switchRequest.reason = {index: switchRequest.quality, drops: drops, noDrops: noDrops, dropSize: dropSize}; - logger.debug('Switch history rule index: ' + switchRequest.quality + ' samples: ' + (drops + noDrops) + ' drops: ' + drops); + const targetRepresentation = (i > 0 && switchRequests[currentRepresentation.id].drops > 0) ? representations[i - 1] : currentRepresentation; + switchRequest.representation = targetRepresentation; + switchRequest.reason = { + index: switchRequest.quality, + drops: drops, + noDrops: noDrops, + dropSize: dropSize + }; + logger.debug('Switch history rule index: ' + switchRequest.representation.absoluteIndex + ' samples: ' + (drops + noDrops) + ' drops: ' + drops); break; } } @@ -48,7 +65,7 @@ function SwitchHistoryRule() { } instance = { - getMaxIndex: getMaxIndex + getSwitchRequest }; setup(); diff --git a/src/streaming/rules/abr/ThroughputRule.js b/src/streaming/rules/abr/ThroughputRule.js index f703c59718..674017db91 100644 --- a/src/streaming/rules/abr/ThroughputRule.js +++ b/src/streaming/rules/abr/ThroughputRule.js @@ -46,9 +46,10 @@ function ThroughputRule(config) { logger = Debug(context).getInstance().getLogger(instance); } - function getMaxIndex(rulesContext) { + function getSwitchRequest(rulesContext) { try { const switchRequest = SwitchRequest(context).create(); + switchRequest.rule = this.getClassName(); const mediaInfo = rulesContext.getMediaInfo(); const mediaType = rulesContext.getMediaType(); const currentBufferState = dashMetrics.getCurrentBufferState(mediaType); @@ -67,9 +68,9 @@ function ThroughputRule(config) { if (abrController.getAbandonmentStateFor(streamId, mediaType) === MetricsConstants.ALLOW_LOAD) { if (currentBufferState.state === MetricsConstants.BUFFER_LOADED || isDynamic) { - switchRequest.quality = abrController.getQualityForBitrate(mediaInfo, throughput, streamId, latency); - scheduleController.setTimeToLoadDelay(0); + switchRequest.representation = 
abrController.getOptimalRepresentationForBitrate(mediaInfo, throughput, true); switchRequest.reason = { throughput: throughput, latency: latency }; + scheduleController.setTimeToLoadDelay(0); } } @@ -85,7 +86,7 @@ function ThroughputRule(config) { } instance = { - getMaxIndex, + getSwitchRequest, reset }; diff --git a/src/streaming/rules/abr/lolp/LearningAbrController.js b/src/streaming/rules/abr/lolp/LearningAbrController.js index 4d28f5f743..b05781871b 100644 --- a/src/streaming/rules/abr/lolp/LearningAbrController.js +++ b/src/streaming/rules/abr/lolp/LearningAbrController.js @@ -157,9 +157,9 @@ function LearningAbrController() { * @param {array} x * @private */ - function _updateNeurons(winnerNeuron, somElements, x) { - for (let i = 0; i < somElements.length; i++) { - let somNeuron = somElements[i]; + function _updateNeurons(winnerNeuron, x) { + for (let i = 0; i < somBitrateNeurons.length; i++) { + let somNeuron = somBitrateNeurons[i]; let sigma = 0.1; const neuronDistance = _getNeuronDistance(somNeuron, winnerNeuron); let neighbourHood = Math.exp(-1 * Math.pow(neuronDistance, 2) / (2 * Math.pow(sigma, 2))); @@ -198,9 +198,9 @@ function LearningAbrController() { if (somBitrateNeurons) { for (let i = 0; i < somBitrateNeurons.length; i++) { let n = somBitrateNeurons[i]; - if (n.bitrate < currentNeuron.bitrate && n.bitrate > maxSuitableBitrate && currentThroughput > n.bitrate) { + if (n.representation.bandwidth < currentNeuron.representation.bandwidth && n.representation.bandwidth > maxSuitableBitrate && currentThroughput > n.representation.bandwidth) { // possible downshiftable neuron - maxSuitableBitrate = n.bitrate; + maxSuitableBitrate = n.representation.bandwidth; result = n; } } @@ -211,22 +211,22 @@ function LearningAbrController() { /** * + * @param {object} abrController * @param {object} mediaInfo * @param {number} throughput * @param {number} latency - * @param {number} bufferSize + * @param {number} currentBufferLevel * @param {number} playbackRate - 
* @param {number} currentQualityIndex + * @param {object} currentRepresentation * @param {object} dynamicWeightsSelector * @return {null|*} */ - function getNextQuality(mediaInfo, throughput, latency, bufferSize, playbackRate, currentQualityIndex, dynamicWeightsSelector) { + function getNextQuality(abrController, mediaInfo, throughput, latency, currentBufferLevel, playbackRate, currentRepresentation, dynamicWeightsSelector) { // For Dynamic Weights Selector let currentLatency = latency; - let currentBuffer = bufferSize; let currentThroughput = throughput; - let somElements = _getSomBitrateNeurons(mediaInfo); + _setSomBitrateNeurons(mediaInfo, abrController); // normalize throughput let throughputNormalized = throughput / bitrateNormalizationFactor; // saturate values higher than 1 @@ -238,20 +238,17 @@ function LearningAbrController() { const targetLatency = 0; const targetRebufferLevel = 0; - const targetSwitch = 0; // 10K + video encoding is the recommended throughput const throughputDelta = 10000; - logger.debug(`getNextQuality called throughput:${throughputNormalized} latency:${latency} bufferSize:${bufferSize} currentQualityIndex:${currentQualityIndex} playbackRate:${playbackRate}`); - - let currentNeuron = somElements[currentQualityIndex]; - let downloadTime = (currentNeuron.bitrate * dynamicWeightsSelector.getSegmentDuration()) / currentThroughput; - let rebuffer = Math.max(0, (downloadTime - currentBuffer)); + let currentNeuron = somBitrateNeurons.find(entry => entry.representation.id === currentRepresentation.id); + let downloadTime = (currentNeuron.representation.bandwidth * dynamicWeightsSelector.getSegmentDuration()) / currentThroughput; + let rebuffer = Math.max(0, (downloadTime - currentBufferLevel)); // check buffer for possible stall - if (currentBuffer - downloadTime < dynamicWeightsSelector.getMinBuffer()) { - logger.debug(`Buffer is low for bitrate= ${currentNeuron.bitrate} downloadTime=${downloadTime} currentBuffer=${currentBuffer} 
rebuffer=${rebuffer}`); - return _getDownShiftNeuron(currentNeuron, currentThroughput).qualityIndex; + if (currentBufferLevel - downloadTime < dynamicWeightsSelector.getMinBuffer()) { + logger.debug(`Buffer is low for bitrate= ${currentNeuron.representation.bandwidth} downloadTime=${downloadTime} currentBuffer=${currentBufferLevel} rebuffer=${rebuffer}`); + return _getDownShiftNeuron(currentNeuron, currentThroughput).representation; } switch (weightSelectionMode) { @@ -259,22 +256,22 @@ function LearningAbrController() { _manualWeightSelection(); break; case WEIGHT_SELECTION_MODES.RANDOM: - _randomWeightSelection(somElements); + _randomWeightSelection(); break; case WEIGHT_SELECTION_MODES.DYNAMIC: - _dynamicWeightSelection(dynamicWeightsSelector, somElements, currentLatency, currentBuffer, rebuffer, currentThroughput, playbackRate); + _dynamicWeightSelection(dynamicWeightsSelector, currentLatency, currentBufferLevel, rebuffer, currentThroughput, playbackRate); break; default: - _dynamicWeightSelection(dynamicWeightsSelector, somElements, currentLatency, currentBuffer, rebuffer, currentThroughput, playbackRate); + _dynamicWeightSelection(dynamicWeightsSelector, currentLatency, currentBufferLevel, rebuffer, currentThroughput, playbackRate); } let minDistance = null; - let minIndex = null; + let targetRepresentation = null; let winnerNeuron = null; - for (let i = 0; i < somElements.length; i++) { - let somNeuron = somElements[i]; + for (let i = 0; i < somBitrateNeurons.length; i++) { + let somNeuron = somBitrateNeurons[i]; let somNeuronState = somNeuron.state; let somData = [somNeuronState.throughput, somNeuronState.latency, @@ -282,37 +279,37 @@ function LearningAbrController() { somNeuronState.switch]; let distanceWeights = weights.slice(); - let nextBuffer = dynamicWeightsSelector.getNextBufferWithBitrate(somNeuron.bitrate, currentBuffer, currentThroughput); + let nextBuffer = dynamicWeightsSelector.getNextBufferWithBitrate(somNeuron.representation.bandwidth, 
currentBufferLevel, currentThroughput); let isBufferLow = nextBuffer < dynamicWeightsSelector.getMinBuffer(); if (isBufferLow) { - logger.debug(`Buffer is low for bitrate=${somNeuron.bitrate} downloadTime=${downloadTime} currentBuffer=${currentBuffer} nextBuffer=${nextBuffer}`); + logger.debug(`Buffer is low for bitrate=${somNeuron.representation.bandwidth} downloadTime=${downloadTime} currentBuffer=${currentBufferLevel} nextBuffer=${nextBuffer}`); } // special condition downshift immediately - if (somNeuron.bitrate > throughput - throughputDelta || isBufferLow) { - if (somNeuron.bitrate !== minBitrate) { + if (somNeuron.representation.bandwidth > throughput - throughputDelta || isBufferLow) { + if (somNeuron.representation.bandwidth !== minBitrate) { // encourage to pick smaller bitrates throughputWeight=100 distanceWeights[0] = 100; } } // calculate the distance with the target - let distance = _getDistance(somData, [throughputNormalized, targetLatency, targetRebufferLevel, targetSwitch], distanceWeights); + let distance = _getDistance(somData, [throughputNormalized, targetLatency, targetRebufferLevel, 0], distanceWeights); if (minDistance === null || distance < minDistance) { minDistance = distance; - minIndex = somNeuron.qualityIndex; + targetRepresentation = somNeuron.representation; winnerNeuron = somNeuron; } } // update current neuron and the neighbourhood with the calculated QoE // will punish current if it is not picked - let bitrateSwitch = Math.abs(currentNeuron.bitrate - winnerNeuron.bitrate) / bitrateNormalizationFactor; - _updateNeurons(currentNeuron, somElements, [throughputNormalized, latency, rebuffer, bitrateSwitch]); + let bitrateSwitch = Math.abs(currentNeuron.representation.bandwidth - winnerNeuron.representation.bandwidth) / bitrateNormalizationFactor; + _updateNeurons(currentNeuron, [throughputNormalized, latency, rebuffer, bitrateSwitch]); // update bmu and neighbours with targetQoE=1, targetLatency=0 - _updateNeurons(winnerNeuron, 
somElements, [throughputNormalized, targetLatency, targetRebufferLevel, bitrateSwitch]); + _updateNeurons(winnerNeuron, [throughputNormalized, targetLatency, targetRebufferLevel, bitrateSwitch]); - return minIndex; + return targetRepresentation; } /** @@ -330,11 +327,10 @@ function LearningAbrController() { /** * Option 2: Random (Xavier) weights - * @param {array} somElements * @private */ - function _randomWeightSelection(somElements) { - weights = _getXavierWeights(somElements.length, 4); + function _randomWeightSelection() { + weights = _getXavierWeights(somBitrateNeurons.length, 4); } /** @@ -348,12 +344,12 @@ function LearningAbrController() { * @param {number} playbackRate * @private */ - function _dynamicWeightSelection(dynamicWeightsSelector, somElements, currentLatency, currentBuffer, rebuffer, currentThroughput, playbackRate) { + function _dynamicWeightSelection(dynamicWeightsSelector, currentLatency, currentBuffer, rebuffer, currentThroughput, playbackRate) { if (!weights) { weights = sortedCenters[sortedCenters.length - 1]; } // Dynamic Weights Selector (step 2/2: find weights) - let weightVector = dynamicWeightsSelector.findWeightVector(somElements, currentLatency, currentBuffer, rebuffer, currentThroughput, playbackRate); + let weightVector = dynamicWeightsSelector.findWeightVector(somBitrateNeurons, currentLatency, currentBuffer, rebuffer, currentThroughput, playbackRate); if (weightVector !== null && weightVector !== -1) { // null: something went wrong, -1: constraints not met weights = weightVector; } @@ -382,38 +378,31 @@ function LearningAbrController() { /** * * @param {object} mediaInfo + * @param abrController * @return {array} * @private */ - function _getSomBitrateNeurons(mediaInfo) { + function _setSomBitrateNeurons(mediaInfo, abrController) { if (!somBitrateNeurons) { somBitrateNeurons = []; - const bitrateList = mediaInfo.bitrateList; - let bitrateVector = []; - minBitrate = bitrateList[0].bandwidth; - - bitrateList.forEach(element => { 
- bitrateVector.push(element.bandwidth); - if (element.bandwidth < minBitrate) { - minBitrate = element.bandwidth; - } - }); - bitrateNormalizationFactor = _getMagnitude(bitrateVector); + const possibleRepresentations = abrController.getPossibleVoRepresentations(mediaInfo, true); + const bitrateList = possibleRepresentations.map((r) => r.bandwidth); + minBitrate = Math.min(...bitrateList); + bitrateNormalizationFactor = _getMagnitude(bitrateList); - for (let i = 0; i < bitrateList.length; i++) { + possibleRepresentations.forEach((rep) => { let neuron = { - qualityIndex: i, - bitrate: bitrateList[i].bandwidth, + representation: rep, state: { // normalize throughputs - throughput: bitrateList[i].bandwidth / bitrateNormalizationFactor, + throughput: rep.bandwidth / bitrateNormalizationFactor, latency: 0, rebuffer: 0, switch: 0 } }; somBitrateNeurons.push(neuron); - } + }) sortedCenters = _getInitialKmeansPlusPlusCenters(somBitrateNeurons); } @@ -445,17 +434,17 @@ function LearningAbrController() { /** * - * @param {array} somElements + * @param {array} somBitrateNeurons * @return {array} * @private */ - function _getInitialKmeansPlusPlusCenters(somElements) { + function _getInitialKmeansPlusPlusCenters(somBitrateNeurons) { let centers = []; - let randomDataSet = _getRandomData(Math.pow(somElements.length, 2)); + let randomDataSet = _getRandomData(Math.pow(somBitrateNeurons.length, 2)); centers.push(randomDataSet[0]); let distanceWeights = [1, 1, 1, 1]; - for (let k = 1; k < somElements.length; k++) { + for (let k = 1; k < somBitrateNeurons.length; k++) { let nextPoint = null; let maxDistance = null; for (let i = 0; i < randomDataSet.length; i++) { diff --git a/src/streaming/rules/abr/lolp/LoLpRule.js b/src/streaming/rules/abr/lolp/LoLpRule.js index 9bbb544558..79b44cd6d9 100644 --- a/src/streaming/rules/abr/lolp/LoLpRule.js +++ b/src/streaming/rules/abr/lolp/LoLpRule.js @@ -66,13 +66,14 @@ function LoLPRule(config) { qoeEvaluator = LoLpQoeEvaluator(context).create(); 
} - function getMaxIndex(rulesContext) { + function getSwitchRequest(rulesContext) { try { let switchRequest = SwitchRequest(context).create(); + switchRequest.rule = this.getClassName(); let mediaType = rulesContext.getMediaInfo().type; let abrController = rulesContext.getAbrController(); const streamInfo = rulesContext.getStreamInfo(); - let currentQuality = abrController.getQualityFor(mediaType, streamInfo.id); + let currentRepresentation = rulesContext.getRepresentation(); const mediaInfo = rulesContext.getMediaInfo(); const bufferStateVO = dashMetrics.getCurrentBufferState(mediaType); const scheduleController = rulesContext.getScheduleController(); @@ -102,21 +103,14 @@ function LoLPRule(config) { } // QoE parameters - let bitrateList = mediaInfo.bitrateList; // [{bandwidth: 200000, width: 640, height: 360}, ...] - let segmentDuration = rulesContext.getRepresentationInfo().fragmentDuration; - let minBitrateKbps = bitrateList[0].bandwidth / 1000.0; // min bitrate level - let maxBitrateKbps = bitrateList[bitrateList.length - 1].bandwidth / 1000.0; // max bitrate level - for (let i = 0; i < bitrateList.length; i++) { // in case bitrateList is not sorted as expected - let b = bitrateList[i].bandwidth / 1000.0; - if (b > maxBitrateKbps) - maxBitrateKbps = b; - else if (b < minBitrateKbps) { - minBitrateKbps = b; - } - } + const possibleRepresentations = abrController.getPossibleVoRepresentations(mediaInfo, true); + let bandwidths = possibleRepresentations.map(r => r.bandwidth); + let segmentDuration = rulesContext.getRepresentation().fragmentDuration; + let minBitrateKbps = Math.min(...bandwidths) / 1000.0; // min bitrate level + let maxBitrateKbps = Math.max(...bandwidths) / 1000.0; // max bitrate level // Learning rule pre-calculations - let currentBitrate = bitrateList[currentQuality].bandwidth; + let currentBitrate = currentRepresentation.bandwidth; let currentBitrateKbps = currentBitrate / 1000.0; let httpRequest = dashMetrics.getCurrentHttpRequest(mediaType, 
true); let lastFragmentDownloadTime = (httpRequest.tresponse.getTime() - httpRequest.trequest.getTime()) / 1000; @@ -137,16 +131,21 @@ function LoLPRule(config) { /* * Select next quality */ - switchRequest.quality = learningController.getNextQuality(mediaInfo, throughput * 1000, latency, currentBufferLevel, playbackRate, currentQuality, dynamicWeightsSelector); + switchRequest.representation = learningController.getNextQuality( + abrController, + mediaInfo, + throughput * 1000, + latency, + currentBufferLevel, + playbackRate, + currentRepresentation, + dynamicWeightsSelector + ); switchRequest.reason = { throughput: throughput, latency: latency }; switchRequest.priority = SwitchRequest.PRIORITY.STRONG; scheduleController.setTimeToLoadDelay(0); - if (switchRequest.quality !== currentQuality) { - logger.debug('[TgcLearningRule][' + mediaType + '] requesting switch to index: ', switchRequest.quality, 'Average throughput', Math.round(throughput), 'kbps'); - } - return switchRequest; } catch (e) { throw e; @@ -170,7 +169,7 @@ function LoLPRule(config) { } instance = { - getMaxIndex, + getSwitchRequest, reset }; diff --git a/src/streaming/rules/abr/lolp/LoLpWeightSelector.js b/src/streaming/rules/abr/lolp/LoLpWeightSelector.js index fa1d760eaf..21f2b29915 100644 --- a/src/streaming/rules/abr/lolp/LoLpWeightSelector.js +++ b/src/streaming/rules/abr/lolp/LoLpWeightSelector.js @@ -103,7 +103,7 @@ function LoLpWeightSelector(config) { switch: weightVector[3] }; - let downloadTime = (neuron.bitrate * segmentDuration) / currentThroughput; + let downloadTime = (neuron.representation.bandwidth * segmentDuration) / currentThroughput; let nextBuffer = getNextBuffer(currentBuffer, downloadTime); let rebuffer = Math.max(0.00001, (downloadTime - nextBuffer)); let wt; @@ -121,11 +121,11 @@ function LoLpWeightSelector(config) { } let weightedLatency = wt * neuron.state.latency; - let totalQoE = qoeEvaluator.calculateSingleUseQoe(neuron.bitrate, weightedRebuffer, weightedLatency, 
playbackRate); + let totalQoE = qoeEvaluator.calculateSingleUseQoe(neuron.representation.bandwidth, weightedRebuffer, weightedLatency, playbackRate); if ((maxQoE === null || totalQoE > maxQoE) && _checkConstraints(currentLatency, nextBuffer, deltaLatency)) { maxQoE = totalQoE; winnerWeights = weightVector; - winnerBitrate = neuron.bitrate; + winnerBitrate = neuron.representation.bandwidth; } }); }); diff --git a/src/streaming/text/TextSourceBuffer.js b/src/streaming/text/TextSourceBuffer.js index 721d81e4f0..415dd2f94b 100644 --- a/src/streaming/text/TextSourceBuffer.js +++ b/src/streaming/text/TextSourceBuffer.js @@ -175,7 +175,7 @@ function TextSourceBuffer(config) { function _onVideoChunkReceived(e) { const chunk = e.chunk; - if (chunk.mediaInfo.embeddedCaptions) { + if (chunk.representation.mediaInfo.embeddedCaptions) { append(chunk.bytes, chunk); } } @@ -244,7 +244,7 @@ function TextSourceBuffer(config) { } function _getKind(mediaInfo, trackKindMap) { - let kind = (mediaInfo.roles && mediaInfo.roles.length > 0) ? trackKindMap[mediaInfo.roles[0]] : trackKindMap.caption; + let kind = (mediaInfo.roles && mediaInfo.roles.length > 0) ? trackKindMap[mediaInfo.roles[0].value] : trackKindMap.caption; kind = (kind === trackKindMap.caption || kind === trackKindMap.subtitle) ? 
kind : trackKindMap.caption; @@ -252,7 +252,7 @@ function TextSourceBuffer(config) { } function append(bytes, chunk) { - const mediaInfo = chunk.mediaInfo; + const mediaInfo = chunk.representation.mediaInfo; const mediaType = mediaInfo.type; const mimeType = mediaInfo.mimeType; const codecType = mediaInfo.codec || mimeType; diff --git a/src/streaming/thumbnail/ThumbnailController.js b/src/streaming/thumbnail/ThumbnailController.js index aa0d71e53b..391e11abb7 100644 --- a/src/streaming/thumbnail/ThumbnailController.js +++ b/src/streaming/thumbnail/ThumbnailController.js @@ -30,10 +30,8 @@ */ import FactoryMaker from '../../core/FactoryMaker.js'; -import Constants from '../constants/Constants.js'; import Thumbnail from '../vo/Thumbnail.js'; import ThumbnailTracks from './ThumbnailTracks.js'; -import BitrateInfo from '../vo/BitrateInfo.js'; import {replaceTokenForTemplate, unescapeDollarsInTemplate} from '../../dash/utils/SegmentsUtils.js'; function ThumbnailController(config) { @@ -131,23 +129,21 @@ function ThumbnailController(config) { thumbnailTracks.setTrackByIndex(index); } + function setTrackById(id) { + thumbnailTracks.setTrackById(id); + } + function getCurrentTrackIndex() { return thumbnailTracks.getCurrentTrackIndex(); } - function getBitrateList() { - const tracks = thumbnailTracks.getTracks(); - let i = 0; - - return tracks.map((t) => { - const bitrateInfo = new BitrateInfo(); - bitrateInfo.mediaType = Constants.IMAGE; - bitrateInfo.qualityIndex = i++; - bitrateInfo.bitrate = t.bitrate; - bitrateInfo.width = t.width; - bitrateInfo.height = t.height; - return bitrateInfo; - }); + function getCurrentTrack() { + return thumbnailTracks.getCurrentTrack(); + } + + function getPossibleVoRepresentations() { + return thumbnailTracks.getRepresentations(); + } function reset() { @@ -157,13 +153,15 @@ function ThumbnailController(config) { } instance = { + getCurrentTrack, + getCurrentTrackIndex, + getPossibleVoRepresentations, getStreamId, initialize, provide, + 
reset, setTrackByIndex, - getCurrentTrackIndex, - getBitrateList, - reset + setTrackById }; setup(); diff --git a/src/streaming/thumbnail/ThumbnailTracks.js b/src/streaming/thumbnail/ThumbnailTracks.js index d60714d8c0..c69619be40 100644 --- a/src/streaming/thumbnail/ThumbnailTracks.js +++ b/src/streaming/thumbnail/ThumbnailTracks.js @@ -57,6 +57,7 @@ function ThumbnailTracks(config) { let instance, tracks, + representations, dashHandler, currentTrackIndex, mediaInfo, @@ -119,6 +120,8 @@ function ThumbnailTracks(config) { if (rep.segmentInfoType === DashConstants.SEGMENT_BASE) { _createTrack(rep, true); } + + representations.push(rep); }); } @@ -174,6 +177,7 @@ function ThumbnailTracks(config) { let cache = []; const segments = _normalizeSegments(data, representation); representation.segments = segments; + representation.fragmentDuration = representation.segmentDuration || (representation.segments && representation.segments.length > 0 ? representation.segments[0].duration : NaN); track.segmentDuration = representation.segments[0].duration; //assume all segments have the same duration track.readThumbnail = function (time, callback) { @@ -287,6 +291,20 @@ function ThumbnailTracks(config) { currentTrackIndex = index; } + function setTrackById(id) { + if (!tracks || tracks.length === 0) { + return; + } + + const index = tracks.findIndex((elem) => { + return elem.id === id + }) + + if (index !== -1) { + currentTrackIndex = index; + } + } + function getThumbnailRequestForTime(time) { let currentVoRep; const voReps = adapter.getVoRepresentations(mediaInfo); @@ -300,20 +318,27 @@ function ThumbnailTracks(config) { return dashHandler.getSegmentRequestForTime(mediaInfo, currentVoRep, time); } + function getRepresentations() { + return representations + } + function reset() { tracks = []; + representations = []; currentTrackIndex = -1; mediaInfo = null; } instance = { - getTracks, addTracks, - reset, - setTrackByIndex, getCurrentTrack, getCurrentTrackIndex, - 
getThumbnailRequestForTime + getRepresentations, + getThumbnailRequestForTime, + getTracks, + reset, + setTrackById, + setTrackByIndex, }; setup(); diff --git a/src/streaming/utils/Capabilities.js b/src/streaming/utils/Capabilities.js index 8a47a588bc..5d1074f3b5 100644 --- a/src/streaming/utils/Capabilities.js +++ b/src/streaming/utils/Capabilities.js @@ -32,17 +32,6 @@ import FactoryMaker from '../../core/FactoryMaker.js'; import {THUMBNAILS_SCHEME_ID_URIS} from '../thumbnail/ThumbnailTracks.js'; import Constants from '../constants/Constants.js'; -const codecCompatibilityTable = [ - { - 'codec': 'avc1', - 'compatibleCodecs': ['avc3'] - }, - { - 'codec': 'avc3', - 'compatibleCodecs': ['avc1'] - } -]; - export function supportsMediaSource() { let hasWebKit = ('WebKitMediaSource' in window); let hasMediaSource = ('MediaSource' in window); @@ -70,6 +59,25 @@ function Capabilities() { } } + function isProtectionCompatible(previousStream, newStream) { + if (!newStream) { + return true; + } + return _compareProtectionConfig(Constants.VIDEO, previousStream, newStream) && _compareProtectionConfig(Constants.AUDIO, previousStream, newStream); + } + + function _compareProtectionConfig(type, previousStream, newStream) { + const previousMediaInfo = previousStream.getCurrentMediaInfoForType(type); + const newMediaInfo = newStream.getCurrentMediaInfoForType(type); + + if (!previousMediaInfo || !newMediaInfo) { + return true; + } + + // If the current period is unencrypted and the upcoming one is encrypted we need to reset sourcebuffers. 
+ return !(!previousMediaInfo.hasProtectedRepresentations && newMediaInfo.hasProtectedRepresentations); + } + /** * Returns whether Encrypted Media Extensions are supported on this * user agent @@ -115,7 +123,6 @@ function Capabilities() { * @private */ function _canUseMediaCapabilitiesApi(config, type) { - return settings.get().streaming.capabilities.useMediaCapabilitiesApi && navigator.mediaCapabilities && navigator.mediaCapabilities.decodingInfo && ((config.codec && type === Constants.AUDIO) || (type === Constants.VIDEO && config.codec && config.width && config.height && config.bitrate && config.framerate)); } @@ -199,36 +206,14 @@ function Capabilities() { } } - /** - * Check if the root of the old codec is the same as the new one, or if it's declared as compatible in the compat table - * @param {string} codec1 - * @param {string} codec2 - * @return {boolean} - */ - function codecRootCompatibleWithCodec(codec1, codec2) { - const codecRoot = codec1.split('.')[0]; - const rootCompatible = codec2.indexOf(codecRoot) === 0; - let compatTableCodec; - for (let i = 0; i < codecCompatibilityTable.length; i++) { - if (codecCompatibilityTable[i].codec === codecRoot) { - compatTableCodec = codecCompatibilityTable[i]; - break; - } - } - if (compatTableCodec) { - return rootCompatible || compatTableCodec.compatibleCodecs.some((compatibleCodec) => codec2.indexOf(compatibleCodec) === 0); - } - return rootCompatible; - } - instance = { + isProtectionCompatible, setConfig, - supportsMediaSource, - supportsEncryptedMedia, - supportsCodec, setEncryptedMediaSupported, + supportsCodec, + supportsEncryptedMedia, supportsEssentialProperty, - codecRootCompatibleWithCodec + supportsMediaSource, }; setup(); diff --git a/src/streaming/utils/InitCache.js b/src/streaming/utils/InitCache.js index f59c606f66..7cdb4d5688 100644 --- a/src/streaming/utils/InitCache.js +++ b/src/streaming/utils/InitCache.js @@ -41,7 +41,7 @@ function InitCache() { function save (chunk) { const id = 
chunk.streamId; - const representationId = chunk.representationId; + const representationId = chunk.representation.id; data[id] = data[id] || {}; data[id][representationId] = chunk; diff --git a/src/streaming/vo/DataChunk.js b/src/streaming/vo/DataChunk.js index 1bdfcace2f..2418e31e09 100644 --- a/src/streaming/vo/DataChunk.js +++ b/src/streaming/vo/DataChunk.js @@ -37,7 +37,6 @@ class DataChunk { //Represents a data structure that keep all the necessary info about a single init/media segment constructor() { this.streamId = null; - this.mediaInfo = null; this.segmentType = null; this.quality = NaN; this.index = NaN; @@ -45,9 +44,9 @@ class DataChunk { this.start = NaN; this.end = NaN; this.duration = NaN; - this.representationId = null; + this.representation = null; this.endFragment = null; } } -export default DataChunk; \ No newline at end of file +export default DataChunk; diff --git a/src/streaming/vo/FragmentRequest.js b/src/streaming/vo/FragmentRequest.js index 78c1119ea9..2f5f514264 100644 --- a/src/streaming/vo/FragmentRequest.js +++ b/src/streaming/vo/FragmentRequest.js @@ -38,29 +38,29 @@ import { HTTPRequest } from './metrics/HTTPRequest.js'; class FragmentRequest { constructor(url) { this.action = FragmentRequest.ACTION_DOWNLOAD; - this.startTime = NaN; + this.availabilityEndTime = null; + this.availabilityStartTime = null; + this.bandwidth = NaN; + this.bytesLoaded = NaN; + this.bytesTotal = NaN; + this.delayLoadingTime = NaN; + this.duration = NaN; + this.endDate = null; + this.firstByteDate = null; + this.index = NaN; this.mediaStartTime = NaN; this.mediaType = null; - this.mediaInfo = null; - this.type = null; - this.duration = NaN; - this.timescale = NaN; + this.quality = NaN; this.range = null; - this.url = url || null; + this.representation = null; + this.responseType = 'arraybuffer'; this.serviceLocation = null; this.startDate = null; - this.firstByteDate = null; - this.endDate = null; - this.quality = NaN; - this.index = NaN; - 
this.availabilityStartTime = null; - this.availabilityEndTime = null; + this.startTime = NaN; + this.timescale = NaN; + this.type = null; + this.url = url || null; this.wallStartTime = null; - this.bytesLoaded = NaN; - this.bytesTotal = NaN; - this.delayLoadingTime = NaN; - this.responseType = 'arraybuffer'; - this.representationId = null; } isInitializationRequest() { diff --git a/src/streaming/vo/metrics/ManifestUpdate.js b/src/streaming/vo/metrics/ManifestUpdate.js index 1fa2ed827b..63d4abc818 100644 --- a/src/streaming/vo/metrics/ManifestUpdate.js +++ b/src/streaming/vo/metrics/ManifestUpdate.js @@ -94,7 +94,7 @@ class ManifestUpdate { */ this.streamInfo = []; /** - * Array holding list of RepresentationInfo VO Objects + * Array holding list of ManifestUpdateRepresentationInfo Objects * @public */ this.representationInfo = []; @@ -158,11 +158,6 @@ class ManifestUpdateRepresentationInfo { * @public */ this.mediaType = null; - /** - * Which representation - * @public - */ - this.streamIndex = null; /** * Holds reference to @presentationTimeOffset * @public @@ -173,11 +168,6 @@ class ManifestUpdateRepresentationInfo { * @public */ this.startNumber = null; - /** - * list|template|timeline - * @public - */ - this.fragmentInfoType = null; } } diff --git a/src/streaming/vo/metrics/SchedulingInfo.js b/src/streaming/vo/metrics/SchedulingInfo.js index 78249d5d06..199afa9b26 100644 --- a/src/streaming/vo/metrics/SchedulingInfo.js +++ b/src/streaming/vo/metrics/SchedulingInfo.js @@ -70,10 +70,10 @@ class SchedulingInfo { */ this.duration = null; /** - * Bit Rate Quality of fragment + * Bandwidth of fragment * @public */ - this.quality = null; + this.bandwidth = null; /** * Range of fragment * @public @@ -88,4 +88,4 @@ class SchedulingInfo { } } -export default SchedulingInfo; \ No newline at end of file +export default SchedulingInfo; diff --git a/test/unit/dash.DashAdapter.js b/test/unit/dash.DashAdapter.js index 083bb246b2..9eb0055b57 100644 --- 
a/test/unit/dash.DashAdapter.js +++ b/test/unit/dash.DashAdapter.js @@ -1,5 +1,4 @@ import DashAdapter from '../../src/dash/DashAdapter.js'; -import MediaInfo from '../../src/dash/vo/MediaInfo.js'; import Constants from '../../src/streaming/constants/Constants.js'; import DashConstants from '../../src/dash/constants/DashConstants.js'; import cea608parser from '../../externals/cea608-parser.js'; @@ -409,20 +408,6 @@ describe('DashAdapter', function () { }); }); - it('should return null when convertRepresentationToRepresentationInfo is called and voRepresentation parameter is null or undefined', function () { - const representationInfo = dashAdapter.convertRepresentationToRepresentationInfo(); - - expect(representationInfo).to.be.null; - }); - - it('should return correct representationInfo when convertRepresentationToRepresentationInfo is called and voRepresentation parameter is well defined', function () { - const voRepresentation = voHelper.getDummyRepresentation(Constants.VIDEO, 0); - const representationInfo = dashAdapter.convertRepresentationToRepresentationInfo(voRepresentation); - - expect(representationInfo).not.to.be.null; - expect(representationInfo.quality).to.equal(0); - }); - it('should return undefined when getVoRepresentations is called and mediaInfo parameter is null or undefined', function () { const voRepresentations = dashAdapter.getVoRepresentations(); @@ -430,7 +415,7 @@ describe('DashAdapter', function () { expect(voRepresentations).to.be.empty; }); - it('should return the first adaptation when getAdaptationForType is called and streamInfo is undefined', () => { + it('should return the first adaptation when getMainAdaptationForType is called and streamInfo is undefined', () => { const manifest_with_video = { loadedTime: new Date(), mediaPresentationDuration: 10, @@ -442,7 +427,7 @@ describe('DashAdapter', function () { }] }; dashAdapter.updatePeriods(manifest_with_video); - const adaptation = dashAdapter.getAdaptationForType(0, Constants.VIDEO); 
+ const adaptation = dashAdapter.getMainAdaptationForType(Constants.VIDEO); expect(adaptation.id).to.equal(0); }); @@ -507,41 +492,12 @@ describe('DashAdapter', function () { expect(index).to.be.equal(-1); }); - it('should return -1 when getMaxIndexForBufferType is called and bufferType and periodIdx are undefined', () => { - const index = dashAdapter.getMaxIndexForBufferType(); - - expect(index).to.be.equal(-1); - }); - it('should return undefined when getRealAdaptation is called and streamInfo parameter is null or undefined', function () { const realAdaptation = dashAdapter.getRealAdaptation(null, voHelper.getDummyMediaInfo(Constants.VIDEO)); expect(realAdaptation).to.be.undefined; }); - it('should return the correct adaptation when getAdaptationForType is called', () => { - const streamInfo = { - id: 'id' - }; - - const track = new MediaInfo(); - - track.id = undefined; - track.index = 1; - track.streamInfo = streamInfo; - track.representationCount = 0; - track.lang = 'deu'; - track.roles = [{ schemeIdUri: 'urn:mpeg:dash:role:2011', value: 'main'}]; - track.rolesWithSchemeIdUri = [{ schemeIdUri: 'aScheme', value: 'main' }]; - track.codec = 'audio/mp4;codecs="mp4a.40.2"'; - track.mimeType = 'audio/mp4'; - - dashAdapter.setCurrentMediaInfo(streamInfo.id, Constants.AUDIO, track); - const adaptation = dashAdapter.getAdaptationForType(0, Constants.AUDIO, streamInfo); - - expect(adaptation.lang).to.equal('eng'); - }); - it('should return an empty array when getEventsFor is called and info parameter is undefined', function () { const eventsArray = dashAdapter.getEventsFor(); diff --git a/test/unit/dash.controllers.RepresentationController.js b/test/unit/dash.controllers.RepresentationController.js index f70db24d61..d8152625c1 100644 --- a/test/unit/dash.controllers.RepresentationController.js +++ b/test/unit/dash.controllers.RepresentationController.js @@ -1,6 +1,5 @@ import ObjectsHelper from './helpers/ObjectsHelper.js'; import VoHelper from './helpers/VOHelper.js'; 
-import MpdHelper from './helpers/MPDHelper.js'; import EventBus from '../../src/core/EventBus.js'; import RepresentationController from '../../src/dash/controllers/RepresentationController.js'; import Events from '../../src/core/events/Events.js'; @@ -27,9 +26,6 @@ describe('RepresentationController', function () { const context = {}; const testType = 'video'; const specHelper = new SpecHelper(); - const mpdHelper = new MpdHelper(); - const mpd = mpdHelper.getMpd('static'); - const data = mpd.Period[0].AdaptationSet[0]; const voRepresentations = []; voRepresentations.push(voHelper.getDummyRepresentation(testType, 0), voHelper.getDummyRepresentation(testType, 1), voHelper.getDummyRepresentation(testType, 2)); const streamProcessor = objectsHelper.getDummyStreamProcessor(testType); @@ -46,28 +42,6 @@ describe('RepresentationController', function () { let representationController; - describe('Config not correctly passed', function () { - beforeEach(function () { - representationController = RepresentationController(context).create({ - streamInfo: streamProcessor.getStreamInfo(), - events: Events, - eventBus: eventBus - }); - }); - - afterEach(function () { - representationController.reset(); - representationController = null; - }); - it('should not contain data before it is set', function () { - // Act - const data = representationController.getData(); - - // Assert - expect(data).not.exist; // jshint ignore:line - }); - - }); describe('Config correctly passed', function () { beforeEach(function () { @@ -94,39 +68,32 @@ describe('RepresentationController', function () { describe('when data update completed', function () { beforeEach(function (done) { - representationController.updateData(data, voRepresentations, testType, true, 0); + representationController.updateData(voRepresentations, true, voRepresentations[0].id); setTimeout(function () { done(); }, specHelper.getExecutionDelay()); }); - it('should return the data that was set', function () { - 
expect(representationController.getData()).to.equal(data); - }); - it('should return correct representation for quality', function () { - const quality = 0; - const expectedValue = 0; + it('should return current selected representation', function () { + const expectedValue = voRepresentations[0].id; - expect(representationController.getRepresentationForQuality(quality).index).to.equal(expectedValue); + expect(representationController.getCurrentRepresentation().id).to.equal(expectedValue); }); - it('should return null if quality is undefined', function () { - expect(representationController.getRepresentationForQuality()).to.equal(null); + it('should return null if id is undefined', function () { + expect(representationController.getRepresentationById()).to.equal(null); }); - it('should return null if quality is greater than voAvailableRepresentations.length - 1', function () { - expect(representationController.getRepresentationForQuality(150)).to.equal(null); - }); it('should update current representation when preparing a quality change', function () { let currentRepresentation = representationController.getCurrentRepresentation(); - expect(currentRepresentation.index).to.equal(0); // jshint ignore:line + expect(currentRepresentation.id).to.equal(voRepresentations[0].id); // jshint ignore:line - representationController.prepareQualityChange(1); + representationController.prepareQualityChange(voRepresentations[1]); currentRepresentation = representationController.getCurrentRepresentation(); - expect(currentRepresentation.index).to.equal(1); // jshint ignore:line + expect(currentRepresentation.id).to.equal(voRepresentations[1].id); // jshint ignore:line }); it('when a MANIFEST_VALIDITY_CHANGED event occurs, should update current representation', function () { @@ -140,10 +107,10 @@ describe('RepresentationController', function () { }); describe('when a call to reset is done', function () { - it('should not contain data after a call to reset', function () { + it('should 
not contain representation after a call to reset', function () { representationController.reset(); // Act - const data = representationController.getData(); + const data = representationController.getCurrentRepresentation(); // Assert expect(data).not.exist; // jshint ignore:line diff --git a/test/unit/helpers/ObjectsHelper.js b/test/unit/helpers/ObjectsHelper.js index c0a75c8b83..a36286b11d 100644 --- a/test/unit/helpers/ObjectsHelper.js +++ b/test/unit/helpers/ObjectsHelper.js @@ -20,10 +20,13 @@ class ObjectsHelper { { bandwidth: 2000000 }, { bandwidth: 3000000 }, ], - mimeType: "video/mp4" + mimeType: 'video/mp4' } + }, + getRepresentation: () => { + return null } - }; + } } getDummyLogger() { diff --git a/test/unit/helpers/VOHelper.js b/test/unit/helpers/VOHelper.js index 8607aba4e4..75bbb88f3b 100644 --- a/test/unit/helpers/VOHelper.js +++ b/test/unit/helpers/VOHelper.js @@ -60,7 +60,7 @@ class VoHelper { createRepresentation(type, index) { var rep = new Representation(); - rep.id = null; + rep.id = index; rep.index = index || 0; rep.adaptation = this.createAdaptation(type); rep.fragmentInfoType = null; @@ -71,11 +71,14 @@ class VoHelper { rep.indexRange = null; rep.range = null; rep.presentationTimeOffset = 10; + rep.bandwidth = 1000000; + rep.bitrateInKbit = 1000; rep.segmentInfoType = DashConstants.SEGMENT_TEMPLATE; // Set the source buffer timeOffset to this - rep.MSETimeOffset = NaN; + rep.mseTimeOffset = NaN; rep.segmentAvailabilityWindow = null; rep.availableSegmentsNumber = 0; + rep.mediaInfo = this.getDummyMediaInfo(type); return rep; } @@ -83,12 +86,14 @@ class VoHelper { createTimelineRepresentation(type, index) { var rep = new Representation(); - rep.id = null; + rep.id = index; rep.index = index || 0; rep.adaptation = this.createAdaptation(type, 1); rep.fragmentInfoType = null; rep.initialization = 'https://dash.akamaized.net/envivio/dashpr/clear/video4/Header.m4s'; rep.segmentDuration = 1; + rep.bandwidth = 1000000; + rep.bitrateInKbit = 1000; 
rep.timescale = 1; rep.startNumber = 1; rep.indexRange = null; @@ -96,7 +101,7 @@ class VoHelper { rep.presentationTimeOffset = 10; rep.segmentInfoType = DashConstants.SEGMENT_TIMELINE; // Set the source buffer timeOffset to this - rep.MSETimeOffset = NaN; + rep.mseTimeOffset = NaN; rep.segmentAvailabilityWindow = null; rep.availableSegmentsNumber = 0; diff --git a/test/unit/mocks/AbrControllerMock.js b/test/unit/mocks/AbrControllerMock.js index e40662b5ca..a895325823 100644 --- a/test/unit/mocks/AbrControllerMock.js +++ b/test/unit/mocks/AbrControllerMock.js @@ -106,6 +106,10 @@ function AbrControllerMock () { this.getMinAllowedIndexFor = function () {}; this.clearDataForStream = function () {}; + + this.getPossibleVoRepresentations = function () { + return [] + } } export default AbrControllerMock; diff --git a/test/unit/mocks/AdapterMock.js b/test/unit/mocks/AdapterMock.js index f3dbeb1dd7..b658ab3cdb 100644 --- a/test/unit/mocks/AdapterMock.js +++ b/test/unit/mocks/AdapterMock.js @@ -136,10 +136,6 @@ function AdapterMock() { this.applyPatchToManifest = function () { }; - this.convertRepresentationToRepresentationInfo = function () { - return null; - }; - this.getIsTypeOf = function () { return true; }; diff --git a/test/unit/mocks/MediaControllerMock.js b/test/unit/mocks/MediaControllerMock.js index e3b6596b17..846116d4be 100644 --- a/test/unit/mocks/MediaControllerMock.js +++ b/test/unit/mocks/MediaControllerMock.js @@ -12,6 +12,10 @@ class MediaControllerMock { this.tracks = []; } + initialize() { + + } + checkInitialMediaSettingsForType() { } @@ -89,7 +93,7 @@ class MediaControllerMock { return (type === 'audio' || type === 'video' || type === 'text'); } - isTracksEqual(currentTrack, mediaInfoForType) { + areTracksEqual(currentTrack, mediaInfoForType) { return (mediaInfoForType.lang === 'deu'); } diff --git a/test/unit/mocks/RepresentationControllerMock.js b/test/unit/mocks/RepresentationControllerMock.js index 653e9ebb38..84fa7ef344 100644 --- 
a/test/unit/mocks/RepresentationControllerMock.js +++ b/test/unit/mocks/RepresentationControllerMock.js @@ -7,9 +7,6 @@ function RepresentationControllerMock () { this.updateRepresentation = function () {}; - this.getCurrentRepresentationInfo = function () { - return {} - } } export default RepresentationControllerMock; diff --git a/test/unit/mocks/RulesContextMock.js b/test/unit/mocks/RulesContextMock.js index f5178ef394..68c752a0b8 100644 --- a/test/unit/mocks/RulesContextMock.js +++ b/test/unit/mocks/RulesContextMock.js @@ -2,11 +2,13 @@ import FragmentRequest from '../../../src/streaming/vo/FragmentRequest.js'; function SwitchRequestHistoryMock() { this.getSwitchRequests = function () { - return [{ - drops: 7, - noDrops: 0, - dropSize: 4 - }]; + return { + 1: { + drops: 7, + noDrops: 0, + dropSize: 4 + } + }; }; } @@ -24,16 +26,19 @@ function RulesContextMock() { return fragRequest; }; - this.getRepresentationInfo = function () { + this.getVoRepresentation = function () { }; this.getAbrController = function () { return { + getPossibleVoRepresentations: function () { + return [{ id: 1 }] + } }; }; this.getSwitchHistory = function () { return new SwitchRequestHistoryMock(); }; - this.getRepresentationInfo = function () { + this.getVoRepresentation = function () { return { fragmentDuration: NaN }; diff --git a/test/unit/mocks/StreamMock.js b/test/unit/mocks/StreamMock.js index 0313dbff06..55f1fa6972 100644 --- a/test/unit/mocks/StreamMock.js +++ b/test/unit/mocks/StreamMock.js @@ -52,4 +52,8 @@ StreamMock.prototype.getProcessors = function () { return this.streamProcessors; }; +StreamMock.prototype.getCurrentRepresentationForType = function () { + return null; +} + export default StreamMock; diff --git a/test/unit/mocks/StreamProcessorMock.js b/test/unit/mocks/StreamProcessorMock.js index a26f79b200..7cabf7abdb 100644 --- a/test/unit/mocks/StreamProcessorMock.js +++ b/test/unit/mocks/StreamProcessorMock.js @@ -66,7 +66,8 @@ function StreamProcessorMock (testType, 
streamInfo) { return { bitrateList: [], mimeType: 'video/mp4', - streamInfo: this.streamInfo + streamInfo: this.streamInfo, + type: 'video' }; }; @@ -98,11 +99,11 @@ function StreamProcessorMock (testType, streamInfo) { return this.streamInfo; }; - this.getRepresentationInfo = function (quality) { + this.getRepresentation = function (quality) { if (quality !== undefined) { let offset = quality ? 2 : 1; return { - MSETimeOffset: offset + mseTimeOffset: offset }; } else { return {mediaInfo: {type: this.type, streamInfo: this.streamInfo}, fragmentDuration: 6}; diff --git a/test/unit/mss.MssFragmentProcessor.js b/test/unit/mss.MssFragmentProcessor.js index 92ec04669c..f3c4afb2e1 100644 --- a/test/unit/mss.MssFragmentProcessor.js +++ b/test/unit/mss.MssFragmentProcessor.js @@ -49,9 +49,12 @@ describe('MssFragmentProcessor', function () { expect(mssFragmentProcessor.processFragment.bind(mssFragmentProcessor, { request: { type: 'MediaSegment' } })).to.throw('e parameter is missing or malformed'); }); - it('should throw an error when attempting to call processFragment for mp4 media live segment without tfrf box', async() => { + it('should throw an error when attempting to call processFragment for mp4 media live segment without tfrf box', async () => { const arrayBuffer = await FileLoader.loadArrayBufferFile('/data/mss/mss_moof_tfdt.mp4'); - const e = { request: { type: 'MediaSegment', mediaInfo: { index: 0 } }, response: arrayBuffer }; + const e = { + request: { type: 'MediaSegment', representation: { mediaInfo: { index: 0 } } }, + response: arrayBuffer + }; mssFragmentProcessor.processFragment(e, streamProcessorMock); expect(errorHandlerMock.errorValue).to.equal(MssErrors.MSS_NO_TFRF_MESSAGE); expect(errorHandlerMock.errorCode).to.equal(MssErrors.MSS_NO_TFRF_CODE); @@ -59,7 +62,10 @@ describe('MssFragmentProcessor', function () { it('should not throw an error when attempting to call processFragment for mp4 media live segment with tfrf box', async () => { const arrayBuffer 
= await FileLoader.loadArrayBufferFile('/data/mss/mss_moof.mp4'); - const e = { request: { type: 'MediaSegment', mediaInfo: { index: 0 } }, response: arrayBuffer }; + const e = { + request: { type: 'MediaSegment', representation: { mediaInfo: { index: 0 } } }, + response: arrayBuffer + }; mssFragmentProcessor.processFragment(e, streamProcessorMock); expect(errorHandlerMock.errorValue).not.to.equal(MssErrors.MSS_NO_TFRF_MESSAGE); expect(errorHandlerMock.errorCode).not.to.equal(MssErrors.MSS_NO_TFRF_CODE); diff --git a/test/unit/streaming.MediaPlayer.js b/test/unit/streaming.MediaPlayer.js index 1dedc51fa9..48740b8049 100644 --- a/test/unit/streaming.MediaPlayer.js +++ b/test/unit/streaming.MediaPlayer.js @@ -423,44 +423,6 @@ describe('MediaPlayer', function () { expect(minAllowedBitrateFor).to.equal(5); }); - it('should configure MaxAllowedRepresentationRatioFor', function () { - let maxAllowedRepresentationRatioFor = player.getSettings().streaming.abr.maxRepresentationRatio.audio; - expect(maxAllowedRepresentationRatioFor).to.equal(1); - - player.updateSettings({ - 'streaming': { - 'abr': { - 'maxRepresentationRatio': { - 'audio': 5 - } - } - } - }); - - maxAllowedRepresentationRatioFor = player.getSettings().streaming.abr.maxRepresentationRatio.audio; - expect(maxAllowedRepresentationRatioFor).to.equal(5); - }); - - it('should update portal size', function () { - let elementHeight = abrControllerMock.getElementHeight(); - let elementWidth = abrControllerMock.getElementWidth(); - let windowResizeEventCalled = abrControllerMock.getWindowResizeEventCalled(); - - expect(elementHeight).to.be.undefined; // jshint ignore:line - expect(elementWidth).to.be.undefined; // jshint ignore:line - expect(windowResizeEventCalled).to.be.false; // jshint ignore:line - - player.updatePortalSize(); - - elementHeight = abrControllerMock.getElementHeight(); - elementWidth = abrControllerMock.getElementWidth(); - windowResizeEventCalled = abrControllerMock.getWindowResizeEventCalled(); - 
- expect(elementHeight).to.equal(10); - expect(elementWidth).to.equal(10); - expect(windowResizeEventCalled).to.be.true; // jshint ignore:line - }); - it('should configure bitrate according to playback area size', function () { let limitBitrateByPortal = player.getSettings().streaming.abr.limitBitrateByPortal; expect(limitBitrateByPortal).to.be.false; // jshint ignore:line @@ -493,23 +455,6 @@ describe('MediaPlayer', function () { expect(UsePixelRatioInLimitBitrateByPortal).to.be.true; // jshint ignore:line }); - it('should configure initialRepresentationRatioFor', function () { - let initialRepresentationRatioFor = player.getSettings().streaming.abr.initialRepresentationRatio.video; - expect(initialRepresentationRatioFor).to.equal(-1); // jshint ignore:line - - player.updateSettings({ - 'streaming': { - 'abr': { - 'initialRepresentationRatio': { - 'video': 10 - } - } - } - }); - - initialRepresentationRatioFor = player.getSettings().streaming.abr.initialRepresentationRatio.video; - expect(initialRepresentationRatioFor).to.equal(10); - }); it('should not set setAutoSwitchBitrateFor value if it\'s not a boolean type', function () { let autoSwitchBitrateForVideo = player.getSettings().streaming.abr.autoSwitchBitrate.video; @@ -538,42 +483,11 @@ describe('MediaPlayer', function () { describe('When it is not initialized', function () { it('Method getQualityFor should throw an exception', function () { - expect(player.getQualityFor).to.throw(STREAMING_NOT_INITIALIZED_ERROR); + expect(player.getCurrentRepresentationForType).to.throw(STREAMING_NOT_INITIALIZED_ERROR); }); it('Method setQualityFor should throw an exception', function () { - expect(player.setQualityFor).to.throw(STREAMING_NOT_INITIALIZED_ERROR); - }); - }); - - describe('When it is initialized', function () { - beforeEach(function () { - player.initialize(videoElementMock, dummyUrl, false); - }); - - it('should configure quality for type', function () { - let qualityFor = 
abrControllerMock.getQualityFor('video', { - id: 'DUMMY_STREAM-01' - }); - expect(qualityFor).to.equal(abrControllerMock.QUALITY_DEFAULT()); - - qualityFor = player.getQualityFor('video'); - expect(qualityFor).to.equal(abrControllerMock.QUALITY_DEFAULT()); - - player.setQualityFor('video', 10); - - qualityFor = abrControllerMock.getQualityFor('video', { - id: 'DUMMY_STREAM-01' - }); - expect(qualityFor).to.equal(10); - - qualityFor = player.getQualityFor('video'); - expect(qualityFor).to.equal(10); - }); - - it('Method getTopBitrateInfoFor should return null when type is undefined', function () { - const topBitrateInfo = player.getTopBitrateInfoFor(); - expect(topBitrateInfo).to.be.null; // jshint ignore:line + expect(player.getCurrentRepresentationForType).to.throw(STREAMING_NOT_INITIALIZED_ERROR); }); }); }); @@ -736,7 +650,7 @@ describe('MediaPlayer', function () { it('should configure bufferTimeDefault', function () { let bufferTimeDefault = player.getSettings().streaming.buffer.bufferTimeDefault; - expect(bufferTimeDefault).to.equal(12); + expect(bufferTimeDefault).to.equal(18); }); it('should configure BufferTimeAtTopQuality', function () { @@ -962,7 +876,7 @@ describe('MediaPlayer', function () { describe('Stream and Track Management Functions', function () { describe('When it is not initialized', function () { it('Method getBitrateInfoListFor should throw an exception', function () { - expect(player.getBitrateInfoListFor).to.throw('You must first call initialize() and set a source before calling this method'); + expect(player.getCurrentRepresentationForType).to.throw('You must first call initialize() and set a source before calling this method'); }); it('Method getStreamsFromManifest should throw an exception', function () { @@ -1000,8 +914,6 @@ describe('MediaPlayer', function () { }); describe('Stream and Track Management Functions', function () { - describe('When it is not initialized', function () { - }); describe('When it is initialized', function () { 
beforeEach(function () { @@ -1012,11 +924,6 @@ describe('MediaPlayer', function () { mediaControllerMock.setTrack('track1'); }); - it('Method getBitrateInfoListFor should return bitrate info list', function () { - const bitrateList = player.getBitrateInfoListFor(); - expect(bitrateList.length).to.equal(2); - }); - it('Method getTracksFor should return tracks', function () { const tracks = player.getTracksFor(); expect(tracks.length).to.equal(2); diff --git a/test/unit/streaming.Stream.js b/test/unit/streaming.Stream.js index c82f85e911..b1cf59e11e 100644 --- a/test/unit/streaming.Stream.js +++ b/test/unit/streaming.Stream.js @@ -4,14 +4,12 @@ import ProtectionEvents from '../../src/streaming/protection/ProtectionEvents.js import EventBus from '../../src/core/EventBus.js'; import DashJSError from '../../src/streaming/vo/DashJSError.js'; import ProtectionErrors from '../../src/streaming/protection/errors/ProtectionErrors.js'; -import Constants from '../../src/streaming/constants/Constants.js'; import Errors from '../../src/core/errors/Errors.js'; import Settings from '../../src/core/Settings.js'; import AdapterMock from './mocks/AdapterMock.js'; import ManifestModelMock from './mocks/ManifestModelMock.js'; import ErrorHandlerMock from './mocks/ErrorHandlerMock.js'; import AbrControllerMock from './mocks/AbrControllerMock.js'; -import StreamMock from './mocks/StreamMock.js'; import ManifestUpdaterMock from './mocks/ManifestUpdaterMock.js'; import PlaybackControllerMock from './mocks/PlaybackControllerMock.js'; import CapabilitiesMock from './mocks/CapabilitiesMock.js'; @@ -117,44 +115,6 @@ describe('Stream', function () { expect(duration).to.be.NaN; // jshint ignore:line }); - it('should return null false isMediaCodecCompatible is called but stream attribute is undefined', () => { - stream.reset(); - const isCompatible = stream.isMediaCodecCompatible(); - expect(isCompatible).to.be.false; // jshint ignore:line - }); - - it('should return false when 
isMediaCodecCompatible is called but stream attribute is an empty object', () => { - stream.reset(); - const isCompatible = stream.isMediaCodecCompatible({}); - expect(isCompatible).to.be.false; // jshint ignore:line - }); - - it('should return false when isMediaCodecCompatible is called with a correct stream attribute', () => { - stream.reset(); - const isCompatible = stream.isMediaCodecCompatible(new StreamMock()); - expect(isCompatible).to.be.false; // jshint ignore:line - }); - - it('should return true when isProtectionCompatible is called but new stream attribute is undefined', () => { - stream.reset(); - const isCompatible = stream.isProtectionCompatible(); - expect(isCompatible).to.be.true; // jshint ignore:line - }); - - it('should return an empty array when getBitrateListFor is called but no stream processor is defined', () => { - stream.reset(); - const bitrateList = stream.getBitrateListFor(''); - expect(bitrateList).to.be.instanceOf(Array); // jshint ignore:line - expect(bitrateList).to.be.empty; // jshint ignore:line - }); - - it('should return an empty array when getBitrateListFor, for image type, is called but thumbnailController is not defined', () => { - stream.reset(); - const bitrateList = stream.getBitrateListFor(Constants.IMAGE); - expect(bitrateList).to.be.instanceOf(Array); // jshint ignore:line - expect(bitrateList).to.be.empty; // jshint ignore:line - }); - it('License expired behavior', function () { stream.initialize(); diff --git a/test/unit/Streaming.StreamProcessor.js b/test/unit/streaming.StreamProcessor.js similarity index 69% rename from test/unit/Streaming.StreamProcessor.js rename to test/unit/streaming.StreamProcessor.js index 08fe60fda7..79d00953f0 100644 --- a/test/unit/Streaming.StreamProcessor.js +++ b/test/unit/streaming.StreamProcessor.js @@ -1,5 +1,4 @@ import StreamProcessor from '../../src/streaming/StreamProcessor.js'; -import Constants from '../../src/streaming/constants/Constants.js'; import {expect} from 'chai'; 
const context = {}; @@ -27,9 +26,6 @@ describe('StreamProcessor', function () { expect(streamProcessor.setExplicitBufferingTime.bind(streamProcessor)).to.not.throw(); }); - it('getRepresentationInfo should throw an error when quality is not a number', function () { - expect(streamProcessor.getRepresentationInfo.bind(streamProcessor, {})).to.be.throw(Constants.BAD_ARGUMENT_ERROR + ' : argument is not an integer'); - }); }); }); diff --git a/test/unit/streaming.controllers.AbrController.js b/test/unit/streaming.controllers.AbrController.js index 11de72a088..5b7e85dc0a 100644 --- a/test/unit/streaming.controllers.AbrController.js +++ b/test/unit/streaming.controllers.AbrController.js @@ -1,7 +1,6 @@ import VoHelper from './helpers/VOHelper.js'; import ObjectsHelper from './helpers/ObjectsHelper.js'; import AbrController from '../../src/streaming/controllers/AbrController.js'; -import BitrateInfo from '../../src/streaming/vo/BitrateInfo.js'; import Constants from '../../src/streaming/constants/Constants.js'; import Settings from '../../src/core/Settings.js'; import VideoModelMock from './mocks/VideoModelMock.js'; @@ -15,21 +14,21 @@ import CmsdModel from '../../src/streaming/models/CmsdModel.js'; import ServiceDescriptionController from '../../src/dash/controllers/ServiceDescriptionController.js'; import PlaybackControllerMock from './mocks/PlaybackControllerMock.js'; import ThroughputControllerMock from './mocks/ThroughputControllerMock.js'; -import {expect} from 'chai'; +import {expect, assert} from 'chai'; +import EventBus from '../../src/core/EventBus.js'; +import MediaPlayerEvents from '../../src/streaming/MediaPlayerEvents.js'; +import sinon from 'sinon'; describe('AbrController', function () { const context = {}; const voHelper = new VoHelper(); const objectsHelper = new ObjectsHelper(); - const defaultQuality = AbrController.QUALITY_DEFAULT; + const eventBus = EventBus(context).getInstance(); const settings = Settings(context).getInstance(); const abrCtrl = 
AbrController(context).getInstance(); const dummyMediaInfo = voHelper.getDummyMediaInfo(Constants.VIDEO); - const representationCount = dummyMediaInfo.representationCount; - const streamProcessor = objectsHelper.getDummyStreamProcessor(Constants.VIDEO); - const adapterMock = new AdapterMock(); - const videoModelMock = new VideoModelMock(); + const dummyRepresentations = [voHelper.getDummyRepresentation(Constants.VIDEO, 0), voHelper.getDummyRepresentation(Constants.VIDEO, 1)]; const domStorageMock = new DomStorageMock(); const dashMetricsMock = new DashMetricsMock(); const streamControllerMock = new StreamControllerMock(); @@ -40,12 +39,18 @@ describe('AbrController', function () { const playbackControllerMock = new PlaybackControllerMock(); const throughputControllerMock = new ThroughputControllerMock(); + let streamProcessor; + let adapterMock; + let videoModelMock; + mediaPlayerModel.setConfig({ serviceDescriptionController, playbackController: playbackControllerMock }) beforeEach(function () { + adapterMock = new AdapterMock(); + videoModelMock = new VideoModelMock(); abrCtrl.setConfig({ dashMetrics: dashMetricsMock, videoModel: videoModelMock, @@ -58,6 +63,7 @@ describe('AbrController', function () { throughputController: throughputControllerMock, customParametersModel }); + streamProcessor = objectsHelper.getDummyStreamProcessor(Constants.VIDEO); abrCtrl.initialize(); abrCtrl.registerStreamType(Constants.VIDEO, streamProcessor); }); @@ -65,6 +71,7 @@ describe('AbrController', function () { afterEach(function () { abrCtrl.reset(); settings.reset(); + eventBus.reset(); }); it('should return null when attempting to get abandonment state when abandonmentStateDict array is empty', function () { @@ -72,9 +79,9 @@ describe('AbrController', function () { expect(state).to.be.null; }); - it('should return 0 when calling getQualityForBitrate with no mediaInfo', function () { - const quality = abrCtrl.getQualityForBitrate(undefined, undefined, true); - 
expect(quality).to.be.equal(0); + it('should return null when calling getQualityForBitrate with no mediaInfo', function () { + const quality = abrCtrl.getOptimalRepresentationForBitrate(undefined, undefined, true); + expect(quality).to.not.exist; }); it('should return true if isPlayingAtTopQuality function is called without parameter', function () { @@ -82,193 +89,178 @@ describe('AbrController', function () { expect(isPlayingTopQuality).to.be.true; }); - it('should update top quality index', function () { - const expectedTopQuality = representationCount - 1; - let actualTopQuality; - - actualTopQuality = abrCtrl.updateTopQualityIndex(dummyMediaInfo); - - expect(actualTopQuality).to.be.equal(expectedTopQuality); - }); - - it('should set a quality in a range between zero and a top quality index', function () { - const testQuality = 1; - let newQuality; - - abrCtrl.updateTopQualityIndex(dummyMediaInfo); - abrCtrl.setPlaybackQuality(Constants.VIDEO, dummyMediaInfo.streamInfo, testQuality); - newQuality = abrCtrl.getQualityFor(Constants.VIDEO, dummyMediaInfo.streamInfo.id); - expect(newQuality).to.be.equal(testQuality); - }); - - it('should throw an exception when attempting to set not a number value for a quality', function () { - let testQuality = 'a'; - expect(abrCtrl.setPlaybackQuality.bind(abrCtrl, Constants.VIDEO, dummyMediaInfo.streamInfo, testQuality)).to.throw(Constants.BAD_ARGUMENT_ERROR + ' : argument is not an integer'); - - testQuality = null; - expect(abrCtrl.setPlaybackQuality.bind(abrCtrl, Constants.VIDEO, dummyMediaInfo.streamInfo, testQuality)).to.throw(Constants.BAD_ARGUMENT_ERROR + ' : argument is not an integer'); + it('should switch to a new Representation', function (done) { + const onQualityChange = (e) => { + expect(e.oldRepresentation).to.not.exist; + expect(e.newRepresentation.id).to.be.equal(dummyRepresentations[0].id) + eventBus.off(MediaPlayerEvents.QUALITY_CHANGE_REQUESTED, onQualityChange) + done() + } - testQuality = 2.5; - 
expect(abrCtrl.setPlaybackQuality.bind(abrCtrl, Constants.VIDEO, dummyMediaInfo.streamInfo, testQuality)).to.throw(Constants.BAD_ARGUMENT_ERROR + ' : argument is not an integer'); + eventBus.on(MediaPlayerEvents.QUALITY_CHANGE_REQUESTED, onQualityChange, this); - testQuality = {}; - expect(abrCtrl.setPlaybackQuality.bind(abrCtrl, Constants.VIDEO, dummyMediaInfo.streamInfo, testQuality)).to.throw(Constants.BAD_ARGUMENT_ERROR + ' : argument is not an integer'); + abrCtrl.setPlaybackQuality(Constants.VIDEO, dummyMediaInfo.streamInfo, dummyRepresentations[0]); }); it('should ignore an attempt to set a quality value if no streamInfo is provided', function () { - const targetQuality = 2; - const oldQuality = abrCtrl.getQualityFor(Constants.VIDEO); - let newQuality; - - abrCtrl.setPlaybackQuality(Constants.VIDEO, null, targetQuality); - newQuality = abrCtrl.getQualityFor(Constants.VIDEO); - expect(newQuality).to.be.equal(oldQuality); - }); - - it('should ignore an attempt to set a negative quality value', function () { - const negativeQuality = -1; - const oldQuality = abrCtrl.getQualityFor(Constants.VIDEO); - let newQuality; - - abrCtrl.setPlaybackQuality(Constants.VIDEO, dummyMediaInfo.streamInfo, negativeQuality); - newQuality = abrCtrl.getQualityFor(Constants.VIDEO); - expect(newQuality).to.be.equal(oldQuality); - }); + const spy = sinon.spy(); - it('should ignore an attempt to set a quality greater than top quality index', function () { - const greaterThanTopQualityValue = representationCount; - const oldQuality = abrCtrl.getQualityFor(Constants.VIDEO); - let newQuality; - - abrCtrl.setPlaybackQuality(Constants.VIDEO, dummyMediaInfo.streamInfo, greaterThanTopQualityValue); - newQuality = abrCtrl.getQualityFor(Constants.VIDEO); - - expect(newQuality).to.be.equal(oldQuality); + assert.equal(spy.notCalled, true); + eventBus.on(MediaPlayerEvents.QUALITY_CHANGE_REQUESTED, spy, this); + abrCtrl.setPlaybackQuality(Constants.VIDEO, null, dummyRepresentations[0]); }); - 
it('should restore a default quality value after reset', function () { - const testQuality = 1; - let newQuality; - - abrCtrl.setPlaybackQuality(Constants.VIDEO, dummyMediaInfo.streamInfo, testQuality); - abrCtrl.reset(); - newQuality = abrCtrl.getQualityFor(Constants.VIDEO); - expect(newQuality).to.be.equal(defaultQuality); - }); - - it('should compose a list of available bitrates', function () { - const expectedBitrates = dummyMediaInfo.bitrateList; - const actualBitrates = abrCtrl.getBitrateList(dummyMediaInfo); - let item, - match; - - match = expectedBitrates.filter(function (val, idx) { - item = actualBitrates[idx]; - return (item && (item.qualityIndex === idx) && (item.bitrate === val.bandwidth) && (item.mediaType === dummyMediaInfo.type) && (item.width === val.width) && (item.height === val.height)); - }); + it('should ignore an attempt to set a quality value if no Representation is provided', function () { + const spy = sinon.spy(); - expect(match.length).to.be.equal(expectedBitrates.length); + assert.equal(spy.notCalled, true); + eventBus.on(MediaPlayerEvents.QUALITY_CHANGE_REQUESTED, spy, this); + abrCtrl.setPlaybackQuality(Constants.VIDEO, dummyMediaInfo.streamInfo, null); }); - it('should return the appropriate max allowed index for the max allowed bitrate set', function () { + it('should return the right Representations for maxBitrate values', function () { const mediaInfo = streamProcessor.getMediaInfo(); + const bitrateList = mediaInfo.bitrateList; + + adapterMock.getVoRepresentations = () => { + return [ + { + bitrateInKbit: bitrateList[0].bandwidth / 1000, + mediaInfo, + id: 1 + }, + { + bitrateInKbit: bitrateList[1].bandwidth / 1000, + mediaInfo, + id: 2 + }, + { + bitrateInKbit: bitrateList[2].bandwidth / 1000, + mediaInfo, + id: 3 + } + ] + } + + adapterMock.areMediaInfosEqual = () => { + return true + } mediaInfo.streamInfo = streamProcessor.getStreamInfo(); - mediaInfo.representationCount = 3; mediaInfo.type = Constants.VIDEO; - 
abrCtrl.updateTopQualityIndex(mediaInfo); // Max allowed bitrate in kbps, bandwidth is in bps const s = { streaming: { abr: { maxBitrate: {} } } }; - const streamId = streamProcessor.getStreamInfo().id; - s.streaming.abr.maxBitrate[Constants.VIDEO] = streamProcessor.getMediaInfo().bitrateList[0].bandwidth / 1000; + s.streaming.abr.maxBitrate[Constants.VIDEO] = bitrateList[0].bandwidth / 1000; settings.update(s); + let possibleVoRepresentations = abrCtrl.getPossibleVoRepresentations(mediaInfo, false); + expect(possibleVoRepresentations.length).to.be.equal(1); + expect(possibleVoRepresentations[0].id).to.be.equal(1); - let maxAllowedIndex = abrCtrl.getMaxAllowedIndexFor(Constants.VIDEO, streamId); - expect(maxAllowedIndex).to.be.equal(0); - - s.streaming.abr.maxBitrate[Constants.VIDEO] = streamProcessor.getMediaInfo().bitrateList[1].bandwidth / 1000; + s.streaming.abr.maxBitrate[Constants.VIDEO] = bitrateList[1].bandwidth / 1000; settings.update(s); + possibleVoRepresentations = abrCtrl.getPossibleVoRepresentations(mediaInfo); + expect(possibleVoRepresentations.length).to.be.equal(2); + expect(possibleVoRepresentations[1].id).to.be.equal(2); - maxAllowedIndex = abrCtrl.getMaxAllowedIndexFor(Constants.VIDEO, streamId); - expect(maxAllowedIndex).to.be.equal(1); - - s.streaming.abr.maxBitrate[Constants.VIDEO] = streamProcessor.getMediaInfo().bitrateList[2].bandwidth / 1000; + s.streaming.abr.maxBitrate[Constants.VIDEO] = bitrateList[2].bandwidth / 1000; settings.update(s); + possibleVoRepresentations = abrCtrl.getPossibleVoRepresentations(mediaInfo); + expect(possibleVoRepresentations.length).to.be.equal(3); + expect(possibleVoRepresentations[2].id).to.be.equal(3); - maxAllowedIndex = abrCtrl.getMaxAllowedIndexFor(Constants.VIDEO, streamId); - expect(maxAllowedIndex).to.be.equal(2); - - s.streaming.abr.maxBitrate[Constants.VIDEO] = (streamProcessor.getMediaInfo().bitrateList[0].bandwidth / 1000) + 1; + s.streaming.abr.maxBitrate[Constants.VIDEO] = 
(bitrateList[0].bandwidth / 1000) + 1; settings.update(s); + possibleVoRepresentations = abrCtrl.getPossibleVoRepresentations(mediaInfo); + expect(possibleVoRepresentations.length).to.be.equal(1); + expect(possibleVoRepresentations[0].id).to.be.equal(1); - maxAllowedIndex = abrCtrl.getMaxAllowedIndexFor(Constants.VIDEO, streamId); - expect(maxAllowedIndex).to.be.equal(0); - - s.streaming.abr.maxBitrate[Constants.VIDEO] = (streamProcessor.getMediaInfo().bitrateList[1].bandwidth / 1000) + 1; + s.streaming.abr.maxBitrate[Constants.VIDEO] = (bitrateList[1].bandwidth / 1000) + 1; settings.update(s); + possibleVoRepresentations = abrCtrl.getPossibleVoRepresentations(mediaInfo); + expect(possibleVoRepresentations.length).to.be.equal(2); + expect(possibleVoRepresentations[1].id).to.be.equal(2); - maxAllowedIndex = abrCtrl.getMaxAllowedIndexFor(Constants.VIDEO, streamId); - expect(maxAllowedIndex).to.be.equal(1); - - s.streaming.abr.maxBitrate[Constants.VIDEO] = (streamProcessor.getMediaInfo().bitrateList[2].bandwidth / 1000) + 1; + s.streaming.abr.maxBitrate[Constants.VIDEO] = (bitrateList[2].bandwidth / 1000) + 1; settings.update(s); + possibleVoRepresentations = abrCtrl.getPossibleVoRepresentations(mediaInfo); + expect(possibleVoRepresentations.length).to.be.equal(3); + expect(possibleVoRepresentations[2].id).to.be.equal(3); - maxAllowedIndex = abrCtrl.getMaxAllowedIndexFor(Constants.VIDEO, streamId); - expect(maxAllowedIndex).to.be.equal(2); - - s.streaming.abr.maxBitrate[Constants.VIDEO] = (streamProcessor.getMediaInfo().bitrateList[0].bandwidth / 1000) - 1; + s.streaming.abr.maxBitrate[Constants.VIDEO] = (bitrateList[0].bandwidth / 1000) - 1; settings.update(s); - - maxAllowedIndex = abrCtrl.getMaxAllowedIndexFor(Constants.VIDEO, streamId); - expect(maxAllowedIndex).to.be.equal(0); + possibleVoRepresentations = abrCtrl.getPossibleVoRepresentations(mediaInfo); + expect(possibleVoRepresentations.length).to.be.equal(3); + 
expect(possibleVoRepresentations[2].id).to.be.equal(3); }); - it('should return the appropriate min allowed index for the min allowed bitrate set', function () { + it('should return the right Representations for minBitrate values', function () { + const mediaInfo = streamProcessor.getMediaInfo(); + const bitrateList = mediaInfo.bitrateList; + + adapterMock.getVoRepresentations = () => { + return [ + { + bitrateInKbit: bitrateList[0].bandwidth / 1000, + mediaInfo, + id: 1 + }, + { + bitrateInKbit: bitrateList[1].bandwidth / 1000, + mediaInfo, + id: 2 + }, + { + bitrateInKbit: bitrateList[2].bandwidth / 1000, + mediaInfo, + id: 3 + } + ] + } + + adapterMock.areMediaInfosEqual = () => { + return true + } + + mediaInfo.streamInfo = streamProcessor.getStreamInfo(); + mediaInfo.type = Constants.VIDEO; + // Min allowed bitrate in kbps, bandwidth is in bps const s = { streaming: { abr: { minBitrate: {} } } }; - const streamId = streamProcessor.getStreamInfo().id; - s.streaming.abr.minBitrate[Constants.VIDEO] = streamProcessor.getMediaInfo().bitrateList[0].bandwidth / 1000; + s.streaming.abr.minBitrate[Constants.VIDEO] = bitrateList[0].bandwidth / 1000; settings.update(s); + let possibleVoRepresentations = abrCtrl.getPossibleVoRepresentations(mediaInfo); + expect(possibleVoRepresentations.length).to.be.equal(3); - let minAllowedIndex = abrCtrl.getMinAllowedIndexFor(Constants.VIDEO, streamId); - expect(minAllowedIndex).to.be.equal(0); - - s.streaming.abr.minBitrate[Constants.VIDEO] = streamProcessor.getMediaInfo().bitrateList[1].bandwidth / 1000; + s.streaming.abr.minBitrate[Constants.VIDEO] = bitrateList[1].bandwidth / 1000; settings.update(s); + possibleVoRepresentations = abrCtrl.getPossibleVoRepresentations(mediaInfo); + expect(possibleVoRepresentations.length).to.be.equal(2); - minAllowedIndex = abrCtrl.getMinAllowedIndexFor(Constants.VIDEO, streamId); - expect(minAllowedIndex).to.be.equal(1); - - s.streaming.abr.minBitrate[Constants.VIDEO] = 
streamProcessor.getMediaInfo().bitrateList[2].bandwidth / 1000; + s.streaming.abr.minBitrate[Constants.VIDEO] = bitrateList[2].bandwidth / 1000; settings.update(s); + possibleVoRepresentations = abrCtrl.getPossibleVoRepresentations(mediaInfo); + expect(possibleVoRepresentations.length).to.be.equal(1); - minAllowedIndex = abrCtrl.getMinAllowedIndexFor(Constants.VIDEO, streamId); - expect(minAllowedIndex).to.be.equal(2); - - s.streaming.abr.minBitrate[Constants.VIDEO] = (streamProcessor.getMediaInfo().bitrateList[0].bandwidth / 1000) + 1; + s.streaming.abr.minBitrate[Constants.VIDEO] = (bitrateList[0].bandwidth / 1000) + 1; settings.update(s); + possibleVoRepresentations = abrCtrl.getPossibleVoRepresentations(mediaInfo); + expect(possibleVoRepresentations.length).to.be.equal(2); - minAllowedIndex = abrCtrl.getMinAllowedIndexFor(Constants.VIDEO, streamId); - expect(minAllowedIndex).to.be.equal(1); - - s.streaming.abr.minBitrate[Constants.VIDEO] = (streamProcessor.getMediaInfo().bitrateList[1].bandwidth / 1000) + 1; + s.streaming.abr.minBitrate[Constants.VIDEO] = (bitrateList[1].bandwidth / 1000) + 1; settings.update(s); + possibleVoRepresentations = abrCtrl.getPossibleVoRepresentations(mediaInfo); + expect(possibleVoRepresentations.length).to.be.equal(1); - minAllowedIndex = abrCtrl.getMinAllowedIndexFor(Constants.VIDEO, streamId); - expect(minAllowedIndex).to.be.equal(2); - - s.streaming.abr.minBitrate[Constants.VIDEO] = (streamProcessor.getMediaInfo().bitrateList[2].bandwidth / 1000) + 1; + s.streaming.abr.minBitrate[Constants.VIDEO] = (bitrateList[2].bandwidth / 1000) + 1; settings.update(s); + possibleVoRepresentations = abrCtrl.getPossibleVoRepresentations(mediaInfo); + expect(possibleVoRepresentations.length).to.be.equal(3); - minAllowedIndex = abrCtrl.getMinAllowedIndexFor(Constants.VIDEO, streamId); - expect(minAllowedIndex).to.be.equal(2); - - s.streaming.abr.minBitrate[Constants.VIDEO] = (streamProcessor.getMediaInfo().bitrateList[0].bandwidth / 1000) - 1; + 
s.streaming.abr.minBitrate[Constants.VIDEO] = (bitrateList[0].bandwidth / 1000) - 1; settings.update(s); - - minAllowedIndex = abrCtrl.getMinAllowedIndexFor(Constants.VIDEO, streamId); - expect(minAllowedIndex).to.be.equal(0); + possibleVoRepresentations = abrCtrl.getPossibleVoRepresentations(mediaInfo); + expect(possibleVoRepresentations.length).to.be.equal(3); }); it('should configure initial bitrate for video type', function () { @@ -283,29 +275,89 @@ describe('AbrController', function () { expect(initialBitrateFor).to.be.NaN; }); - it('should return an appropriate BitrateInfo when calling getTopBitrateInfoFor', function () { - abrCtrl.updateTopQualityIndex(dummyMediaInfo); - - let bitrateInfo = abrCtrl.getTopBitrateInfoFor(Constants.VIDEO); - expect(bitrateInfo).to.be.an.instanceOf(BitrateInfo); - expect(bitrateInfo.bitrate).to.be.equal(3000000); - expect(bitrateInfo.qualityIndex).to.be.equal(2); - + it('should return the appropriate possible Representations if limitBitrateByPortal is enabled', function () { + videoModelMock.getVideoElementSize = () => { + return { elementWidth: 800 } + }; const s = { streaming: { abr: { limitBitrateByPortal: true } } }; settings.update(s); - bitrateInfo = abrCtrl.getTopBitrateInfoFor(Constants.VIDEO); - expect(bitrateInfo).to.be.an.instanceOf(BitrateInfo); - expect(bitrateInfo.bitrate).to.be.equal(2000000); - expect(bitrateInfo.qualityIndex).to.be.equal(1); + const mediaInfo = streamProcessor.getMediaInfo(); + const bitrateList = mediaInfo.bitrateList; + + adapterMock.getVoRepresentations = () => { + return [ + { + bitrateInKbit: bitrateList[0].bandwidth / 1000, + bandwidth: bitrateList[0].bandwidth, + mediaInfo, + id: 1, + width: 640 + }, + { + bitrateInKbit: bitrateList[1].bandwidth / 1000, + bandwidth: bitrateList[1].bandwidth, + mediaInfo, + id: 2, + width: 720 + }, + { + bitrateInKbit: bitrateList[2].bandwidth / 1000, + bandwidth: bitrateList[2].bandwidth, + mediaInfo, + id: 3, + width: 1920 + } + ] + } + + 
adapterMock.areMediaInfosEqual = () => { + return true + } + + mediaInfo.streamInfo = streamProcessor.getStreamInfo(); + mediaInfo.type = Constants.VIDEO; + + let possibleVoRepresentations = abrCtrl.getPossibleVoRepresentations(mediaInfo); + expect(possibleVoRepresentations.length).to.be.equal(2); }); - it('should return the appropriate top quality index when calling getMaxAllowedIndexFor', function () { - videoModelMock.setClientWidth(899); - const s = { streaming: { abr: { limitBitrateByPortal: true } } }; - settings.update(s); - abrCtrl.updateTopQualityIndex({ type: Constants.VIDEO, streamInfo: { id: 'test' }, representationCount: 5 }); - let topQualityIndex = abrCtrl.getMaxAllowedIndexFor(Constants.VIDEO, 'test'); - expect(topQualityIndex).to.be.equal(4); + it('should return an appropriate Representation when calling getOptimalRepresentationForBitrate', function () { + const mediaInfo = streamProcessor.getMediaInfo(); + const bitrateList = mediaInfo.bitrateList; + + adapterMock.getVoRepresentations = () => { + return [ + { + bitrateInKbit: bitrateList[0].bandwidth / 1000, + bandwidth: bitrateList[0].bandwidth, + mediaInfo, + id: 1 + }, + { + bitrateInKbit: bitrateList[1].bandwidth / 1000, + bandwidth: bitrateList[1].bandwidth, + mediaInfo, + id: 2 + }, + { + bitrateInKbit: bitrateList[2].bandwidth / 1000, + bandwidth: bitrateList[2].bandwidth, + mediaInfo, + id: 3 + } + ] + } + + adapterMock.areMediaInfosEqual = () => { + return true + } + + mediaInfo.streamInfo = streamProcessor.getStreamInfo(); + mediaInfo.type = Constants.VIDEO; + + let optimalRepresentationForBitrate = abrCtrl.getOptimalRepresentationForBitrate(mediaInfo, bitrateList[2].bandwidth / 1000); + expect(optimalRepresentationForBitrate.id).to.be.equal(3); }); + }); diff --git a/test/unit/streaming.controllers.BufferController.js b/test/unit/streaming.controllers.BufferController.js index a7ea8bb2b0..07b7b912ad 100644 --- a/test/unit/streaming.controllers.BufferController.js +++ 
b/test/unit/streaming.controllers.BufferController.js @@ -17,7 +17,7 @@ import TextControllerMock from './mocks/TextControllerMock.js'; import RepresentationControllerMock from './mocks/RepresentationControllerMock.js'; import chai from 'chai'; -import sinon from 'sinon'; + const expect = chai.expect; const context = {}; @@ -130,11 +130,13 @@ describe('BufferController', function () { const chunk = { bytes: 'initData', quality: 2, - mediaInfo: { - type: 'video' - }, streamId: streamInfo.id, - representationId: 'representationId' + representation: { + id: 'representationId', + mediaInfo: { + type: 'video' + }, + } }; initCache.save(chunk); @@ -184,12 +186,14 @@ describe('BufferController', function () { const event = { chunk: { streamId: streamInfo.id, - mediaInfo: { - type: 'video' - }, bytes: 'initData', quality: 2, - representationId: 'representationId' + representation: { + id: 'representationId', + mediaInfo: { + type: 'video' + }, + } } }; const onInitDataLoaded = function () { @@ -207,12 +211,14 @@ describe('BufferController', function () { it('should save init data into cache', function (done) { const chunk = { streamId: streamInfo.id, - mediaInfo: { - type: 'video' - }, bytes: 'initData', quality: 2, - representationId: 'representationId' + representation: { + id: 'representationId', + mediaInfo: { + type: 'video' + }, + } }; const event = { chunk: chunk @@ -221,12 +227,12 @@ describe('BufferController', function () { settings.update({ streaming: { cacheInitSegments: true } }); initCache.reset(); - let cache = initCache.extract(chunk.streamId, chunk.representationId); + let cache = initCache.extract(chunk.streamId, chunk.representation.id); const onInitDataLoaded = function () { eventBus.off(Events.INIT_FRAGMENT_LOADED, onInitDataLoaded); // check initCache - cache = initCache.extract(chunk.streamId, chunk.representationId); + cache = initCache.extract(chunk.streamId, chunk.representation.id); expect(cache.bytes).to.equal(chunk.bytes); done(); }; @@ -253,11 
+259,14 @@ describe('BufferController', function () { const event = { chunk: { streamId: streamInfo.id, - mediaInfo: { - type: 'video' - }, bytes: 'data', - quality: 2 + quality: 2, + representation: { + id: 'representationId', + mediaInfo: { + type: 'video' + }, + } } }; const onMediaFragmentLoaded = function () { @@ -278,8 +287,11 @@ describe('BufferController', function () { streamId: streamInfo.id, bytes: 'data', quality: 2, - mediaInfo: { - type: 'video' + representation: { + id: 'representationId', + mediaInfo: { + type: 'video' + }, } } }; @@ -295,7 +307,6 @@ describe('BufferController', function () { }); describe('Method updateBufferTimestampOffset', function () { - let adapterStub; beforeEach(function (done) { bufferController.initialize(mediaSourceMock); @@ -307,15 +318,9 @@ describe('BufferController', function () { done(e); }); - adapterStub = sinon.stub(adapterMock, 'convertRepresentationToRepresentationInfo'); - }); - - afterEach(function () { - adapterStub.restore(); - adapterStub = null; }); - it('should not update buffer timestamp offset if no representationInfo is provided', function (done) { + it('should not update buffer timestamp offset if no voRepresentation is provided', function (done) { expect(mediaSourceMock.buffers[0].timestampOffset).to.equal(1); // send event @@ -330,12 +335,12 @@ describe('BufferController', function () { }); - it('should update buffer timestamp offset if representationInfo is provided', function (done) { + it('should update buffer timestamp offset if voRepresentation is provided', function (done) { expect(mediaSourceMock.buffers[0].timestampOffset).to.equal(1); - const representationInfo = { MSETimeOffset: 2 }; + const representation = { mseTimeOffset: 2 }; // send event - bufferController.updateBufferTimestampOffset(representationInfo) + bufferController.updateBufferTimestampOffset(representation) .then(() => { expect(mediaSourceMock.buffers[0].timestampOffset).to.equal(2); done(); diff --git 
a/test/unit/streaming.controllers.FragmentController.js b/test/unit/streaming.controllers.FragmentController.js index 6515889fd4..a29403c481 100644 --- a/test/unit/streaming.controllers.FragmentController.js +++ b/test/unit/streaming.controllers.FragmentController.js @@ -51,7 +51,7 @@ describe('FragmentController', function () { mediaType: 'video', isInitializationRequest() { return true; }, type: 'InitializationSegment', - mediaInfo: {streamInfo: {}} + representation: {mediaInfo: {streamInfo: {}}} }, sender: videoFragmentModel}); }); @@ -63,6 +63,6 @@ describe('FragmentController', function () { }; eventBus.on(Events.SERVICE_LOCATION_BASE_URL_BLACKLIST_ADD, onInitFragmentLoadedWithError, this); - eventBus.trigger(Events.FRAGMENT_LOADING_COMPLETED, {error: {}, response: {}, request: {mediaType: 'video', isInitializationRequest() { return true; }, type: 'InitializationSegment', mediaInfo: {streamInfo: {}}}, sender: videoFragmentModel}); + eventBus.trigger(Events.FRAGMENT_LOADING_COMPLETED, {error: {}, response: {}, request: {mediaType: 'video', isInitializationRequest() { return true; }, type: 'InitializationSegment', representation: {mediaInfo: {streamInfo: {}}}}, sender: videoFragmentModel}); }); }); diff --git a/test/unit/streaming.controllers.MediaController.js b/test/unit/streaming.controllers.MediaController.js index 02c5d5d761..8b36072d76 100644 --- a/test/unit/streaming.controllers.MediaController.js +++ b/test/unit/streaming.controllers.MediaController.js @@ -90,7 +90,7 @@ describe('MediaController', function () { audioChannelConfiguration: [{ schemeIdUri: 'urn:mpeg:mpegB:cicp:ChannelConfiguration', value: '2' }] }; - let equal = mediaController.isTracksEqual(track1, track2); + let equal = mediaController.areTracksEqual(track1, track2); expect(equal).to.be.false; }); @@ -114,7 +114,7 @@ describe('MediaController', function () { accessibility: { schemeIdUri: 'urn:scheme:test:2:2023', value: 'description' }, audioChannelConfiguration: [{ schemeIdUri: 
'urn:mpeg:mpegB:cicp:ChannelConfiguration', value: '2' }] }; - let equal = mediaController.isTracksEqual(track1, track2); + let equal = mediaController.areTracksEqual(track1, track2); expect(equal).to.be.false; }); @@ -138,7 +138,7 @@ describe('MediaController', function () { accessibility: { schemeIdUri: 'urn:scheme:test:2023', value: 'description' }, audioChannelConfiguration: [{ schemeIdUri: 'urn:mpeg:mpegB:cicp:ChannelConfiguration', value: '2' }] }; - let equal = mediaController.isTracksEqual(track1, track2); + let equal = mediaController.areTracksEqual(track1, track2); expect(equal).to.be.false; }); @@ -162,7 +162,7 @@ describe('MediaController', function () { accessibility: [{ schemeIdUri: 'urn:mpeg:dash:role:2011', value: 'caption' }], audioChannelConfiguration: [{ schemeIdUri: 'urn:mpeg:mpegB:cicp:ChannelConfiguration', value: '2' }] }; - let equal = mediaController.isTracksEqual(track1, track2); + let equal = mediaController.areTracksEqual(track1, track2); expect(equal).to.be.true; }); @@ -178,7 +178,7 @@ describe('MediaController', function () { accessibility: [{ schemeIdUri: 'urn:mpeg:dash:role:2011', value: 'caption' }], audioChannelConfiguration: [{ schemeIdUri: 'urn:mpeg:mpegB:cicp:ChannelConfiguration', value: '2' }] }; - let equal = mediaController.isTracksEqual(track1, track2); + let equal = mediaController.areTracksEqual(track1, track2); expect(equal).to.be.false; }); @@ -195,7 +195,7 @@ describe('MediaController', function () { let track2 = null; - let equal = mediaController.isTracksEqual(track1, track2); + let equal = mediaController.areTracksEqual(track1, track2); expect(equal).to.be.false; }); @@ -204,7 +204,7 @@ describe('MediaController', function () { let track1 = null; let track2 = null; - let equal = mediaController.isTracksEqual(track1, track2); + let equal = mediaController.areTracksEqual(track1, track2); expect(equal).to.be.true; }); }); @@ -530,7 +530,7 @@ describe('MediaController', function () { mediaController.addTrack(enTrack); 
mediaController.addTrack(enADTrack); mediaController.addTrack(esTrack); - + let trackList = mediaController.getTracksFor(trackType, streamInfo.id); expect(trackList).to.have.lengthOf(3); expect(objectUtils.areEqual(trackList[0], enTrack)).to.be.true; @@ -654,7 +654,7 @@ describe('MediaController', function () { currentTrack = mediaController.getCurrentTrackFor(trackType, streamInfo.id); expect(objectUtils.areEqual(currentTrack, frTrack)).to.be.true; }); - + it('should not check initial media settings to choose initial track when it has already selected a track', function () { mediaController.addTrack(frTrack); mediaController.addTrack(qtzTrack); diff --git a/test/unit/streaming.controllers.ScheduleController.js b/test/unit/streaming.controllers.ScheduleController.js index 26ee6e58ac..b2ec1d8389 100644 --- a/test/unit/streaming.controllers.ScheduleController.js +++ b/test/unit/streaming.controllers.ScheduleController.js @@ -55,8 +55,8 @@ describe('ScheduleController', function () { expect(result).to.be.NaN; }); - it('should return NaN if representationInfo is undefined', () => { - representationController.getCurrentRepresentationInfo = function () { + it('should return NaN if voRepresentation is undefined', () => { + representationController.getCurrentRepresentation = function () { return undefined } scheduleController = ScheduleController(context).create({ @@ -90,7 +90,7 @@ describe('ScheduleController', function () { }) it('should return 16 (value returns by getCurrentBufferLevel of DashMetricsMock + 1) if current representation is audio and videoTrackPresent is true', () => { - representationController.getCurrentRepresentationInfo = function () { + representationController.getCurrentRepresentation = function () { return {} } scheduleController.initialize(true); @@ -100,7 +100,7 @@ describe('ScheduleController', function () { it('should return 12 (DEFAULT_MIN_BUFFER_TIME of MediaPlayerModelMock) if current representation is audio and videoTrackPresent is false', 
() => { scheduleController.initialize(false); - representationController.getCurrentRepresentationInfo = function () { + representationController.getCurrentRepresentation = function () { return { mediaInfo: { streamInfo: streamInfo } } } const result = scheduleController.getBufferTarget(); @@ -111,7 +111,7 @@ describe('ScheduleController', function () { scheduleController.initialize(false); abrController.isPlayingAtTopQuality = () => true; streamInfo.manifestInfo = { duration: 10 }; - representationController.getCurrentRepresentationInfo = function () { + representationController.getCurrentRepresentation = function () { return { mediaInfo: { streamInfo: streamInfo } } } const result = scheduleController.getBufferTarget(); @@ -122,7 +122,7 @@ describe('ScheduleController', function () { scheduleController.initialize(false); abrController.isPlayingAtTopQuality = () => true; streamInfo.manifestInfo = { duration: Infinity }; - representationController.getCurrentRepresentationInfo = function () { + representationController.getCurrentRepresentation = function () { return { mediaInfo: { streamInfo: streamInfo } } } const result = scheduleController.getBufferTarget(); @@ -147,7 +147,7 @@ describe('ScheduleController', function () { it('should return 15 (value returns by getCurrentBufferLevel of DashMetricsMock) if current representation is video', () => { scheduleController.initialize(true); - representationController.getCurrentRepresentationInfo = function () { + representationController.getCurrentRepresentation = function () { return { mediaInfo: { streamInfo: streamInfo } } } const result = scheduleController.getBufferTarget(); @@ -158,7 +158,7 @@ describe('ScheduleController', function () { scheduleController.initialize(false); abrController.isPlayingAtTopQuality = () => true; streamInfo.manifestInfo = { duration: 10 }; - representationController.getCurrentRepresentationInfo = function () { + representationController.getCurrentRepresentation = function () { return { 
mediaInfo: { streamInfo: streamInfo } } } const result = scheduleController.getBufferTarget(); @@ -169,7 +169,7 @@ describe('ScheduleController', function () { scheduleController.initialize(false); abrController.isPlayingAtTopQuality = () => true; streamInfo.manifestInfo = { duration: Infinity }; - representationController.getCurrentRepresentationInfo = function () { + representationController.getCurrentRepresentation = function () { return { mediaInfo: { streamInfo: streamInfo } } } const result = scheduleController.getBufferTarget(); @@ -195,16 +195,16 @@ describe('ScheduleController', function () { }) it('should return 0 if current representation is text, and subtitles are disabled', function () { - representationController.getCurrentRepresentationInfo = function () { + representationController.getCurrentRepresentation = function () { return {} } const result = scheduleController.getBufferTarget(); expect(result).to.be.equal(0); }); - it('should return 6 (value returns by currentRepresentationInfo.fragmentDuration) if current representation is text, and subtitles are enabled', function () { + it('should return 6 (value returns by voRepresentation.fragmentDuration) if current representation is text, and subtitles are enabled', function () { textController.enableText(true); - representationController.getCurrentRepresentationInfo = function () { + representationController.getCurrentRepresentation = function () { return {fragmentDuration: 6} } const result = scheduleController.getBufferTarget(); diff --git a/test/unit/streaming.models.CmcdModel.js b/test/unit/streaming.models.CmcdModel.js index 0cf9f00eff..3b36d9d45b 100644 --- a/test/unit/streaming.models.CmcdModel.js +++ b/test/unit/streaming.models.CmcdModel.js @@ -11,6 +11,7 @@ import ThroughputControllerMock from './mocks/ThroughputControllerMock.js'; import {decodeCmcd} from '@svta/common-media-library'; import {expect} from 'chai'; + const context = {}; const eventBus = EventBus(context).getInstance(); @@ 
-23,7 +24,7 @@ const REQUEST_HEADER_NAME = 'CMCD-Request'; describe('CmcdModel', function () { let cmcdModel; - let abrControllerMock = new AbrControllerMock(); + let abrControllerMock; let dashMetricsMock = new DashMetricsMock(); let playbackControllerMock = new PlaybackControllerMock(); const throughputControllerMock = new ThroughputControllerMock(); @@ -31,6 +32,7 @@ describe('CmcdModel', function () { let settings = Settings(context).getInstance(); beforeEach(function () { + abrControllerMock = new AbrControllerMock(); cmcdModel = CmcdModel(context).getInstance(); cmcdModel.initialize(); settings.update({ streaming: { cmcd: { enabled: true, cid: null } } }); @@ -39,7 +41,6 @@ describe('CmcdModel', function () { afterEach(function () { cmcdModel.reset(); cmcdModel = null; - abrControllerMock.setTopBitrateInfo(null); settings.reset(); }); @@ -123,7 +124,13 @@ describe('CmcdModel', function () { const NEXT_OBJECT_URL = 'next_object'; const NEXT_OBJECT_RANGE = '100-500'; - abrControllerMock.setTopBitrateInfo({ bitrate: TOP_BITRATE }); + abrControllerMock.getPossibleVoRepresentations = () => { + return [ + { + bitrateInKbit: TOP_BITRATE / 1000 + } + ] + } throughputControllerMock.getSafeAverageThroughput = function () { return MEASURED_THROUGHPUT; }; @@ -131,7 +138,8 @@ describe('CmcdModel', function () { type: REQUEST_TYPE, mediaType: MEDIA_TYPE, quality: 0, - mediaInfo: { bitrateList: [{ bandwidth: BITRATE }] }, + bandwidth: BITRATE, + representation: { mediaInfo: { bitrateList: [{ bandwidth: BITRATE }] } }, duration: DURATION, url: 'http://test.url/firstRequest' }; @@ -217,7 +225,7 @@ describe('CmcdModel', function () { type: REQUEST_TYPE, mediaType: MEDIA_TYPE, quality: 0, - mediaInfo: { bitrateList: [{ bandwidth: BITRATE }] }, + representation: { mediaInfo: { bitrateList: [{ bandwidth: BITRATE }] } }, duration: DURATION }; let headers = cmcdModel.getHeaderParameters(request); @@ -242,7 +250,7 @@ describe('CmcdModel', function () { type: REQUEST_TYPE, mediaType: 
MEDIA_TYPE, quality: 0, - mediaInfo: { bitrateList: [{ bandwidth: BITRATE }] }, + representation: { mediaInfo: { bitrateList: [{ bandwidth: BITRATE }] } }, duration: DURATION }; cmcdModel.getHeaderParameters(request); // first initial request will set startup to true @@ -273,7 +281,7 @@ describe('CmcdModel', function () { type: REQUEST_TYPE, mediaType: MEDIA_TYPE, quality: 0, - mediaInfo: { bitrateList: [{ bandwidth: BITRATE }] }, + representation: { mediaInfo: { bitrateList: [{ bandwidth: BITRATE }] } }, duration: DURATION }; cmcdModel.getHeaderParameters(request); // first initial request will set startup to true @@ -307,7 +315,7 @@ describe('CmcdModel', function () { type: REQUEST_TYPE, mediaType: MEDIA_TYPE, quality: 0, - mediaInfo: { bitrateList: [{ bandwidth: BITRATE }] }, + representation: { mediaInfo: { bitrateList: [{ bandwidth: BITRATE }] } }, duration: DURATION }; let headers = cmcdModel.getHeaderParameters(request); @@ -335,7 +343,8 @@ describe('CmcdModel', function () { let request = { type: REQUEST_TYPE, - mediaType: MEDIA_TYPE + mediaType: MEDIA_TYPE, + representation: { mediaInfo: {} }, }; settings.update({ streaming: { cmcd: { enabled: true, cid: CID } } }); @@ -355,7 +364,8 @@ describe('CmcdModel', function () { let request = { type: REQUEST_TYPE, - mediaType: MEDIA_TYPE + mediaType: MEDIA_TYPE, + representation: { mediaInfo: {} }, }; settings.update({ streaming: { cmcd: { enabled: true, rtp: 10000 } } }); @@ -375,7 +385,8 @@ describe('CmcdModel', function () { let request = { type: REQUEST_TYPE, - mediaType: MEDIA_TYPE + mediaType: MEDIA_TYPE, + representation: { mediaInfo: {} }, }; settings.update({ @@ -484,7 +495,13 @@ describe('CmcdModel', function () { const NEXT_OBJECT_URL = 'next_object'; const NEXT_OBJECT_RANGE = '100-500'; - abrControllerMock.setTopBitrateInfo({ bitrate: TOP_BITRATE }); + abrControllerMock.getPossibleVoRepresentations = () => { + return [ + { + bitrateInKbit: 20 + } + ] + } 
throughputControllerMock.getSafeAverageThroughput = function () { return MEASURED_THROUGHPUT; }; @@ -492,7 +509,8 @@ describe('CmcdModel', function () { type: REQUEST_TYPE, mediaType: MEDIA_TYPE, quality: 0, - mediaInfo: { bitrateList: [{ bandwidth: BITRATE }] }, + bandwidth: BITRATE, + representation: { mediaInfo: { bitrateList: [{ bandwidth: BITRATE }] } }, duration: DURATION, url: 'http://test.url/firstRequest' }; @@ -563,11 +581,21 @@ describe('CmcdModel', function () { const DURATION = 987.213; const CHANGED_PLAYBACK_RATE = 2.4; + abrControllerMock.getPossibleVoRepresentations = () => { + return [ + { + bitrateInKbit: BITRATE / 1000 + } + ] + }; let request = { type: REQUEST_TYPE, mediaType: MEDIA_TYPE, quality: 0, - mediaInfo: { bitrateList: [{ bandwidth: BITRATE }] }, + representation: { + mediaInfo: { bitrateList: [{ bandwidth: BITRATE }] }, + bitrateInKbit: BITRATE / 1000 + }, duration: DURATION }; let parameters = cmcdModel.getQueryParameter(request); @@ -588,11 +616,18 @@ describe('CmcdModel', function () { const BITRATE = 10000; const DURATION = 987.213; + abrControllerMock.getPossibleVoRepresentations = () => { + return [ + { + bitrateInKbit: BITRATE / 1000 + } + ] + }; let request = { type: REQUEST_TYPE, mediaType: MEDIA_TYPE, quality: 0, - mediaInfo: { bitrateList: [{ bandwidth: BITRATE }] }, + representation: { mediaInfo: { bitrateList: [{ bandwidth: BITRATE }] } }, duration: DURATION }; cmcdModel.getQueryParameter(request); // first initial request will set startup to true @@ -617,11 +652,18 @@ describe('CmcdModel', function () { const BITRATE = 10000; const DURATION = 987.213; + abrControllerMock.getPossibleVoRepresentations = () => { + return [ + { + bitrateInKbit: BITRATE / 1000 + } + ] + }; let request = { type: REQUEST_TYPE, mediaType: MEDIA_TYPE, quality: 0, - mediaInfo: { bitrateList: [{ bandwidth: BITRATE }] }, + representation: { mediaInfo: { bitrateList: [{ bandwidth: BITRATE }] } }, duration: DURATION }; 
cmcdModel.getQueryParameter(request); // first initial request will set startup to true @@ -649,11 +691,18 @@ describe('CmcdModel', function () { const BITRATE = 10000; const DURATION = 987.213; + abrControllerMock.getPossibleVoRepresentations = () => { + return [ + { + bitrateInKbit: BITRATE / 1000 + } + ] + }; let request = { type: REQUEST_TYPE, mediaType: MEDIA_TYPE, quality: 0, - mediaInfo: { bitrateList: [{ bandwidth: BITRATE }] }, + representation: { mediaInfo: { bitrateList: [{ bandwidth: BITRATE }] } }, duration: DURATION }; let parameters = cmcdModel.getQueryParameter(request); @@ -681,7 +730,8 @@ describe('CmcdModel', function () { let request = { type: REQUEST_TYPE, - mediaType: MEDIA_TYPE + mediaType: MEDIA_TYPE, + representation: { mediaInfo: {} }, }; settings.update({ streaming: { cmcd: { enabled: true, cid: CID } } }); @@ -701,9 +751,16 @@ describe('CmcdModel', function () { const REQUEST_TYPE = HTTPRequest.MEDIA_SEGMENT_TYPE; const MEDIA_TYPE = 'video'; + abrControllerMock.getPossibleVoRepresentations = () => { + return [ + { + } + ] + }; let request = { type: REQUEST_TYPE, - mediaType: MEDIA_TYPE + mediaType: MEDIA_TYPE, + representation: { mediaInfo: {} }, }; settings.update({ streaming: { cmcd: { enabled: true, rtp: 10000 } } }); diff --git a/test/unit/streaming.models.MediaPlayerModel.js b/test/unit/streaming.models.MediaPlayerModel.js index 069125ade6..0b1687b814 100644 --- a/test/unit/streaming.models.MediaPlayerModel.js +++ b/test/unit/streaming.models.MediaPlayerModel.js @@ -7,6 +7,7 @@ import PlaybackControllerMock from './mocks/PlaybackControllerMock.js'; import ServiceDescriptionController from '../../src/dash/controllers/ServiceDescriptionController.js'; import chai from 'chai'; + const expect = chai.expect; describe('MediaPlayerModel', function () { @@ -70,7 +71,7 @@ describe('MediaPlayerModel', function () { }) it('Should return catchup playback rates if specified in the settings', () => { - settings.update({ streaming: { liveCatchup: 
{ playbackRate: { max: 0.3, min: -0.2 }} } }); + settings.update({ streaming: { liveCatchup: { playbackRate: { max: 0.3, min: -0.2 } } } }); serviceDescriptionController.applyServiceDescription(dummyManifestInfo); const playbackRates = mediaPlayerModel.getCatchupPlaybackRates(); @@ -79,7 +80,7 @@ describe('MediaPlayerModel', function () { }); it('Should set playbackRate.min to 0 if only playbackRate.max is specified in the settings', () => { - settings.update({ streaming: { liveCatchup: { playbackRate: { max: 0.3 }} } }); + settings.update({ streaming: { liveCatchup: { playbackRate: { max: 0.3 } } } }); serviceDescriptionController.applyServiceDescription(dummyManifestInfo); const playbackRates = mediaPlayerModel.getCatchupPlaybackRates(); @@ -88,7 +89,7 @@ describe('MediaPlayerModel', function () { }); it('Should set playbackRate.max to 0 if only playbackRate.min is specified in the settings', () => { - settings.update({ streaming: { liveCatchup: { playbackRate: { min: -0.2 }} } }); + settings.update({ streaming: { liveCatchup: { playbackRate: { min: -0.2 } } } }); serviceDescriptionController.applyServiceDescription(dummyManifestInfo); const playbackRates = mediaPlayerModel.getCatchupPlaybackRates(); @@ -115,7 +116,7 @@ describe('MediaPlayerModel', function () { }); it('Should limit catchup playback rates from settings if they are beyond the rate thresholds', () => { - settings.update({ streaming: { liveCatchup: { playbackRate: { min: -0.8, max: 2.5 }} } }); + settings.update({ streaming: { liveCatchup: { playbackRate: { min: -0.8, max: 2.5 } } } }); serviceDescriptionController.applyServiceDescription(dummyManifestInfo); const playbackRates = mediaPlayerModel.getCatchupPlaybackRates(); @@ -137,7 +138,7 @@ describe('MediaPlayerModel', function () { it('Should set catchup playback rates from settings to 0 if the sign of the set values is incorrect', () => { // i.e. 
if max rate is incorrectly negative, or min rate is incorrectly positive - settings.update({ streaming: { liveCatchup: { playbackRate: { min: 1.5, max: -2.0 }} } }); + settings.update({ streaming: { liveCatchup: { playbackRate: { min: 1.5, max: -2.0 } } } }); serviceDescriptionController.applyServiceDescription(dummyManifestInfo); const playbackRates = mediaPlayerModel.getCatchupPlaybackRates(); @@ -163,48 +164,48 @@ describe('MediaPlayerModel', function () { } }); serviceDescriptionController.applyServiceDescription(dummyManifestInfo); - let value = mediaPlayerModel.getAbrBitrateParameter('maxBitrate','audio'); + let value = mediaPlayerModel.getAbrBitrateParameter('maxBitrate', 'audio'); expect(value).to.be.equal(1); - value = mediaPlayerModel.getAbrBitrateParameter('maxBitrate','video'); + value = mediaPlayerModel.getAbrBitrateParameter('maxBitrate', 'video'); expect(value).to.be.equal(2); - value = mediaPlayerModel.getAbrBitrateParameter('minBitrate','audio'); + value = mediaPlayerModel.getAbrBitrateParameter('minBitrate', 'audio'); expect(value).to.be.equal(3); - value = mediaPlayerModel.getAbrBitrateParameter('minBitrate','video'); + value = mediaPlayerModel.getAbrBitrateParameter('minBitrate', 'video'); expect(value).to.be.equal(4); - value = mediaPlayerModel.getAbrBitrateParameter('initialBitrate','audio'); + value = mediaPlayerModel.getAbrBitrateParameter('initialBitrate', 'audio'); expect(value).to.be.equal(5); - value = mediaPlayerModel.getAbrBitrateParameter('initialBitrate','video'); + value = mediaPlayerModel.getAbrBitrateParameter('initialBitrate', 'video'); expect(value).to.be.equal(6); }) it('Should return abr bitrate parameter if specified via Service Description', () => { serviceDescriptionController.applyServiceDescription(dummyManifestInfo); - let value = mediaPlayerModel.getAbrBitrateParameter('maxBitrate','audio'); + let value = mediaPlayerModel.getAbrBitrateParameter('maxBitrate', 'audio'); expect(value).to.be.equal(9000); - value = 
mediaPlayerModel.getAbrBitrateParameter('maxBitrate','video'); + value = mediaPlayerModel.getAbrBitrateParameter('maxBitrate', 'video'); expect(value).to.be.equal(9000); - value = mediaPlayerModel.getAbrBitrateParameter('minBitrate','audio'); + value = mediaPlayerModel.getAbrBitrateParameter('minBitrate', 'audio'); expect(value).to.be.equal(1000); - value = mediaPlayerModel.getAbrBitrateParameter('minBitrate','video'); + value = mediaPlayerModel.getAbrBitrateParameter('minBitrate', 'video'); expect(value).to.be.equal(1000); - value = mediaPlayerModel.getAbrBitrateParameter('initialBitrate','audio'); + value = mediaPlayerModel.getAbrBitrateParameter('initialBitrate', 'audio'); expect(value).to.be.equal(5000); - value = mediaPlayerModel.getAbrBitrateParameter('initialBitrate','video'); + value = mediaPlayerModel.getAbrBitrateParameter('initialBitrate', 'video'); expect(value).to.be.equal(5000); }) it('Should return -1 for abr bitrate parameters if not specified', () => { - let value = mediaPlayerModel.getAbrBitrateParameter('maxBitrate','audio'); + let value = mediaPlayerModel.getAbrBitrateParameter('maxBitrate', 'audio'); expect(value).to.be.equal(-1); - value = mediaPlayerModel.getAbrBitrateParameter('maxBitrate','video'); + value = mediaPlayerModel.getAbrBitrateParameter('maxBitrate', 'video'); expect(value).to.be.equal(-1); - value = mediaPlayerModel.getAbrBitrateParameter('minBitrate','audio'); + value = mediaPlayerModel.getAbrBitrateParameter('minBitrate', 'audio'); expect(value).to.be.equal(-1); - value = mediaPlayerModel.getAbrBitrateParameter('minBitrate','video'); + value = mediaPlayerModel.getAbrBitrateParameter('minBitrate', 'video'); expect(value).to.be.equal(-1); - value = mediaPlayerModel.getAbrBitrateParameter('initialBitrate','audio'); + value = mediaPlayerModel.getAbrBitrateParameter('initialBitrate', 'audio'); expect(value).to.be.equal(-1); - value = mediaPlayerModel.getAbrBitrateParameter('initialBitrate','video'); + value = 
mediaPlayerModel.getAbrBitrateParameter('initialBitrate', 'video'); expect(value).to.be.equal(-1); }) @@ -312,7 +313,10 @@ describe('MediaPlayerModel', function () { it('should configure initial buffer level with stable buffer time lower than initial buffer level', function () { const bufferTimeDefault = settings.get().streaming.buffer.bufferTimeDefault; - const s = { streaming: { buffer: { initialBufferLevel: bufferTimeDefault + 10 } } }; + const s = { streaming: { buffer: { initialBufferLevel: bufferTimeDefault + 5 } } }; + playbackController.getLiveDelay = () => { + return NaN; + } settings.update(s); let value = mediaPlayerModel.getInitialBufferLevel(); diff --git a/test/unit/streaming.rules.abr.ABRRulesCollection.js b/test/unit/streaming.rules.abr.ABRRulesCollection.js index f1d0e1f01b..1907499ad0 100644 --- a/test/unit/streaming.rules.abr.ABRRulesCollection.js +++ b/test/unit/streaming.rules.abr.ABRRulesCollection.js @@ -279,18 +279,18 @@ describe('ABRRulesCollection', function () { }); it('should return an empty SwitchRequest when getMaxQuality function is called and rulesContext is undefined', function () { - const maxQuality = abrRulesCollection.getMaxQuality(); + const maxQuality = abrRulesCollection.getBestPossibleSwitchRequest(); - expect(maxQuality.quality).to.be.equal(SwitchRequest.NO_CHANGE); + expect(maxQuality.representation).to.be.equal(SwitchRequest.NO_CHANGE); }); it('should return an empty SwitchRequest when shouldAbandonFragment function is called and rulesContext is undefined', function () { const shouldAbandonFragment = abrRulesCollection.shouldAbandonFragment(); - expect(shouldAbandonFragment.quality).to.be.equal(SwitchRequest.NO_CHANGE); + expect(shouldAbandonFragment.representation).to.be.equal(SwitchRequest.NO_CHANGE); }); - it('should return correct switch request in getMinSwitchRequest for a single item', () => { + it('should return correct switch request for a single item', () => { const srArray = [{ quality: 5, priority: 
SwitchRequest.PRIORITY.WEAK, @@ -309,6 +309,7 @@ describe('ABRRulesCollection', function () { const srArray = [ { quality: 6, + representation: { bitrateInKbit: 60 }, priority: SwitchRequest.PRIORITY.WEAK, reason: { throughput: 60 @@ -316,6 +317,7 @@ describe('ABRRulesCollection', function () { }, { quality: 4, + representation: { bitrateInKbit: 40 }, priority: SwitchRequest.PRIORITY.WEAK, reason: { throughput: 40 @@ -323,6 +325,7 @@ describe('ABRRulesCollection', function () { }, { quality: 5, + representation: { bitrateInKbit: 50 }, priority: SwitchRequest.PRIORITY.WEAK, reason: { throughput: 50 @@ -371,6 +374,7 @@ describe('ABRRulesCollection', function () { const srArray = [ { quality: 6, + representation: { bitrateInKbit: 60 }, priority: SwitchRequest.PRIORITY.DEFAULT, reason: { throughput: 60 @@ -378,6 +382,7 @@ describe('ABRRulesCollection', function () { }, { quality: 5, + representation: { bitrateInKbit: 50 }, priority: SwitchRequest.PRIORITY.STRONG, reason: { throughput: 50 @@ -385,6 +390,7 @@ describe('ABRRulesCollection', function () { }, { quality: 4, + representation: { bitrateInKbit: 40 }, priority: SwitchRequest.PRIORITY.STRONG, reason: { throughput: 40 @@ -392,6 +398,7 @@ describe('ABRRulesCollection', function () { }, { quality: 7, + representation: { bitrateInKbit: 70}, priority: SwitchRequest.PRIORITY.WEAK, reason: { throughput: 70 @@ -405,18 +412,6 @@ describe('ABRRulesCollection', function () { expect(sr.reason.throughput).to.be.equal(40); }); - it('should return correct switch request in getMinSwitchRequest for a single item without reason', () => { - const srArray = [{ - quality: 5, - priority: SwitchRequest.PRIORITY.WEAK - }]; - - const sr = abrRulesCollection.getMinSwitchRequest(srArray); - - expect(sr.quality).to.be.equal(5); - expect(sr.reason).to.be.null; - }); - }); diff --git a/test/unit/streaming.rules.abr.AbandonRequestsRule.js b/test/unit/streaming.rules.abr.AbandonRequestsRule.js index 5d7f37a5fe..057e105315 100644 --- 
a/test/unit/streaming.rules.abr.AbandonRequestsRule.js +++ b/test/unit/streaming.rules.abr.AbandonRequestsRule.js @@ -15,7 +15,7 @@ describe('AbandonRequestsRule', function () { const abandonRequestsRule = AbandonRequestsRule(context).create({}); const abandonRequest = abandonRequestsRule.shouldAbandon(); - expect(abandonRequest.quality).to.be.equal(SwitchRequest.NO_CHANGE); + expect(abandonRequest.representation).to.be.equal(SwitchRequest.NO_CHANGE); }); it('should return an empty switchRequest when shouldAbandon function is called with a mock parameter', function () { @@ -31,6 +31,6 @@ describe('AbandonRequestsRule', function () { const abandonRequest = abandonRequestsRule.shouldAbandon(rulesContextMock); - expect(abandonRequest.quality).to.be.equal(SwitchRequest.NO_CHANGE); + expect(abandonRequest.representation).to.be.equal(SwitchRequest.NO_CHANGE); }); }); diff --git a/test/unit/streaming.rules.abr.BolaRule.js b/test/unit/streaming.rules.abr.BolaRule.js index 20d658f9a9..ad51a9f55d 100644 --- a/test/unit/streaming.rules.abr.BolaRule.js +++ b/test/unit/streaming.rules.abr.BolaRule.js @@ -8,21 +8,21 @@ const bolaRule = BolaRule(context).create({}); const rulesContextMock = new RulesContextMock(); describe('BolaRule', function () { - it('should return an empty switchRequest when getMaxIndex function is called with an empty parameter', function () { - const maxIndexRequest = bolaRule.getMaxIndex(); + it('should return an empty switchRequest when getSwitchRequest function is called with an empty parameter', function () { + const maxIndexRequest = bolaRule.getSwitchRequest(); - expect(maxIndexRequest.quality).to.be.equal(SwitchRequest.NO_CHANGE); + expect(maxIndexRequest.representation).to.be.equal(SwitchRequest.NO_CHANGE); }); - it('should return an empty switchRequest when getMaxIndex function is called with an malformed parameter', function () { - const maxIndexRequest = bolaRule.getMaxIndex({}); + it('should return an empty switchRequest when getSwitchRequest 
function is called with an malformed parameter', function () { + const maxIndexRequest = bolaRule.getSwitchRequest({}); - expect(maxIndexRequest.quality).to.be.equal(SwitchRequest.NO_CHANGE); + expect(maxIndexRequest.representation).to.be.equal(SwitchRequest.NO_CHANGE); }); - it('should return an empty switchRequest when getMaxIndex function is called with an well formed parameter', function () { - const maxIndexRequest = bolaRule.getMaxIndex(rulesContextMock); + it('should return an empty switchRequest when getSwitchRequest function is called with an well formed parameter', function () { + const maxIndexRequest = bolaRule.getSwitchRequest(rulesContextMock); - expect(maxIndexRequest.quality).to.be.equal(SwitchRequest.NO_CHANGE); + expect(maxIndexRequest.representation).to.be.equal(SwitchRequest.NO_CHANGE); }); }); diff --git a/test/unit/streaming.rules.abr.DroppedFramesRule.js b/test/unit/streaming.rules.abr.DroppedFramesRule.js index 9c003d6b45..be6eea1fea 100644 --- a/test/unit/streaming.rules.abr.DroppedFramesRule.js +++ b/test/unit/streaming.rules.abr.DroppedFramesRule.js @@ -8,21 +8,21 @@ const droppedFramesRule = DroppedFramesRule(context).create({}); const rulesContextMock = new RulesContextMock(); describe('DroppedFramesRule', function () { - it('should return an empty switchRequest when getMaxIndex function is called with an empty parameter', function () { - const maxIndexRequest = droppedFramesRule.getMaxIndex(); + it('should return an empty switchRequest when getSwitchRequest function is called with an empty parameter', function () { + const maxIndexRequest = droppedFramesRule.getSwitchRequest(); - expect(maxIndexRequest.quality).to.be.equal(SwitchRequest.NO_CHANGE); + expect(maxIndexRequest.representation).to.be.equal(SwitchRequest.NO_CHANGE); }); - it('should return an empty switchRequest when getMaxIndex function is called with an malformed parameter', function () { - const maxIndexRequest = droppedFramesRule.getMaxIndex({}); + it('should return an 
empty switchRequest when getSwitchRequest function is called with an malformed parameter', function () { + const maxIndexRequest = droppedFramesRule.getSwitchRequest({}); - expect(maxIndexRequest.quality).to.be.equal(SwitchRequest.NO_CHANGE); + expect(maxIndexRequest.representation).to.be.equal(SwitchRequest.NO_CHANGE); }); - it('should return an empty switchRequest when getMaxIndex function is called with an well formed parameter', function () { - const maxIndexRequest = droppedFramesRule.getMaxIndex(rulesContextMock); + it('should return an empty switchRequest when getSwitchRequest function is called with an well formed parameter', function () { + const maxIndexRequest = droppedFramesRule.getSwitchRequest(rulesContextMock); - expect(maxIndexRequest.quality).to.be.equal(SwitchRequest.NO_CHANGE); + expect(maxIndexRequest.representation).to.be.equal(SwitchRequest.NO_CHANGE); }); }); diff --git a/test/unit/streaming.rules.abr.InsufficientBufferRule.js b/test/unit/streaming.rules.abr.InsufficientBufferRule.js index e77149d603..699482fce8 100644 --- a/test/unit/streaming.rules.abr.InsufficientBufferRule.js +++ b/test/unit/streaming.rules.abr.InsufficientBufferRule.js @@ -1,6 +1,5 @@ import InsufficientBufferRule from '../../src/streaming/rules/abr/InsufficientBufferRule.js'; import SwitchRequest from '../../src/streaming/rules/SwitchRequest.js'; -import Constants from '../../src/streaming/constants/Constants.js'; import EventBus from '../../src/core/EventBus.js'; import Events from '../../src/core/events/Events.js'; import DashMetricsMock from './mocks/DashMetricsMock.js'; @@ -20,21 +19,18 @@ describe('InsufficientBufferRule', function () { }); }); - it('should return an empty switchRequest when getMaxIndex function is called with an empty parameter', function () { - const maxIndexRequest = insufficientBufferRule.getMaxIndex(); + it('should return an empty switchRequest when getSwitchRequest function is called with an empty parameter', function () { + const 
maxIndexRequest = insufficientBufferRule.getSwitchRequest(); - expect(maxIndexRequest.quality).to.be.equal(SwitchRequest.NO_CHANGE); + expect(maxIndexRequest.representation).to.be.equal(SwitchRequest.NO_CHANGE); }); - it('should return an empty switchRequest when getMaxIndex function is called with an malformed parameter', function () { - const maxIndexRequest = insufficientBufferRule.getMaxIndex({}); + it('should return an empty switchRequest when getSwitchRequest function is called with an malformed parameter', function () { + const maxIndexRequest = insufficientBufferRule.getSwitchRequest({}); - expect(maxIndexRequest.quality).to.be.equal(SwitchRequest.NO_CHANGE); + expect(maxIndexRequest.representation).to.be.equal(SwitchRequest.NO_CHANGE); }); - it('should throw an exception when attempting to call getMaxIndex While the config attribute has not been set properly', function () { - expect(insufficientBufferRule.getMaxIndex.bind(insufficientBufferRule, { getMediaType: {} })).to.throw(Constants.MISSING_CONFIG_ERROR); - }); it('should return an empty switch request when bufferState is empty', function () { const dashMetricsMock = new DashMetricsMock(); @@ -51,7 +47,7 @@ describe('InsufficientBufferRule', function () { id: 'DUMMY_STREAM-01' }; }, - getRepresentationInfo: function () { + getRepresentation: function () { return { fragmentDuration: 4 }; }, getScheduleController: function () { @@ -71,8 +67,8 @@ describe('InsufficientBufferRule', function () { settings }); - const maxIndexRequest = rule.getMaxIndex(rulesContextMock); - expect(maxIndexRequest.quality).to.be.equal(SwitchRequest.NO_CHANGE); + const maxIndexRequest = rule.getSwitchRequest(rulesContextMock); + expect(maxIndexRequest.representation).to.be.equal(SwitchRequest.NO_CHANGE); }); it('should return an empty switch request when first call is done with a buffer in state bufferStalled', function () { @@ -93,7 +89,7 @@ describe('InsufficientBufferRule', function () { id: 'DUMMY_STREAM-01' }; }, - 
getRepresentationInfo: function () { + getRepresentation: function () { return { fragmentDuration: 4 }; }, getScheduleController: function () { @@ -113,8 +109,8 @@ describe('InsufficientBufferRule', function () { settings }); dashMetricsMock.addBufferState('video', bufferState); - let maxIndexRequest = rule.getMaxIndex(rulesContextMock); - expect(maxIndexRequest.quality).to.be.equal(SwitchRequest.NO_CHANGE); + let maxIndexRequest = rule.getSwitchRequest(rulesContextMock); + expect(maxIndexRequest.representation).to.be.equal(SwitchRequest.NO_CHANGE); }); it('should return an empty switch request with a buffer in state bufferLoaded and fragmentDuration is NaN', function () { @@ -135,7 +131,7 @@ describe('InsufficientBufferRule', function () { id: 'DUMMY_STREAM-01' }; }, - getRepresentationInfo: function () { + getRepresentation: function () { return { fragmentDuration: NaN }; }, getScheduleController: function () { @@ -155,15 +151,15 @@ describe('InsufficientBufferRule', function () { settings }); dashMetricsMock.addBufferState('video', bufferState); - const maxIndexRequest = rule.getMaxIndex(rulesContextMock); - expect(maxIndexRequest.quality).to.be.equal(SwitchRequest.NO_CHANGE); + const maxIndexRequest = rule.getSwitchRequest(rulesContextMock); + expect(maxIndexRequest.representation).to.be.equal(SwitchRequest.NO_CHANGE); }); it('should return index 0 after two fragments appended with a buffer in state bufferLoaded and fragmentDuration is NaN and then bufferStalled with fragmentDuration > 0', function () { let bufferState = { state: 'bufferLoaded' }; - let representationInfo = { fragmentDuration: NaN }; + let voRepresentation = { fragmentDuration: NaN, id: 1 }; const dashMetricsMock = new DashMetricsMock(); const rulesContextMock = { getMediaInfo: function () { @@ -172,14 +168,19 @@ describe('InsufficientBufferRule', function () { return 'video'; }, getAbrController: function () { + return { + getOptimalRepresentationForBitrate: function () { + return 
voRepresentation + } + } }, getStreamInfo: function () { return { id: 'DUMMY_STREAM-01' }; }, - getRepresentationInfo: function () { - return representationInfo; + getRepresentation: function () { + return voRepresentation; }, getScheduleController: function () { return { @@ -209,16 +210,16 @@ describe('InsufficientBufferRule', function () { bufferState.state = 'bufferStalled'; dashMetricsMock.addBufferState('video', bufferState); - representationInfo.fragmentDuration = 4; - const maxIndexRequest = rule.getMaxIndex(rulesContextMock); - expect(maxIndexRequest.quality).to.be.equal(0); + voRepresentation.fragmentDuration = 4; + const maxIndexRequest = rule.getSwitchRequest(rulesContextMock); + expect(maxIndexRequest.representation.id).to.be.equal(1); }); it('should return index -1 for zero and one fragments appended after a seek, then index 0 afterwards when bufferStalled', function () { const bufferState = { state: 'bufferStalled' }; - const representationInfo = { fragmentDuration: 4 }; + const voRepresentation = { fragmentDuration: 4, id: 1 }; const dashMetricsMock = new DashMetricsMock(); dashMetricsMock.addBufferState('video', bufferState); @@ -229,9 +230,14 @@ describe('InsufficientBufferRule', function () { return 'video'; }, getAbrController: function () { + return { + getOptimalRepresentationForBitrate: function () { + return voRepresentation + } + } }, - getRepresentationInfo: function () { - return representationInfo; + getRepresentation: function () { + return voRepresentation; }, getStreamInfo: function () { return { @@ -256,17 +262,17 @@ describe('InsufficientBufferRule', function () { settings }); - let maxIndexRequest = rule.getMaxIndex(rulesContextMock); - expect(maxIndexRequest.quality).to.be.equal(-1); + let maxIndexRequest = rule.getSwitchRequest(rulesContextMock); + expect(maxIndexRequest.representation).to.be.equal(SwitchRequest.NO_CHANGE); let e = { mediaType: 'video', startTime: 0 }; eventBus.trigger(Events.BYTES_APPENDED_END_FRAGMENT, e); - 
maxIndexRequest = rule.getMaxIndex(rulesContextMock); - expect(maxIndexRequest.quality).to.be.equal(-1); + maxIndexRequest = rule.getSwitchRequest(rulesContextMock); + expect(maxIndexRequest.representation).to.be.equal(SwitchRequest.NO_CHANGE); e = { mediaType: 'video', startTime: 4 }; eventBus.trigger(Events.BYTES_APPENDED_END_FRAGMENT, e); - maxIndexRequest = rule.getMaxIndex(rulesContextMock); - expect(maxIndexRequest.quality).to.be.equal(0); + maxIndexRequest = rule.getSwitchRequest(rulesContextMock); + expect(maxIndexRequest.representation.id).to.be.equal(1); }); }); diff --git a/test/unit/streaming.rules.abr.SwitchHistoryRule.js b/test/unit/streaming.rules.abr.SwitchHistoryRule.js index 93de305c03..e19f404a30 100644 --- a/test/unit/streaming.rules.abr.SwitchHistoryRule.js +++ b/test/unit/streaming.rules.abr.SwitchHistoryRule.js @@ -7,16 +7,16 @@ const context = {}; const switchHistoryRule = SwitchHistoryRule(context).create(); describe('SwitchHistoryRule', function () { - it('should return an empty switchRequest when getMaxIndex function is called with an empty parameter', function () { - const switchRequest = switchHistoryRule.getMaxIndex(); + it('should return an empty switchRequest when getSwitchRequest function is called with an empty parameter', function () { + const switchRequest = switchHistoryRule.getSwitchRequest(); - expect(switchRequest.quality).to.be.equal(SwitchRequest.NO_CHANGE); + expect(switchRequest.representation).to.be.equal(SwitchRequest.NO_CHANGE); }); it('should return an switchRequest with quality equals 0 when one switchRequest equals to {drops: 7, noDrops: 0, dropSize: 4}, a division by zero occurs', function () { let rulesContextMock = new RulesContextMock(); - const switchRequest = switchHistoryRule.getMaxIndex(rulesContextMock); + const switchRequest = switchHistoryRule.getSwitchRequest(rulesContextMock); - expect(switchRequest.quality).to.be.equal(0); + expect(switchRequest.representation.id).to.be.equal(1); }); }); diff --git 
a/test/unit/streaming.rules.abr.ThroughputRule.js b/test/unit/streaming.rules.abr.ThroughputRule.js index 9949af4559..2aab6fd5b8 100644 --- a/test/unit/streaming.rules.abr.ThroughputRule.js +++ b/test/unit/streaming.rules.abr.ThroughputRule.js @@ -6,16 +6,16 @@ const context = {}; const throughputRule = ThroughputRule(context).create({}); describe('ThroughputRule', function () { - it('should return an empty switchRequest when getMaxIndex function is called with an empty parameter', function () { - const maxIndexRequest = throughputRule.getMaxIndex(); + it('should return an empty switchRequest when getSwitchRequest function is called with an empty parameter', function () { + const maxIndexRequest = throughputRule.getSwitchRequest(); - expect(maxIndexRequest.quality).to.be.equal(SwitchRequest.NO_CHANGE); + expect(maxIndexRequest.representation).to.be.equal(SwitchRequest.NO_CHANGE); }); - it('should return an empty switchRequest when getMaxIndex function is called with an malformed parameter', function () { - const maxIndexRequest = throughputRule.getMaxIndex({}); + it('should return an empty switchRequest when getSwitchRequest function is called with an malformed parameter', function () { + const maxIndexRequest = throughputRule.getSwitchRequest({}); - expect(maxIndexRequest.quality).to.be.equal(SwitchRequest.NO_CHANGE); + expect(maxIndexRequest.representation).to.be.equal(SwitchRequest.NO_CHANGE); }); }); diff --git a/test/unit/streaming.text.NotFragmentedTextBufferController.js b/test/unit/streaming.text.NotFragmentedTextBufferController.js index 298fc3055c..8e9475b2ef 100644 --- a/test/unit/streaming.text.NotFragmentedTextBufferController.js +++ b/test/unit/streaming.text.NotFragmentedTextBufferController.js @@ -8,6 +8,7 @@ import StreamProcessorMock from './mocks/StreamProcessorMock.js'; import MediaSourceMock from './mocks/MediaSourceMock.js'; import chai from 'chai'; + const expect = chai.expect; const context = {}; @@ -176,8 +177,10 @@ 
describe('NotFragmentedTextBufferController', function () { let event = { chunk: { streamId: 'id', - mediaInfo: { - type: testType + representation: { + mediaInfo: { + type: testType + } }, bytes: 'data' } diff --git a/test/unit/streaming.text.TextSourceBuffer.js b/test/unit/streaming.text.TextSourceBuffer.js index aa7b17e5e1..5a2ccdbb7e 100644 --- a/test/unit/streaming.text.TextSourceBuffer.js +++ b/test/unit/streaming.text.TextSourceBuffer.js @@ -5,6 +5,7 @@ import ErrorHandlerMock from './mocks/ErrorHandlerMock.js'; import AdapterMock from './mocks/AdapterMock.js'; import chai from 'chai'; + const expect = chai.expect; const context = {}; @@ -23,10 +24,12 @@ describe('TextSourceBuffer', function () { it('call to append function with invalid tttml data should triggered a parse error', function () { const buffer = new ArrayBuffer(8); textSourceBuffer.append(buffer, { - mediaInfo: { - type: 'text', - mimeType: 'application/ttml+xml', - codec: 'application/ttml+xml;codecs=\'undefined\'' + representation: { + mediaInfo: { + type: 'text', + mimeType: 'application/ttml+xml', + codec: 'application/ttml+xml;codecs=\'undefined\'' + } } }); expect(errorHandlerMock.errorCode).to.equal(Errors.TIMED_TEXT_ERROR_ID_PARSE_CODE); diff --git a/test/unit/streaming.thumbnail.ThumbnailController.js b/test/unit/streaming.thumbnail.ThumbnailController.js index 3565185fca..022152c2ad 100644 --- a/test/unit/streaming.thumbnail.ThumbnailController.js +++ b/test/unit/streaming.thumbnail.ThumbnailController.js @@ -47,7 +47,10 @@ const sampleRepresentation = { essentialProperties: [{ schemeIdUri: 'http://dashif.org/guidelines/thumbnail_tile', value: '10x1' - }] + }], + mediaInfo: { + type: 'image' + } }; const sampleRepresentation2 = { @@ -65,7 +68,10 @@ const sampleRepresentation2 = { essentialProperties: [{ schemeIdUri: 'http://dashif.org/guidelines/thumbnail_tile', value: '10x20' - }] + }], + mediaInfo: { + type: 'image' + } }; const sampleRepresentation3 = { @@ -83,10 +89,13 @@ const 
sampleRepresentation3 = { essentialProperties: [{ schemeIdUri: 'http://dashif.org/thumbnail_tile', value: '50x10' - }] + }], + mediaInfo: { + type: 'image' + } }; -describe('Thumbnails', function () { +describe('ThumbnailController', function () { describe('ThumbnailController not initializeed', function () { const objectsHelper = new ObjectsHelper(); const adapter = new AdapterMock(); @@ -114,7 +123,7 @@ describe('Thumbnails', function () { expect(thumbnail).to.be.null; // jshint ignore:line }); - expect(thumbnailController.getBitrateList()).to.be.empty; // jshint ignore:line + expect(thumbnailController.getPossibleVoRepresentations()).to.be.empty; // jshint ignore:line }); }); @@ -182,11 +191,11 @@ describe('Thumbnails', function () { }); }); - it('should return list of available bitrates', function () { - const bitrates = thumbnailController.getBitrateList(); - expect(bitrates).to.have.lengthOf(1); - expect(bitrates[0].mediaType).to.equal('image'); - expect(bitrates[0].bitrate).to.equal(2000); + it('should return list of available representations', function () { + const possibleVoRepresentations = thumbnailController.getPossibleVoRepresentations(); + expect(possibleVoRepresentations).to.have.lengthOf(1); + expect(possibleVoRepresentations[0].mediaInfo.type).to.equal('image'); + expect(possibleVoRepresentations[0].bandwidth).to.equal(2000); }); it('tracks selection', function () { diff --git a/test/unit/streaming.utils.InitCache.js b/test/unit/streaming.utils.InitCache.js index 2a7e104c8b..9163ecc912 100644 --- a/test/unit/streaming.utils.InitCache.js +++ b/test/unit/streaming.utils.InitCache.js @@ -30,7 +30,7 @@ describe('InitCache', function () { it('should return when extract is called and representationId is undefined', () => { - initCache.save({streamId: 'defauldId', representationId: 'video_880k', dataTest: 'videoSegment'}); + initCache.save({ streamId: 'defauldId', representation: { id: 'video_880k' }, dataTest: 'videoSegment' }); const streamId = 
'defauldId'; const representationId = 'video_880k';