Source: lib/util/stream_utils.js

  1. /*! @license
  2. * Shaka Player
  3. * Copyright 2016 Google LLC
  4. * SPDX-License-Identifier: Apache-2.0
  5. */
  6. goog.provide('shaka.util.StreamUtils');
  7. goog.require('goog.asserts');
  8. goog.require('shaka.config.AutoShowText');
  9. goog.require('shaka.device.DeviceFactory');
  10. goog.require('shaka.device.IDevice');
  11. goog.require('shaka.lcevc.Dec');
  12. goog.require('shaka.log');
  13. goog.require('shaka.media.Capabilities');
  14. goog.require('shaka.text.TextEngine');
  15. goog.require('shaka.util.Functional');
  16. goog.require('shaka.util.LanguageUtils');
  17. goog.require('shaka.util.ManifestParserUtils');
  18. goog.require('shaka.util.MimeUtils');
  19. goog.require('shaka.util.MultiMap');
  20. goog.require('shaka.util.ObjectUtils');
  21. goog.requireType('shaka.drm.DrmEngine');
  22. /**
  23. * @summary A set of utility functions for dealing with Streams and Manifests.
  24. * @export
  25. */
  26. shaka.util.StreamUtils = class {
  27. /**
  28. * In case of multiple usable codecs, choose one based on lowest average
  29. * bandwidth and filter out the rest.
  30. * Also filters out variants that have too many audio channels.
  31. * @param {!shaka.extern.Manifest} manifest
  32. * @param {!Array<string>} preferredVideoCodecs
  33. * @param {!Array<string>} preferredAudioCodecs
  34. * @param {!Array<string>} preferredDecodingAttributes
  35. * @param {!Array<string>} preferredTextFormats
  36. */
  37. static chooseCodecsAndFilterManifest(manifest, preferredVideoCodecs,
  38. preferredAudioCodecs, preferredDecodingAttributes, preferredTextFormats) {
  39. const StreamUtils = shaka.util.StreamUtils;
  40. const MimeUtils = shaka.util.MimeUtils;
  41. if (preferredTextFormats.length) {
  42. let subset = manifest.textStreams;
  43. for (const textFormat of preferredTextFormats) {
  44. const filtered = subset.filter((textStream) => {
  45. if (textStream.codecs.startsWith(textFormat) ||
  46. textStream.mimeType.startsWith(textFormat)) {
  47. return true;
  48. }
  49. return false;
  50. });
  51. if (filtered.length) {
  52. subset = filtered;
  53. break;
  54. }
  55. }
  56. manifest.textStreams = subset;
  57. }
  58. let variants = manifest.variants;
  59. // To start, choose the codecs based on configured preferences if available.
  60. if (preferredVideoCodecs.length || preferredAudioCodecs.length) {
  61. variants = StreamUtils.choosePreferredCodecs(variants,
  62. preferredVideoCodecs, preferredAudioCodecs);
  63. }
  64. if (preferredDecodingAttributes.length) {
65. // Group variants by resolution and choose preferred variants only.
  66. /** @type {!shaka.util.MultiMap<shaka.extern.Variant>} */
  67. const variantsByResolutionMap = new shaka.util.MultiMap();
  68. for (const variant of variants) {
  69. variantsByResolutionMap
  70. .push(String(variant.video.width || 0), variant);
  71. }
  72. const bestVariants = [];
  73. variantsByResolutionMap.forEach((width, variantsByResolution) => {
  74. let highestMatch = 0;
  75. let matchingVariants = [];
  76. for (const variant of variantsByResolution) {
  77. const matchCount = preferredDecodingAttributes.filter(
  78. (attribute) => variant.decodingInfos[0][attribute],
  79. ).length;
  80. if (matchCount > highestMatch) {
  81. highestMatch = matchCount;
  82. matchingVariants = [variant];
  83. } else if (matchCount == highestMatch) {
  84. matchingVariants.push(variant);
  85. }
  86. }
  87. bestVariants.push(...matchingVariants);
  88. });
  89. variants = bestVariants;
  90. }
  91. const audioStreamsSet = new Set();
  92. const videoStreamsSet = new Set();
  93. for (const variant of variants) {
  94. if (variant.audio) {
  95. audioStreamsSet.add(variant.audio);
  96. }
  97. if (variant.video) {
  98. videoStreamsSet.add(variant.video);
  99. }
  100. }
  101. const audioStreams = Array.from(audioStreamsSet).sort((v1, v2) => {
  102. return v1.bandwidth - v2.bandwidth;
  103. });
  104. const validAudioIds = [];
  105. const validAudioStreamsMap = new Map();
  106. const getAudioId = (stream) => {
  107. let id = stream.language + (stream.channelsCount || 0) +
  108. (stream.audioSamplingRate || 0) + stream.roles.join(',') +
  109. stream.label + stream.groupId + stream.fastSwitching;
  110. if (stream.dependencyStream) {
  111. id += stream.dependencyStream.baseOriginalId || '';
  112. }
  113. return id;
  114. };
  115. for (const stream of audioStreams) {
  116. const groupId = getAudioId(stream);
  117. const validAudioStreams = validAudioStreamsMap.get(groupId) || [];
  118. if (!validAudioStreams.length) {
  119. validAudioStreams.push(stream);
  120. validAudioIds.push(stream.id);
  121. } else {
  122. const previousStream = validAudioStreams[validAudioStreams.length - 1];
  123. const previousCodec =
  124. MimeUtils.getNormalizedCodec(previousStream.codecs);
  125. const currentCodec =
  126. MimeUtils.getNormalizedCodec(stream.codecs);
  127. if (previousCodec == currentCodec) {
  128. if (!stream.bandwidth || !previousStream.bandwidth ||
  129. stream.bandwidth > previousStream.bandwidth) {
  130. validAudioStreams.push(stream);
  131. validAudioIds.push(stream.id);
  132. }
  133. }
  134. }
  135. validAudioStreamsMap.set(groupId, validAudioStreams);
  136. }
137. // Keys based on MimeUtils.getNormalizedCodec(). Lower is better.
  138. const videoCodecPreference = {
  139. 'vp8': 1,
  140. 'avc': 1,
  141. 'dovi-avc': 0.95,
  142. 'vp9': 0.9,
  143. 'vp09': 0.9,
  144. 'hevc': 0.85,
  145. 'dovi-hevc': 0.8,
  146. 'dovi-p5': 0.75,
  147. 'av01': 0.7,
  148. 'dovi-av1': 0.65,
  149. 'vvc': 0.6,
  150. };
  151. const videoStreams = Array.from(videoStreamsSet)
  152. .sort((v1, v2) => {
  153. if (!v1.bandwidth || !v2.bandwidth || v1.bandwidth == v2.bandwidth) {
  154. if (v1.codecs && v2.codecs && v1.codecs != v2.codecs &&
  155. v1.width == v2.width) {
  156. const v1Codecs = MimeUtils.getNormalizedCodec(v1.codecs);
  157. const v2Codecs = MimeUtils.getNormalizedCodec(v2.codecs);
  158. if (v1Codecs != v2Codecs) {
  159. const indexV1 = videoCodecPreference[v1Codecs] || 1;
  160. const indexV2 = videoCodecPreference[v2Codecs] || 1;
  161. return indexV1 - indexV2;
  162. }
  163. }
  164. return v1.width - v2.width;
  165. }
  166. return v1.bandwidth - v2.bandwidth;
  167. });
  168. const isChangeTypeSupported =
  169. shaka.media.Capabilities.isChangeTypeSupported();
  170. const validVideoIds = [];
  171. const validVideoStreamsMap = new Map();
  172. const getVideoGroupId = (stream) => {
  173. let id = String(stream.width || '') + String(stream.height || '') +
  174. String(Math.round(stream.frameRate || 0)) + (stream.hdr || '') +
  175. stream.fastSwitching;
  176. if (stream.dependencyStream) {
  177. id += stream.dependencyStream.baseOriginalId || '';
  178. }
  179. return id;
  180. };
  181. for (const stream of videoStreams) {
  182. const groupId = getVideoGroupId(stream);
  183. const validVideoStreams = validVideoStreamsMap.get(groupId) || [];
  184. if (!validVideoStreams.length) {
  185. validVideoStreams.push(stream);
  186. validVideoIds.push(stream.id);
  187. } else {
  188. const previousStream = validVideoStreams[validVideoStreams.length - 1];
  189. if (!isChangeTypeSupported) {
  190. const previousCodec =
  191. MimeUtils.getNormalizedCodec(previousStream.codecs);
  192. const currentCodec =
  193. MimeUtils.getNormalizedCodec(stream.codecs);
  194. if (previousCodec !== currentCodec) {
  195. continue;
  196. }
  197. }
  198. const previousCodec =
  199. MimeUtils.getNormalizedCodec(previousStream.codecs);
  200. const currentCodec =
  201. MimeUtils.getNormalizedCodec(stream.codecs);
  202. if (previousCodec == currentCodec) {
  203. if (!stream.bandwidth || !previousStream.bandwidth ||
  204. stream.bandwidth > previousStream.bandwidth) {
  205. validVideoStreams.push(stream);
  206. validVideoIds.push(stream.id);
  207. }
  208. }
  209. }
  210. validVideoStreamsMap.set(groupId, validVideoStreams);
  211. }
212. // Filter out any variants that don't match, forcing AbrManager to choose
213. // from a single video codec and a single audio codec where possible.
  214. manifest.variants = manifest.variants.filter((variant) => {
  215. const audio = variant.audio;
  216. const video = variant.video;
  217. if (audio) {
  218. if (!validAudioIds.includes(audio.id)) {
  219. shaka.log.debug('Dropping Variant (better codec available)', variant);
  220. return false;
  221. }
  222. }
  223. if (video) {
  224. if (!validVideoIds.includes(video.id)) {
  225. shaka.log.debug('Dropping Variant (better codec available)', variant);
  226. return false;
  227. }
  228. }
  229. return true;
  230. });
  231. }
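// Illustrative usage sketch: a typical call into this filter, assuming a
// parsed |manifest| is already in hand. The preference values below are
// example inputs, not defaults defined anywhere in this file.
//
//   shaka.util.StreamUtils.chooseCodecsAndFilterManifest(
//       manifest,
//       /* preferredVideoCodecs= */ ['hvc1', 'avc1'],
//       /* preferredAudioCodecs= */ ['ec-3', 'mp4a'],
//       /* preferredDecodingAttributes= */ ['smooth', 'powerEfficient'],
//       /* preferredTextFormats= */ ['wvtt']);
//
// After the call, manifest.variants only keeps streams whose ids were
// collected in validAudioIds / validVideoIds, so AbrManager effectively
// chooses between bitrates of one audio codec and one video codec.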
  232. /**
  233. * Choose the codecs by configured preferred audio and video codecs.
  234. *
  235. * @param {!Array<shaka.extern.Variant>} variants
  236. * @param {!Array<string>} preferredVideoCodecs
  237. * @param {!Array<string>} preferredAudioCodecs
  238. * @return {!Array<shaka.extern.Variant>}
  239. */
  240. static choosePreferredCodecs(variants, preferredVideoCodecs,
  241. preferredAudioCodecs) {
  242. let subset = variants;
  243. for (const videoCodec of preferredVideoCodecs) {
  244. const filtered = subset.filter((variant) => {
  245. return variant.video && variant.video.codecs.startsWith(videoCodec);
  246. });
  247. if (filtered.length) {
  248. subset = filtered;
  249. break;
  250. }
  251. }
  252. for (const audioCodec of preferredAudioCodecs) {
  253. const filtered = subset.filter((variant) => {
  254. return variant.audio && variant.audio.codecs.startsWith(audioCodec);
  255. });
  256. if (filtered.length) {
  257. subset = filtered;
  258. break;
  259. }
  260. }
  261. return subset;
  262. }
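// Worked example (illustrative values): preference order matters, and
// matching is by prefix via String.prototype.startsWith(). With
//   preferredVideoCodecs = ['dvh1', 'hvc1', 'avc1']
// a manifest with no Dolby Vision variants but with HEVC falls through the
// 'dvh1' pass (no matches, subset unchanged), then keeps only variants whose
// video codecs start with 'hvc1' and breaks out of the loop; 'avc1' is never
// considered once an earlier preference has matched something.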
  263. /**
  264. * Filter the variants in |manifest| to only include the variants that meet
  265. * the given restrictions.
  266. *
  267. * @param {!shaka.extern.Manifest} manifest
  268. * @param {shaka.extern.Restrictions} restrictions
  269. * @param {shaka.extern.Resolution} maxHwResolution
  270. */
  271. static filterByRestrictions(manifest, restrictions, maxHwResolution) {
  272. manifest.variants = manifest.variants.filter((variant) => {
  273. return shaka.util.StreamUtils.meetsRestrictions(
  274. variant, restrictions, maxHwResolution);
  275. });
  276. }
  277. /**
  278. * @param {shaka.extern.Variant} variant
  279. * @param {shaka.extern.Restrictions} restrictions
  280. * Configured restrictions from the user.
  281. * @param {shaka.extern.Resolution} maxHwRes
  282. * The maximum resolution the hardware can handle.
  283. * This is applied separately from user restrictions because the setting
  284. * should not be easily replaced by the user's configuration.
  285. * @return {boolean}
  286. * @export
  287. */
  288. static meetsRestrictions(variant, restrictions, maxHwRes) {
  289. /** @type {function(number, number, number):boolean} */
  290. const inRange = (x, min, max) => {
  291. return x >= min && x <= max;
  292. };
  293. const video = variant.video;
  294. // |video.width| and |video.height| can be undefined, which breaks
  295. // the math, so make sure they are there first.
  296. if (video && video.width && video.height) {
  297. let videoWidth = video.width;
  298. let videoHeight = video.height;
  299. if (videoHeight > videoWidth) {
  300. // Vertical video.
  301. [videoWidth, videoHeight] = [videoHeight, videoWidth];
  302. }
  303. if (!inRange(videoWidth,
  304. restrictions.minWidth,
  305. Math.min(restrictions.maxWidth, maxHwRes.width))) {
  306. return false;
  307. }
  308. if (!inRange(videoHeight,
  309. restrictions.minHeight,
  310. Math.min(restrictions.maxHeight, maxHwRes.height))) {
  311. return false;
  312. }
  313. if (!inRange(video.width * video.height,
  314. restrictions.minPixels,
  315. restrictions.maxPixels)) {
  316. return false;
  317. }
  318. }
  319. // |variant.video.frameRate| can be undefined, which breaks
320. // the math, so make sure it is there first.
  321. if (variant && variant.video && variant.video.frameRate) {
  322. if (!inRange(variant.video.frameRate,
  323. restrictions.minFrameRate,
  324. restrictions.maxFrameRate)) {
  325. return false;
  326. }
  327. }
  328. // |variant.audio.channelsCount| can be undefined, which breaks
329. // the math, so make sure it is there first.
  330. if (variant && variant.audio && variant.audio.channelsCount) {
  331. if (!inRange(variant.audio.channelsCount,
  332. restrictions.minChannelsCount,
  333. restrictions.maxChannelsCount)) {
  334. return false;
  335. }
  336. }
  337. if (!inRange(variant.bandwidth,
  338. restrictions.minBandwidth,
  339. restrictions.maxBandwidth)) {
  340. return false;
  341. }
  342. return true;
  343. }
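// Illustrative sketch, assuming a shaka.extern.Restrictions-shaped object
// with the field names used above; the values are made up for the example.
//
//   const restrictions = {
//     minWidth: 0, maxWidth: 1920,
//     minHeight: 0, maxHeight: 1080,
//     minPixels: 0, maxPixels: Infinity,
//     minFrameRate: 0, maxFrameRate: Infinity,
//     minChannelsCount: 0, maxChannelsCount: Infinity,
//     minBandwidth: 0, maxBandwidth: Infinity,
//   };
//   const maxHwRes = {width: 1280, height: 720};
//   // A 1920x1080 variant fails here, because the effective width limit is
//   // Math.min(restrictions.maxWidth, maxHwRes.width) == 1280.
//   shaka.util.StreamUtils.meetsRestrictions(variant, restrictions, maxHwRes);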
  344. /**
  345. * @param {!Array<shaka.extern.Variant>} variants
  346. * @param {shaka.extern.Restrictions} restrictions
  347. * @param {shaka.extern.Resolution} maxHwRes
  348. * @return {boolean} Whether the tracks changed.
  349. */
  350. static applyRestrictions(variants, restrictions, maxHwRes) {
  351. let tracksChanged = false;
  352. for (const variant of variants) {
  353. const originalAllowed = variant.allowedByApplication;
  354. variant.allowedByApplication = shaka.util.StreamUtils.meetsRestrictions(
  355. variant, restrictions, maxHwRes);
  356. if (originalAllowed != variant.allowedByApplication) {
  357. tracksChanged = true;
  358. }
  359. }
  360. return tracksChanged;
  361. }
  362. /**
  363. * Alters the given Manifest to filter out any unplayable streams.
  364. *
  365. * @param {shaka.drm.DrmEngine} drmEngine
  366. * @param {shaka.extern.Manifest} manifest
  367. * @param {!Array<string>=} preferredKeySystems
  368. * @param {!Object<string, string>=} keySystemsMapping
  369. */
  370. static async filterManifest(drmEngine, manifest, preferredKeySystems = [],
  371. keySystemsMapping = {}) {
  372. await shaka.util.StreamUtils.filterManifestByMediaCapabilities(
  373. drmEngine, manifest, manifest.offlineSessionIds.length > 0,
  374. preferredKeySystems, keySystemsMapping);
  375. shaka.util.StreamUtils.filterTextStreams_(manifest);
  376. await shaka.util.StreamUtils.filterImageStreams_(manifest);
  377. }
  378. /**
  379. * Alters the given Manifest to filter out any streams unsupported by the
  380. * platform via MediaCapabilities.decodingInfo() API.
  381. *
  382. * @param {shaka.drm.DrmEngine} drmEngine
  383. * @param {shaka.extern.Manifest} manifest
  384. * @param {boolean} usePersistentLicenses
  385. * @param {!Array<string>} preferredKeySystems
  386. * @param {!Object<string, string>} keySystemsMapping
  387. */
  388. static async filterManifestByMediaCapabilities(
  389. drmEngine, manifest, usePersistentLicenses, preferredKeySystems,
  390. keySystemsMapping) {
  391. goog.asserts.assert(navigator.mediaCapabilities,
  392. 'MediaCapabilities should be valid.');
  393. if (shaka.device.DeviceFactory.getDevice()
  394. .shouldOverrideDolbyVisionCodecs()) {
  395. shaka.util.StreamUtils.overrideDolbyVisionCodecs(manifest.variants);
  396. }
  397. await shaka.util.StreamUtils.getDecodingInfosForVariants(
  398. manifest.variants, usePersistentLicenses, /* srcEquals= */ false,
  399. preferredKeySystems);
  400. let keySystem = null;
  401. if (drmEngine) {
  402. const drmInfo = drmEngine.getDrmInfo();
  403. if (drmInfo) {
  404. keySystem = drmInfo.keySystem;
  405. }
  406. }
  407. const StreamUtils = shaka.util.StreamUtils;
  408. manifest.variants = manifest.variants.filter((variant) => {
  409. const supported = StreamUtils.checkVariantSupported_(
  410. variant, keySystem, keySystemsMapping);
  411. // Filter out all unsupported variants.
  412. if (!supported) {
  413. shaka.log.debug('Dropping variant - not compatible with platform',
  414. StreamUtils.getVariantSummaryString_(variant));
  415. }
  416. return supported;
  417. });
  418. }
  419. /**
420. * Maps Dolby Vision codecs to their H.264, H.265 and H.266 equivalents as
421. * a workaround to make Dolby Vision playback work on some platforms.
  422. *
  423. * Mapping is done according to the relevant Dolby documentation found here:
  424. * https://professionalsupport.dolby.com/s/article/How-to-signal-Dolby-Vision-in-MPEG-DASH?language=en_US
  425. * @param {!Array<!shaka.extern.Variant>} variants
  426. */
  427. static overrideDolbyVisionCodecs(variants) {
  428. /** @type {!Map<string, string>} */
  429. const codecMap = new Map()
  430. .set('dvav', 'avc3')
  431. .set('dva1', 'avc1')
  432. .set('dvhe', 'hev1')
  433. .set('dvh1', 'hvc1')
  434. .set('dvc1', 'vvc1')
  435. .set('dvi1', 'vvi1');
  436. /** @type {!Set<!shaka.extern.Stream>} */
  437. const streams = new Set();
  438. for (const variant of variants) {
  439. if (variant.video) {
  440. streams.add(variant.video);
  441. }
  442. }
  443. for (const video of streams) {
  444. for (const [dvCodec, replacement] of codecMap) {
  445. if (video.codecs.includes(dvCodec)) {
  446. video.codecs = video.codecs.replace(dvCodec, replacement);
  447. break;
  448. }
  449. }
  450. }
  451. }
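// Worked example: with the mapping above, a video stream whose codecs string
// is 'dvh1.05.06' becomes 'hvc1.05.06', and 'dvav.04.09' becomes
// 'avc3.04.09'. Only the four-character Dolby Vision prefix is swapped; the
// profile/level suffix is preserved by String.prototype.replace().
// (The profile/level values here are illustrative.)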
  452. /**
  453. * @param {!shaka.extern.Variant} variant
  454. * @param {?string} keySystem
  455. * @param {!Object<string, string>} keySystemsMapping
  456. * @return {boolean}
  457. * @private
  458. */
  459. static checkVariantSupported_(variant, keySystem, keySystemsMapping) {
  460. const variantSupported = variant.decodingInfos.some((decodingInfo) => {
  461. if (!decodingInfo.supported) {
  462. return false;
  463. }
  464. if (keySystem) {
  465. const keySystemAccess = decodingInfo.keySystemAccess;
  466. if (keySystemAccess) {
  467. const currentKeySystem =
  468. keySystemsMapping[keySystemAccess.keySystem] ||
  469. keySystemAccess.keySystem;
  470. if (currentKeySystem != keySystem) {
  471. return false;
  472. }
  473. }
  474. }
  475. return true;
  476. });
  477. if (!variantSupported) {
  478. return false;
  479. }
  480. const device = shaka.device.DeviceFactory.getDevice();
  481. const isXboxOne = device.getDeviceName() === 'Xbox';
  482. const isFirefoxAndroid =
  483. device.getDeviceType() === shaka.device.IDevice.DeviceType.MOBILE &&
  484. device.getBrowserEngine() === shaka.device.IDevice.BrowserEngine.GECKO;
  485. // See: https://github.com/shaka-project/shaka-player/issues/3860
  486. const video = variant.video;
  487. const videoWidth = (video && video.width) || 0;
  488. const videoHeight = (video && video.height) || 0;
  489. // See: https://github.com/shaka-project/shaka-player/issues/3380
490. // Note: it makes sense to drop such variants early.
  491. if (isXboxOne && video && (videoWidth > 1920 || videoHeight > 1080) &&
  492. (video.codecs.includes('avc1.') || video.codecs.includes('avc3.'))) {
  493. return false;
  494. }
  495. const videoDependencyStream = video && video.dependencyStream;
  496. if (videoDependencyStream &&
  497. !shaka.lcevc.Dec.isStreamSupported(videoDependencyStream)) {
  498. return false;
  499. }
  500. const audio = variant.audio;
  501. // See: https://github.com/shaka-project/shaka-player/issues/6111
  502. // It seems that Firefox Android reports that it supports
  503. // Opus + Widevine, but it is not actually supported.
  504. // It makes sense to drop early.
  505. if (isFirefoxAndroid && audio && audio.encrypted &&
  506. audio.codecs.toLowerCase().includes('opus')) {
  507. return false;
  508. }
  509. const audioDependencyStream = audio && audio.dependencyStream;
  510. if (audioDependencyStream) {
  511. return false;
  512. }
  513. return true;
  514. }
  515. /**
516. * Queries mediaCapabilities for the decoding info of the given decoding
517. * configs and assigns the merged result to the given variant.
518. * If a query has been done before, the cached result is reused instead.
  519. * @param {!shaka.extern.Variant} variant
  520. * @param {!Array<!MediaDecodingConfiguration>} decodingConfigs
  521. * @private
  522. */
  523. static async getDecodingInfosForVariant_(variant, decodingConfigs) {
  524. /**
  525. * @param {?MediaCapabilitiesDecodingInfo} a
  526. * @param {!MediaCapabilitiesDecodingInfo} b
  527. * @return {!MediaCapabilitiesDecodingInfo}
  528. */
  529. const merge = (a, b) => {
  530. if (!a) {
  531. return b;
  532. } else {
  533. const res = shaka.util.ObjectUtils.shallowCloneObject(a);
  534. res.supported = a.supported && b.supported;
  535. res.powerEfficient = a.powerEfficient && b.powerEfficient;
  536. res.smooth = a.smooth && b.smooth;
  537. if (b.keySystemAccess && !res.keySystemAccess) {
  538. res.keySystemAccess = b.keySystemAccess;
  539. }
  540. return res;
  541. }
  542. };
  543. const StreamUtils = shaka.util.StreamUtils;
  544. /** @type {?MediaCapabilitiesDecodingInfo} */
  545. let finalResult = null;
  546. const promises = [];
  547. for (const decodingConfig of decodingConfigs) {
  548. const cacheKey =
  549. shaka.util.ObjectUtils.alphabeticalKeyOrderStringify(decodingConfig);
  550. const cache = StreamUtils.decodingConfigCache_;
  551. if (cache.has(cacheKey)) {
  552. shaka.log.v2('Using cached results of mediaCapabilities.decodingInfo',
  553. 'for key', cacheKey);
  554. finalResult = merge(finalResult, cache.get(cacheKey));
  555. } else {
556. // Do a final pass over the decoding config: if a given stream has
557. // multiple codecs, that suggests that it switches between those codecs
558. // at points in the stream.
559. // mediaCapabilities by itself will report "not supported" when you
560. // pass in multiple different codecs, so each has to be checked
561. // individually. Check each and take the worst result to determine
562. // overall variant compatibility.
  563. promises.push(StreamUtils
  564. .checkEachDecodingConfigCombination_(decodingConfig).then((res) => {
  565. /** @type {?MediaCapabilitiesDecodingInfo} */
  566. let acc = null;
  567. for (const result of (res || [])) {
  568. acc = merge(acc, result);
  569. }
  570. if (acc) {
  571. cache.set(cacheKey, acc);
  572. finalResult = merge(finalResult, acc);
  573. }
  574. }));
  575. }
  576. }
  577. await Promise.all(promises);
  578. if (finalResult) {
  579. variant.decodingInfos.push(finalResult);
  580. }
  581. }
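// Merge semantics (illustrative): the variant is only as capable as its
// weakest config. For example, merging
//   {supported: true, smooth: true, powerEfficient: false}
// with
//   {supported: true, smooth: false, powerEfficient: true}
// yields {supported: true, smooth: false, powerEfficient: false}, and that
// single worst-case result is what gets pushed onto variant.decodingInfos.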
  582. /**
  583. * @param {!MediaDecodingConfiguration} decodingConfig
  584. * @return {!Promise<?Array<!MediaCapabilitiesDecodingInfo>>}
  585. * @private
  586. */
  587. static checkEachDecodingConfigCombination_(decodingConfig) {
  588. let videoCodecs = [''];
  589. if (decodingConfig.video) {
  590. videoCodecs = shaka.util.MimeUtils.getCodecs(
  591. decodingConfig.video.contentType).split(',');
  592. }
  593. let audioCodecs = [''];
  594. if (decodingConfig.audio) {
  595. audioCodecs = shaka.util.MimeUtils.getCodecs(
  596. decodingConfig.audio.contentType).split(',');
  597. }
  598. const promises = [];
  599. for (const videoCodec of videoCodecs) {
  600. for (const audioCodec of audioCodecs) {
  601. const copy = shaka.util.ObjectUtils.cloneObject(decodingConfig);
  602. if (decodingConfig.video) {
  603. const mimeType = shaka.util.MimeUtils.getBasicType(
  604. copy.video.contentType);
  605. copy.video.contentType = shaka.util.MimeUtils.getFullType(
  606. mimeType, videoCodec);
  607. }
  608. if (decodingConfig.audio) {
  609. const mimeType = shaka.util.MimeUtils.getBasicType(
  610. copy.audio.contentType);
  611. copy.audio.contentType = shaka.util.MimeUtils.getFullType(
  612. mimeType, audioCodec);
  613. }
  614. promises.push(new Promise((resolve, reject) => {
  615. // On some (Android) WebView environments, decodingInfo will
  616. // not resolve or reject, at least if RESOURCE_PROTECTED_MEDIA_ID
  617. // is not set. This is a workaround for that issue.
  618. const TIMEOUT_FOR_DECODING_INFO_IN_SECONDS = 5;
  619. let promise;
  620. const device = shaka.device.DeviceFactory.getDevice();
  621. if (device.getDeviceType() ==
  622. shaka.device.IDevice.DeviceType.MOBILE) {
  623. promise = shaka.util.Functional.promiseWithTimeout(
  624. TIMEOUT_FOR_DECODING_INFO_IN_SECONDS,
  625. navigator.mediaCapabilities.decodingInfo(copy),
  626. );
  627. } else {
  628. promise = navigator.mediaCapabilities.decodingInfo(copy);
  629. }
  630. promise.then((res) => {
  631. resolve(res);
  632. }).catch(reject);
  633. }));
  634. }
  635. }
  636. return Promise.all(promises).catch((e) => {
  637. shaka.log.info('MediaCapabilities.decodingInfo() failed.',
  638. JSON.stringify(decodingConfig), e);
  639. return null;
  640. });
  641. }
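// Worked example (illustrative): a config whose video contentType is
//   'video/mp4; codecs="avc1.4d401f,hvc1.1.6.L93.B0"'
// is split into two separate decodingInfo() probes, one per codec:
//   'video/mp4; codecs="avc1.4d401f"'
//   'video/mp4; codecs="hvc1.1.6.L93.B0"'
// cross-multiplied with each audio codec, if any. The caller then merges the
// results and keeps the worst case, since decodingInfo() itself would report
// "not supported" for a comma-separated codec list.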
  642. /**
  643. * Get the decodingInfo results of the variants via MediaCapabilities.
  644. * This should be called after the DrmEngine is created and configured, and
  645. * before DrmEngine sets the mediaKeys.
  646. *
  647. * @param {!Array<shaka.extern.Variant>} variants
  648. * @param {boolean} usePersistentLicenses
  649. * @param {boolean} srcEquals
  650. * @param {!Array<string>} preferredKeySystems
  651. * @exportDoc
  652. */
  653. static async getDecodingInfosForVariants(variants, usePersistentLicenses,
  654. srcEquals, preferredKeySystems) {
  655. const gotDecodingInfo = variants.some((variant) =>
  656. variant.decodingInfos.length);
  657. if (gotDecodingInfo) {
  658. shaka.log.debug('Already got the variants\' decodingInfo.');
  659. return;
  660. }
  661. // Try to get preferred key systems first to avoid unneeded calls to CDM.
  662. for (const preferredKeySystem of preferredKeySystems) {
  663. let keySystemSatisfied = false;
  664. for (const variant of variants) {
  665. /** @type {!Array<!Array<!MediaDecodingConfiguration>>} */
  666. const decodingConfigs = shaka.util.StreamUtils.getDecodingConfigs_(
  667. variant, usePersistentLicenses, srcEquals)
  668. .filter((configs) => {
  669. // All configs in a batch will have the same keySystem.
  670. const config = configs[0];
  671. const keySystem = config.keySystemConfiguration &&
  672. config.keySystemConfiguration.keySystem;
  673. return keySystem === preferredKeySystem;
  674. });
  675. // The reason we are performing this await in a loop rather than
676. // batching into a `Promise.all` is performance related.
  677. // https://github.com/shaka-project/shaka-player/pull/4708#discussion_r1022581178
  678. for (const configs of decodingConfigs) {
  679. // eslint-disable-next-line no-await-in-loop
  680. await shaka.util.StreamUtils.getDecodingInfosForVariant_(
  681. variant, configs);
  682. }
  683. if (variant.decodingInfos.length) {
  684. keySystemSatisfied = true;
  685. }
  686. } // for (const variant of variants)
  687. if (keySystemSatisfied) {
  688. // Return if any preferred key system is already satisfied.
  689. return;
  690. }
  691. } // for (const preferredKeySystem of preferredKeySystems)
  692. for (const variant of variants) {
  693. /** @type {!Array<!Array<!MediaDecodingConfiguration>>} */
  694. const decodingConfigs = shaka.util.StreamUtils.getDecodingConfigs_(
  695. variant, usePersistentLicenses, srcEquals)
  696. .filter((configs) => {
  697. // All configs in a batch will have the same keySystem.
  698. const config = configs[0];
  699. const keySystem = config.keySystemConfiguration &&
  700. config.keySystemConfiguration.keySystem;
  701. // Avoid checking preferred systems twice.
  702. return !keySystem || !preferredKeySystems.includes(keySystem);
  703. });
  704. // The reason we are performing this await in a loop rather than
705. // batching into a `Promise.all` is performance related.
  706. // https://github.com/shaka-project/shaka-player/pull/4708#discussion_r1022581178
  707. for (const configs of decodingConfigs) {
  708. // eslint-disable-next-line no-await-in-loop
  709. await shaka.util.StreamUtils.getDecodingInfosForVariant_(
  710. variant, configs);
  711. }
  712. }
  713. }
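// Illustrative usage sketch: this is normally driven by the Player and
// DrmEngine, but a direct call looks roughly like the following (the key
// system value is an example, not a requirement of this method).
//
//   await shaka.util.StreamUtils.getDecodingInfosForVariants(
//       manifest.variants,
//       /* usePersistentLicenses= */ false,
//       /* srcEquals= */ false,
//       /* preferredKeySystems= */ ['com.widevine.alpha']);
//
// Afterwards, each variant that could be probed carries at least one entry in
// variant.decodingInfos (possibly with keySystemAccess for encrypted
// content), which checkVariantSupported_() and the codec filters rely on.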
  714. /**
  715. * Generate a batch of MediaDecodingConfiguration objects to get the
  716. * decodingInfo results for each variant.
  717. * Each batch shares the same DRM information, and represents the various
  718. * fullMimeType combinations of the streams.
  719. * @param {!shaka.extern.Variant} variant
  720. * @param {boolean} usePersistentLicenses
  721. * @param {boolean} srcEquals
  722. * @return {!Array<!Array<!MediaDecodingConfiguration>>}
  723. * @private
  724. */
  725. static getDecodingConfigs_(variant, usePersistentLicenses, srcEquals) {
  726. const audio = variant.audio;
  727. const video = variant.video;
  728. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  729. const ManifestParserUtils = shaka.util.ManifestParserUtils;
  730. const MimeUtils = shaka.util.MimeUtils;
  731. const StreamUtils = shaka.util.StreamUtils;
  732. const videoConfigs = [];
  733. const audioConfigs = [];
  734. if (video) {
  735. for (const fullMimeType of video.fullMimeTypes) {
  736. let videoCodecs = MimeUtils.getCodecs(fullMimeType);
  737. // For multiplexed streams with audio+video codecs, the config should
  738. // have AudioConfiguration and VideoConfiguration.
  739. // We ignore the multiplexed audio when there is normal audio also.
  740. if (videoCodecs.includes(',') && !audio) {
  741. const allCodecs = videoCodecs.split(',');
  742. const baseMimeType = MimeUtils.getBasicType(fullMimeType);
  743. videoCodecs = ManifestParserUtils.guessCodecs(
  744. ContentType.VIDEO, allCodecs);
  745. let audioCodecs = ManifestParserUtils.guessCodecs(
  746. ContentType.AUDIO, allCodecs);
  747. audioCodecs = StreamUtils.getCorrectAudioCodecs(
  748. audioCodecs, baseMimeType);
  749. const audioFullType = MimeUtils.getFullOrConvertedType(
  750. baseMimeType, audioCodecs, ContentType.AUDIO);
  751. audioConfigs.push({
  752. contentType: audioFullType,
  753. channels: 2,
  754. bitrate: variant.bandwidth || 1,
  755. samplerate: 1,
  756. spatialRendering: false,
  757. });
  758. }
  759. videoCodecs = StreamUtils.getCorrectVideoCodecs(videoCodecs);
  760. const fullType = MimeUtils.getFullOrConvertedType(
  761. MimeUtils.getBasicType(fullMimeType), videoCodecs,
  762. ContentType.VIDEO);
  763. // VideoConfiguration
  764. const videoConfig = {
  765. contentType: fullType,
  766. // NOTE: Some decoders strictly check the width and height fields and
  767. // won't decode smaller than 64x64. So if we don't have this info (as
  768. // is the case in some of our simpler tests), assume a 64x64
  769. // resolution to fill in this required field for MediaCapabilities.
  770. //
  771. // This became an issue specifically on Firefox on M1 Macs.
  772. width: video.width || 64,
  773. height: video.height || 64,
  774. bitrate: video.bandwidth || variant.bandwidth || 1,
  775. // framerate must be greater than 0, otherwise the config is invalid.
  776. framerate: video.frameRate || 1,
  777. };
  778. if (video.hdr) {
  779. switch (video.hdr) {
  780. case 'SDR':
  781. videoConfig.transferFunction = 'srgb';
  782. break;
  783. case 'PQ':
  784. videoConfig.transferFunction = 'pq';
  785. break;
  786. case 'HLG':
  787. videoConfig.transferFunction = 'hlg';
  788. break;
  789. }
  790. }
  791. if (video.colorGamut) {
  792. videoConfig.colorGamut = video.colorGamut;
  793. }
  794. videoConfigs.push(videoConfig);
  795. }
  796. }
  797. if (audio) {
  798. for (const fullMimeType of audio.fullMimeTypes) {
  799. const baseMimeType = MimeUtils.getBasicType(fullMimeType);
  800. const codecs = StreamUtils.getCorrectAudioCodecs(
  801. MimeUtils.getCodecs(fullMimeType), baseMimeType);
  802. const fullType = MimeUtils.getFullOrConvertedType(
  803. baseMimeType, codecs, ContentType.AUDIO);
  804. // AudioConfiguration
  805. audioConfigs.push({
  806. contentType: fullType,
  807. channels: audio.channelsCount || 2,
  808. bitrate: audio.bandwidth || variant.bandwidth || 1,
  809. samplerate: audio.audioSamplingRate || 1,
  810. spatialRendering: audio.spatialAudio,
  811. });
  812. }
  813. }
  814. // Generate each combination of video and audio config as a separate
  815. // MediaDecodingConfiguration, inside the main "batch".
  816. /** @type {!Array<!MediaDecodingConfiguration>} */
  817. const mediaDecodingConfigBatch = [];
  818. if (videoConfigs.length == 0) {
  819. videoConfigs.push(null);
  820. }
  821. if (audioConfigs.length == 0) {
  822. audioConfigs.push(null);
  823. }
  824. for (const videoConfig of videoConfigs) {
  825. for (const audioConfig of audioConfigs) {
  826. /** @type {!MediaDecodingConfiguration} */
  827. const mediaDecodingConfig = {
  828. type: srcEquals ? 'file' : 'media-source',
  829. };
  830. if (videoConfig) {
  831. mediaDecodingConfig.video = videoConfig;
  832. }
  833. if (audioConfig) {
  834. mediaDecodingConfig.audio = audioConfig;
  835. }
  836. mediaDecodingConfigBatch.push(mediaDecodingConfig);
  837. }
  838. }
  839. const videoDrmInfos = variant.video ? variant.video.drmInfos : [];
  840. const audioDrmInfos = variant.audio ? variant.audio.drmInfos : [];
  841. const allDrmInfos = videoDrmInfos.concat(audioDrmInfos);
842. // Return a list with the mediaDecodingConfig for an unencrypted variant.
  843. if (!allDrmInfos.length) {
  844. return [mediaDecodingConfigBatch];
  845. }
  846. // A list of MediaDecodingConfiguration objects created for the variant.
  847. const configs = [];
  848. // Get all the drm info so that we can avoid using nested loops when we
  849. // just need the drm info.
  850. const drmInfoByKeySystems = new Map();
  851. for (const info of allDrmInfos) {
  852. if (!drmInfoByKeySystems.get(info.keySystem)) {
  853. drmInfoByKeySystems.set(info.keySystem, []);
  854. }
  855. drmInfoByKeySystems.get(info.keySystem).push(info);
  856. }
  857. const persistentState =
  858. usePersistentLicenses ? 'required' : 'optional';
  859. const sessionTypes =
  860. usePersistentLicenses ? ['persistent-license'] : ['temporary'];
  861. for (const keySystem of drmInfoByKeySystems.keys()) {
  862. const drmInfos = drmInfoByKeySystems.get(keySystem);
  863. // Get all the robustness info so that we can avoid using nested
  864. // loops when we just need the robustness.
  865. const drmInfosByRobustness = new Map();
  866. for (const info of drmInfos) {
  867. const keyName = `${info.videoRobustness},${info.audioRobustness}`;
  868. if (!drmInfosByRobustness.get(keyName)) {
  869. drmInfosByRobustness.set(keyName, []);
  870. }
  871. drmInfosByRobustness.get(keyName).push(info);
  872. }
  873. for (const drmInfosRobustness of drmInfosByRobustness.values()) {
  874. const modifiedMediaDecodingConfigBatch = [];
  875. for (const base of mediaDecodingConfigBatch) {
  876. // Create a copy of the mediaDecodingConfig.
  877. const config = /** @type {!MediaDecodingConfiguration} */
  878. (Object.assign({}, base));
  879. /** @type {!MediaCapabilitiesKeySystemConfiguration} */
  880. const keySystemConfig = {
  881. keySystem: keySystem,
  882. initDataType: 'cenc',
  883. persistentState: persistentState,
  884. distinctiveIdentifier: 'optional',
  885. sessionTypes: sessionTypes,
  886. };
  887. for (const info of drmInfosRobustness) {
  888. if (info.initData && info.initData.length) {
  889. const initDataTypes = new Set();
  890. for (const initData of info.initData) {
  891. initDataTypes.add(initData.initDataType);
  892. }
  893. if (initDataTypes.size > 1) {
  894. shaka.log.v2('DrmInfo contains more than one initDataType,',
  895. 'and we use the initDataType of the first initData.',
  896. info);
  897. }
  898. keySystemConfig.initDataType = info.initData[0].initDataType;
  899. }
  900. if (info.distinctiveIdentifierRequired) {
  901. keySystemConfig.distinctiveIdentifier = 'required';
  902. }
  903. if (info.persistentStateRequired) {
  904. keySystemConfig.persistentState = 'required';
  905. }
  906. if (info.sessionType) {
  907. keySystemConfig.sessionTypes = [info.sessionType];
  908. }
  909. if (audio) {
  910. if (!keySystemConfig.audio) {
  911. // KeySystemTrackConfiguration
  912. keySystemConfig.audio = {
  913. robustness: info.audioRobustness,
  914. };
  915. if (info.encryptionScheme) {
  916. keySystemConfig.audio.encryptionScheme =
  917. info.encryptionScheme;
  918. }
  919. } else {
  920. if (info.encryptionScheme) {
  921. keySystemConfig.audio.encryptionScheme =
  922. keySystemConfig.audio.encryptionScheme ||
  923. info.encryptionScheme;
  924. }
  925. keySystemConfig.audio.robustness =
  926. keySystemConfig.audio.robustness ||
  927. info.audioRobustness;
  928. }
  929. // See: https://github.com/shaka-project/shaka-player/issues/4659
  930. if (keySystemConfig.audio.robustness == '') {
  931. delete keySystemConfig.audio.robustness;
  932. }
  933. }
  934. if (video) {
  935. if (!keySystemConfig.video) {
  936. // KeySystemTrackConfiguration
  937. keySystemConfig.video = {
  938. robustness: info.videoRobustness,
  939. };
  940. if (info.encryptionScheme) {
  941. keySystemConfig.video.encryptionScheme =
  942. info.encryptionScheme;
  943. }
  944. } else {
  945. if (info.encryptionScheme) {
  946. keySystemConfig.video.encryptionScheme =
  947. keySystemConfig.video.encryptionScheme ||
  948. info.encryptionScheme;
  949. }
  950. keySystemConfig.video.robustness =
  951. keySystemConfig.video.robustness ||
  952. info.videoRobustness;
  953. }
  954. // See: https://github.com/shaka-project/shaka-player/issues/4659
  955. if (keySystemConfig.video.robustness == '') {
  956. delete keySystemConfig.video.robustness;
  957. }
  958. }
  959. }
  960. config.keySystemConfiguration = keySystemConfig;
  961. modifiedMediaDecodingConfigBatch.push(config);
  962. }
  963. configs.push(modifiedMediaDecodingConfigBatch);
  964. }
  965. }
  966. return configs;
  967. }
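// Shape of the return value (illustrative sketch; the codec strings,
// bitrates, and robustness values are assumed examples): for an encrypted
// audio+video variant with a single Widevine DrmInfo and
// usePersistentLicenses == false, one batch might look like
//
//   [[{
//     type: 'media-source',
//     video: {contentType: 'video/mp4; codecs="avc1.42001e"', width: 1280,
//             height: 720, bitrate: 2000000, framerate: 30},
//     audio: {contentType: 'audio/mp4; codecs="mp4a.40.2"', channels: 2,
//             bitrate: 128000, samplerate: 48000, spatialRendering: false},
//     keySystemConfiguration: {
//       keySystem: 'com.widevine.alpha',
//       initDataType: 'cenc',
//       persistentState: 'optional',
//       distinctiveIdentifier: 'optional',
//       sessionTypes: ['temporary'],
//       audio: {robustness: 'SW_SECURE_CRYPTO'},
//       video: {robustness: 'SW_SECURE_CRYPTO'},
//     },
//   }]]
//
// with one inner array (batch) per key system / robustness combination.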
  968. /**
  969. * Generates the correct audio codec for MediaDecodingConfiguration and
  970. * for MediaSource.isTypeSupported.
  971. * @param {string} codecs
  972. * @param {string} mimeType
  973. * @return {string}
  974. */
  975. static getCorrectAudioCodecs(codecs, mimeType) {
  976. // According to RFC 6381 section 3.3, 'fLaC' is actually the correct
  977. // codec string. We still need to map it to 'flac', as some browsers
  978. // currently don't support 'fLaC', while 'flac' is supported by most
  979. // major browsers.
  980. // See https://bugs.chromium.org/p/chromium/issues/detail?id=1422728
  981. const device = shaka.device.DeviceFactory.getDevice();
  982. const webkit = shaka.device.IDevice.BrowserEngine.WEBKIT;
  983. if (codecs.toLowerCase() == 'flac') {
  984. if (device.getBrowserEngine() != webkit) {
  985. return 'flac';
  986. } else {
  987. return 'fLaC';
  988. }
  989. }
  990. // The same is true for 'Opus'.
  991. if (codecs.toLowerCase() === 'opus') {
  992. if (device.getBrowserEngine() != webkit) {
  993. return 'opus';
  994. } else {
  995. if (shaka.util.MimeUtils.getContainerType(mimeType) == 'mp4') {
  996. return 'Opus';
  997. } else {
  998. return 'opus';
  999. }
  1000. }
  1001. }
  1002. if (codecs.toLowerCase() == 'ac-3' && device.requiresEC3InitSegments()) {
  1003. return 'ec-3';
  1004. }
  1005. return codecs;
  1006. }
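// Worked examples: on a WebKit-based browser this returns 'fLaC' for an
// input of 'flac', and 'Opus' for 'opus' when the container is MP4; on other
// engines the lowercase forms are returned. 'ac-3' is rewritten to 'ec-3'
// only on devices that require EC-3 init segments.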
  1007. /**
  1008. * Generates the correct video codec for MediaDecodingConfiguration and
  1009. * for MediaSource.isTypeSupported.
  1010. * @param {string} codec
  1011. * @return {string}
  1012. */
  1013. static getCorrectVideoCodecs(codec) {
  1014. if (codec.includes('avc1')) {
  1015. // Convert avc1 codec string from RFC-4281 to RFC-6381 for
  1016. // MediaSource.isTypeSupported
1017. // Example: convert avc1.66.30 to avc1.42001e (0x42 == 66 and 0x1e == 30).
  1018. const avcData = codec.split('.');
  1019. if (avcData.length == 3) {
  1020. let result = avcData.shift() + '.';
  1021. result += parseInt(avcData.shift(), 10).toString(16);
  1022. result +=
  1023. ('000' + parseInt(avcData.shift(), 10).toString(16)).slice(-4);
  1024. return result;
  1025. }
  1026. } else if (codec == 'vp9') {
  1027. // MediaCapabilities supports 'vp09...' codecs, but not 'vp9'. Translate
  1028. // vp9 codec strings into 'vp09...', to allow such content to play with
  1029. // mediaCapabilities enabled.
  1030. // This means profile 0, level 4.1, 8-bit color. This supports 1080p @
  1031. // 60Hz. See https://en.wikipedia.org/wiki/VP9#Levels
  1032. //
  1033. // If we don't have more detailed codec info, assume this profile and
  1034. // level because it's high enough to likely accommodate the parameters we
  1035. // do have, such as width and height. If an implementation is checking
  1036. // the profile and level very strictly, we want older VP9 content to
  1037. // still work to some degree. But we don't want to set a level so high
  1038. // that it is rejected by a hardware decoder that can't handle the
  1039. // maximum requirements of the level.
  1040. //
  1041. // This became an issue specifically on Firefox on M1 Macs.
  1042. return 'vp09.00.41.08';
  1043. }
  1044. return codec;
  1045. }
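// Worked example: 'avc1.66.30' (RFC 4281 style) becomes 'avc1.42001e', since
// 66 == 0x42 and 30 == 0x1e padded to four hex digits; a bare 'vp9' becomes
// 'vp09.00.41.08'. A codec already in RFC 6381 form, such as 'avc1.4d401f',
// has only one '.'-separated field after the prefix and is returned
// unchanged.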
  1046. /**
  1047. * Alters the given Manifest to filter out any streams incompatible with the
  1048. * current variant.
  1049. *
  1050. * @param {?shaka.extern.Variant} currentVariant
  1051. * @param {shaka.extern.Manifest} manifest
  1052. */
  1053. static filterManifestByCurrentVariant(currentVariant, manifest) {
  1054. const StreamUtils = shaka.util.StreamUtils;
  1055. manifest.variants = manifest.variants.filter((variant) => {
  1056. const audio = variant.audio;
  1057. const video = variant.video;
  1058. if (audio && currentVariant && currentVariant.audio) {
  1059. if (!StreamUtils.areStreamsCompatible_(audio, currentVariant.audio)) {
  1060. shaka.log.debug('Dropping variant - not compatible with active audio',
  1061. 'active audio',
  1062. StreamUtils.getStreamSummaryString_(currentVariant.audio),
  1063. 'variant.audio',
  1064. StreamUtils.getStreamSummaryString_(audio));
  1065. return false;
  1066. }
  1067. }
  1068. if (video && currentVariant && currentVariant.video) {
  1069. if (!StreamUtils.areStreamsCompatible_(video, currentVariant.video)) {
  1070. shaka.log.debug('Dropping variant - not compatible with active video',
  1071. 'active video',
  1072. StreamUtils.getStreamSummaryString_(currentVariant.video),
  1073. 'variant.video',
  1074. StreamUtils.getStreamSummaryString_(video));
  1075. return false;
  1076. }
  1077. }
  1078. return true;
  1079. });
  1080. }
  1081. /**
  1082. * Alters the given Manifest to filter out any unsupported text streams.
  1083. *
  1084. * @param {shaka.extern.Manifest} manifest
  1085. * @private
  1086. */
  1087. static filterTextStreams_(manifest) {
  1088. // Filter text streams.
  1089. manifest.textStreams = manifest.textStreams.filter((stream) => {
  1090. const fullMimeType = shaka.util.MimeUtils.getFullType(
  1091. stream.mimeType, stream.codecs);
  1092. const keep = shaka.text.TextEngine.isTypeSupported(fullMimeType);
  1093. if (!keep) {
  1094. shaka.log.debug('Dropping text stream. Is not supported by the ' +
  1095. 'platform.', stream);
  1096. }
  1097. return keep;
  1098. });
  1099. }
  1100. /**
  1101. * Alters the given Manifest to filter out any unsupported image streams.
  1102. *
  1103. * @param {shaka.extern.Manifest} manifest
  1104. * @private
  1105. */
  1106. static async filterImageStreams_(manifest) {
  1107. const imageStreams = [];
  1108. for (const stream of manifest.imageStreams) {
  1109. let mimeType = stream.mimeType;
  1110. if (mimeType == 'application/mp4' && stream.codecs == 'mjpg') {
  1111. mimeType = 'image/jpg';
  1112. }
  1113. if (!shaka.util.StreamUtils.supportedImageMimeTypes_.has(mimeType)) {
  1114. const minImage = shaka.util.StreamUtils.minImage_.get(mimeType);
  1115. if (minImage) {
  1116. // eslint-disable-next-line no-await-in-loop
  1117. const res = await shaka.util.StreamUtils.isImageSupported_(minImage);
  1118. shaka.util.StreamUtils.supportedImageMimeTypes_.set(mimeType, res);
  1119. } else {
  1120. shaka.util.StreamUtils.supportedImageMimeTypes_.set(mimeType, false);
  1121. }
  1122. }
  1123. const keep =
  1124. shaka.util.StreamUtils.supportedImageMimeTypes_.get(mimeType);
  1125. if (!keep) {
  1126. shaka.log.debug('Dropping image stream. Is not supported by the ' +
  1127. 'platform.', stream);
  1128. } else {
  1129. imageStreams.push(stream);
  1130. }
  1131. }
  1132. manifest.imageStreams = imageStreams;
  1133. }
  1134. /**
  1135. * @param {string} minImage
  1136. * @return {!Promise<boolean>}
  1137. * @private
  1138. */
  1139. static isImageSupported_(minImage) {
  1140. return new Promise((resolve) => {
  1141. const imageElement = /** @type {HTMLImageElement} */(new Image());
  1142. imageElement.src = minImage;
  1143. if ('decode' in imageElement) {
  1144. imageElement.decode().then(() => {
  1145. resolve(true);
  1146. }).catch(() => {
  1147. resolve(false);
  1148. });
  1149. } else {
  1150. imageElement.onload = imageElement.onerror = () => {
  1151. resolve(imageElement.height === 2);
  1152. };
  1153. }
  1154. });
  1155. }
  1156. /**
  1157. * @param {shaka.extern.Stream} s0
  1158. * @param {shaka.extern.Stream} s1
  1159. * @return {boolean}
  1160. * @private
  1161. */
  1162. static areStreamsCompatible_(s0, s1) {
  1163. // Basic mime types and basic codecs need to match.
  1164. // For example, we can't adapt between WebM and MP4,
1165. // nor can we adapt between mp4a.* and ec-3.
  1166. // We can switch between text types on the fly,
  1167. // so don't run this check on text.
  1168. if (s0.mimeType != s1.mimeType) {
  1169. return false;
  1170. }
  1171. if (s0.codecs.split('.')[0] != s1.codecs.split('.')[0]) {
  1172. return false;
  1173. }
  1174. return true;
  1175. }
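// Worked examples (illustrative): 'audio/mp4' + 'mp4a.40.2' is compatible
// with 'audio/mp4' + 'mp4a.40.5' (same mime type, same 'mp4a' base codec),
// but not with 'audio/mp4' + 'ec-3' (base codec differs), nor with
// 'audio/webm' + 'opus' (mime type differs).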
  1176. /**
  1177. * @param {shaka.extern.Variant} variant
  1178. * @return {shaka.extern.Track}
  1179. */
  1180. static variantToTrack(variant) {
  1181. const ManifestParserUtils = shaka.util.ManifestParserUtils;
  1182. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1183. /** @type {?shaka.extern.Stream} */
  1184. const audio = variant.audio;
  1185. /** @type {?shaka.extern.Stream} */
  1186. const video = variant.video;
  1187. /** @type {?string} */
  1188. const audioMimeType = audio ? audio.mimeType : null;
  1189. /** @type {?string} */
  1190. const videoMimeType = video ? video.mimeType : null;
  1191. /** @type {?string} */
  1192. const audioCodec = audio ? audio.codecs : null;
  1193. /** @type {?string} */
  1194. const videoCodec = video ? video.codecs : null;
  1195. /** @type {?string} */
  1196. const audioGroupId = audio ? audio.groupId : null;
  1197. /** @type {!Array<string>} */
  1198. const mimeTypes = [];
  1199. if (video) {
  1200. mimeTypes.push(video.mimeType);
  1201. }
  1202. if (audio) {
  1203. mimeTypes.push(audio.mimeType);
  1204. }
  1205. /** @type {?string} */
  1206. const mimeType = mimeTypes[0] || null;
  1207. /** @type {!Array<string>} */
  1208. const kinds = [];
  1209. if (audio) {
  1210. kinds.push(audio.kind);
  1211. }
  1212. if (video) {
  1213. kinds.push(video.kind);
  1214. }
  1215. /** @type {?string} */
  1216. const kind = kinds[0] || null;
  1217. /** @type {!Set<string>} */
  1218. const roles = new Set();
  1219. if (audio) {
  1220. for (const role of audio.roles) {
  1221. roles.add(role);
  1222. }
  1223. }
  1224. if (video) {
  1225. for (const role of video.roles) {
  1226. roles.add(role);
  1227. }
  1228. }
  1229. /** @type {shaka.extern.Track} */
  1230. const track = {
  1231. id: variant.id,
  1232. active: false,
  1233. type: 'variant',
  1234. bandwidth: variant.bandwidth,
  1235. language: variant.language,
  1236. label: null,
  1237. kind: kind,
  1238. width: null,
  1239. height: null,
  1240. frameRate: null,
  1241. pixelAspectRatio: null,
  1242. hdr: null,
  1243. colorGamut: null,
  1244. videoLayout: null,
  1245. mimeType: mimeType,
  1246. audioMimeType: audioMimeType,
  1247. videoMimeType: videoMimeType,
  1248. codecs: '',
  1249. audioCodec: audioCodec,
  1250. videoCodec: videoCodec,
  1251. primary: variant.primary,
  1252. roles: Array.from(roles),
  1253. audioRoles: null,
  1254. forced: false,
  1255. videoId: null,
  1256. audioId: null,
  1257. audioGroupId: audioGroupId,
  1258. channelsCount: null,
  1259. audioSamplingRate: null,
  1260. spatialAudio: false,
  1261. tilesLayout: null,
  1262. audioBandwidth: null,
  1263. videoBandwidth: null,
  1264. originalVideoId: null,
  1265. originalAudioId: null,
  1266. originalTextId: null,
  1267. originalImageId: null,
  1268. accessibilityPurpose: null,
  1269. originalLanguage: null,
  1270. };
  1271. if (video) {
  1272. track.videoId = video.id;
  1273. track.originalVideoId = video.originalId;
  1274. track.width = video.width || null;
  1275. track.height = video.height || null;
  1276. track.frameRate = video.frameRate || null;
  1277. track.pixelAspectRatio = video.pixelAspectRatio || null;
  1278. track.videoBandwidth = video.bandwidth || null;
  1279. track.hdr = video.hdr || null;
  1280. track.colorGamut = video.colorGamut || null;
  1281. track.videoLayout = video.videoLayout || null;
  1282. const dependencyStream = video.dependencyStream;
  1283. if (dependencyStream) {
  1284. track.width = dependencyStream.width || track.width;
  1285. track.height = dependencyStream.height || track.height;
  1286. track.videoCodec = dependencyStream.codecs || track.videoCodec;
  1287. }
  1288. if (videoCodec.includes(',')) {
  1289. track.channelsCount = video.channelsCount;
  1290. track.audioSamplingRate = video.audioSamplingRate;
  1291. track.spatialAudio = video.spatialAudio;
  1292. track.originalLanguage = video.originalLanguage;
  1293. track.audioMimeType = videoMimeType;
  1294. const allCodecs = videoCodec.split(',');
  1295. try {
  1296. track.videoCodec = ManifestParserUtils.guessCodecs(
  1297. ContentType.VIDEO, allCodecs);
  1298. track.audioCodec = ManifestParserUtils.guessCodecs(
  1299. ContentType.AUDIO, allCodecs);
  1300. } catch (e) {
  1301. // Ignore this error.
  1302. }
  1303. }
  1304. }
  1305. if (audio) {
  1306. track.audioId = audio.id;
  1307. track.originalAudioId = audio.originalId;
  1308. track.channelsCount = audio.channelsCount;
  1309. track.audioSamplingRate = audio.audioSamplingRate;
  1310. track.audioBandwidth = audio.bandwidth || null;
  1311. track.spatialAudio = audio.spatialAudio;
  1312. track.label = audio.label;
  1313. track.audioRoles = audio.roles;
  1314. track.accessibilityPurpose = audio.accessibilityPurpose;
  1315. track.originalLanguage = audio.originalLanguage;
  1316. const dependencyStream = audio.dependencyStream;
  1317. if (dependencyStream) {
  1318. track.audioCodec = dependencyStream.codecs || track.audioCodec;
  1319. }
  1320. }
  1321. /** @type {!Array<string>} */
  1322. const codecs = [];
  1323. if (track.videoCodec) {
  1324. codecs.push(track.videoCodec);
  1325. }
  1326. if (track.audioCodec) {
  1327. codecs.push(track.audioCodec);
  1328. }
  1329. track.codecs = codecs.join(', ');
  1330. return track;
  1331. }
  1332. /**
  1333. * @param {shaka.extern.Stream} stream
  1334. * @return {shaka.extern.TextTrack}
  1335. */
  1336. static textStreamToTrack(stream) {
  1337. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1338. /** @type {shaka.extern.TextTrack} */
  1339. const track = {
  1340. id: stream.id,
  1341. active: false,
  1342. type: ContentType.TEXT,
  1343. bandwidth: stream.bandwidth || 0,
  1344. language: stream.language,
  1345. label: stream.label,
  1346. kind: stream.kind || null,
  1347. mimeType: stream.mimeType,
  1348. codecs: stream.codecs || null,
  1349. primary: stream.primary,
  1350. roles: stream.roles,
  1351. accessibilityPurpose: stream.accessibilityPurpose,
  1352. forced: stream.forced,
  1353. originalTextId: stream.originalId,
  1354. originalLanguage: stream.originalLanguage,
  1355. };
  1356. return track;
  1357. }
  1358. /**
  1359. * @param {shaka.extern.Stream} stream
  1360. * @return {shaka.extern.ImageTrack}
  1361. */
  1362. static imageStreamToTrack(stream) {
  1363. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1364. let width = stream.width || null;
  1365. let height = stream.height || null;
  1366. // The stream width and height represent the size of the entire thumbnail
  1367. // sheet, so divide by the layout.
  1368. let reference = null;
1369. // Note: segmentIndex is built by default for HLS but not for DASH;
  1370. // in DASH this information comes at the stream level and not at the
  1371. // segment level.
  1372. if (stream.segmentIndex) {
  1373. reference = stream.segmentIndex.earliestReference();
  1374. }
  1375. let layout = stream.tilesLayout;
  1376. if (reference) {
  1377. layout = reference.getTilesLayout() || layout;
  1378. }
  1379. if (layout && width != null) {
  1380. width /= Number(layout.split('x')[0]);
  1381. }
  1382. if (layout && height != null) {
  1383. height /= Number(layout.split('x')[1]);
  1384. }
  1385. // TODO: What happens if there are multiple grids, with different
  1386. // layout sizes, inside this image stream?
  1387. /** @type {shaka.extern.ImageTrack} */
  1388. const track = {
  1389. id: stream.id,
  1390. type: ContentType.IMAGE,
  1391. bandwidth: stream.bandwidth || 0,
  1392. width,
  1393. height,
  1394. mimeType: stream.mimeType,
  1395. codecs: stream.codecs || null,
  1396. tilesLayout: layout || null,
  1397. originalImageId: stream.originalId,
  1398. };
  1399. return track;
  1400. }
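// Worked example (illustrative numbers): a thumbnail sheet stream of
// 2560x720 with a tilesLayout of '10x5' yields a track whose width is
// 2560 / 10 == 256 and whose height is 720 / 5 == 144, i.e. the size of a
// single tile rather than of the whole sheet.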
  1401. /**
  1402. * Generate and return an ID for this track, since the ID field is optional.
  1403. *
  1404. * @param {TextTrack|AudioTrack|VideoTrack} html5Track
  1405. * @return {number} The generated ID.
  1406. */
  1407. static html5TrackId(html5Track) {
  1408. if (!html5Track['__shaka_id']) {
  1409. html5Track['__shaka_id'] = shaka.util.StreamUtils.nextTrackId_++;
  1410. }
  1411. return html5Track['__shaka_id'];
  1412. }
  1413. /**
  1414. * @param {TextTrack} textTrack
  1415. * @return {shaka.extern.TextTrack}
  1416. */
  1417. static html5TextTrackToTrack(textTrack) {
  1418. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1419. /** @type {shaka.extern.TextTrack} */
  1420. const track = {
  1421. id: shaka.util.StreamUtils.html5TrackId(textTrack),
  1422. active: textTrack.mode != 'disabled',
  1423. type: ContentType.TEXT,
  1424. bandwidth: 0,
  1425. language: shaka.util.LanguageUtils.normalize(textTrack.language || 'und'),
  1426. label: textTrack.label,
  1427. kind: textTrack.kind,
  1428. mimeType: null,
  1429. codecs: null,
  1430. primary: false,
  1431. roles: [],
  1432. accessibilityPurpose: null,
  1433. forced: textTrack.kind == 'forced',
  1434. originalTextId: textTrack.id,
  1435. originalLanguage: textTrack.language,
  1436. };
  1437. if (textTrack.kind == 'captions') {
  1438. // See: https://github.com/shaka-project/shaka-player/issues/6233
  1439. track.mimeType = 'unknown';
  1440. }
  1441. if (textTrack.kind == 'subtitles') {
  1442. track.mimeType = 'text/vtt';
  1443. }
  1444. if (textTrack.kind) {
  1445. track.roles = [textTrack.kind];
  1446. }
  1447. return track;
  1448. }
  1449. /**
  1450. * @param {AudioTrack} audioTrack
  1451. * @return {shaka.extern.AudioTrack}
  1452. */
  1453. static html5AudioTrackToTrack(audioTrack) {
  1454. const language = audioTrack.language;
  1455. /** @type {shaka.extern.AudioTrack} */
  1456. const track = {
  1457. active: audioTrack.enabled,
  1458. language: shaka.util.LanguageUtils.normalize(language || 'und'),
  1459. label: audioTrack.label,
  1460. mimeType: null,
  1461. codecs: null,
  1462. primary: audioTrack.kind == 'main',
  1463. roles: [],
  1464. accessibilityPurpose: null,
  1465. channelsCount: null,
  1466. audioSamplingRate: null,
  1467. spatialAudio: false,
  1468. originalLanguage: language,
  1469. };
  1470. if (audioTrack.kind) {
  1471. track.roles.push(audioTrack.kind);
  1472. }
  1473. if (audioTrack.configuration) {
  1474. if (audioTrack.configuration.codec) {
  1475. track.codecs = audioTrack.configuration.codec;
  1476. }
  1477. if (audioTrack.configuration.sampleRate) {
  1478. track.audioSamplingRate = audioTrack.configuration.sampleRate;
  1479. }
  1480. if (audioTrack.configuration.numberOfChannels) {
  1481. track.channelsCount = audioTrack.configuration.numberOfChannels;
  1482. }
  1483. }
  1484. return track;
  1485. }
  1486. /**
  1487. * @param {?AudioTrack} audioTrack
  1488. * @param {?VideoTrack} videoTrack
  1489. * @return {shaka.extern.Track}
  1490. */
  1491. static html5TrackToShakaTrack(audioTrack, videoTrack) {
  1492. goog.asserts.assert(audioTrack || videoTrack,
  1493. 'There must be at least audioTrack or videoTrack.');
  1494. const LanguageUtils = shaka.util.LanguageUtils;
  1495. const language = audioTrack ? audioTrack.language : null;
  1496. /** @type {shaka.extern.Track} */
  1497. const track = {
  1498. id: shaka.util.StreamUtils.html5TrackId(audioTrack || videoTrack),
  1499. active: audioTrack ? audioTrack.enabled : videoTrack.selected,
  1500. type: 'variant',
  1501. bandwidth: 0,
  1502. language: LanguageUtils.normalize(language || 'und'),
  1503. label: audioTrack ? audioTrack.label : null,
  1504. kind: audioTrack ? audioTrack.kind : null,
  1505. width: null,
  1506. height: null,
  1507. frameRate: null,
  1508. pixelAspectRatio: null,
  1509. hdr: null,
  1510. colorGamut: null,
  1511. videoLayout: null,
  1512. mimeType: null,
  1513. audioMimeType: null,
  1514. videoMimeType: null,
  1515. codecs: null,
  1516. audioCodec: null,
  1517. videoCodec: null,
  1518. primary: audioTrack ? audioTrack.kind == 'main' : false,
  1519. roles: [],
  1520. forced: false,
  1521. audioRoles: null,
  1522. videoId: null,
  1523. audioId: null,
  1524. audioGroupId: null,
  1525. channelsCount: null,
  1526. audioSamplingRate: null,
  1527. spatialAudio: false,
  1528. tilesLayout: null,
  1529. audioBandwidth: null,
  1530. videoBandwidth: null,
  1531. originalVideoId: videoTrack ? videoTrack.id : null,
  1532. originalAudioId: audioTrack ? audioTrack.id : null,
  1533. originalTextId: null,
  1534. originalImageId: null,
  1535. accessibilityPurpose: null,
  1536. originalLanguage: language,
  1537. };
  1538. if (audioTrack && audioTrack.kind) {
  1539. track.roles = [audioTrack.kind];
  1540. track.audioRoles = [audioTrack.kind];
  1541. }
  1542. if (audioTrack && audioTrack.configuration) {
  1543. if (audioTrack.configuration.codec) {
  1544. track.audioCodec = audioTrack.configuration.codec;
  1545. track.codecs = track.audioCodec;
  1546. }
  1547. if (audioTrack.configuration.bitrate) {
  1548. track.audioBandwidth = audioTrack.configuration.bitrate;
  1549. track.bandwidth += track.audioBandwidth;
  1550. }
  1551. if (audioTrack.configuration.sampleRate) {
  1552. track.audioSamplingRate = audioTrack.configuration.sampleRate;
  1553. }
  1554. if (audioTrack.configuration.numberOfChannels) {
  1555. track.channelsCount = audioTrack.configuration.numberOfChannels;
  1556. }
  1557. }
  1558. if (videoTrack && videoTrack.configuration) {
  1559. if (videoTrack.configuration.codec) {
  1560. track.videoCodec = videoTrack.configuration.codec;
  1561. if (track.codecs) {
  1562. track.codecs += ',' + track.videoCodec;
  1563. } else {
  1564. track.codecs = track.videoCodec;
  1565. }
  1566. }
  1567. if (videoTrack.configuration.bitrate) {
  1568. track.videoBandwidth = videoTrack.configuration.bitrate;
  1569. track.bandwidth += track.videoBandwidth;
  1570. }
  1571. if (videoTrack.configuration.framerate) {
  1572. track.frameRate = videoTrack.configuration.framerate;
  1573. }
  1574. if (videoTrack.configuration.width) {
  1575. track.width = videoTrack.configuration.width;
  1576. }
  1577. if (videoTrack.configuration.height) {
  1578. track.height = videoTrack.configuration.height;
  1579. }
  1580. if (videoTrack.configuration.colorSpace &&
  1581. videoTrack.configuration.colorSpace.transfer) {
  1582. switch (videoTrack.configuration.colorSpace.transfer) {
  1583. case 'pq':
  1584. track.hdr = 'PQ';
  1585. break;
  1586. case 'hlg':
  1587. track.hdr = 'HLG';
  1588. break;
  1589. case 'bt709':
  1590. track.hdr = 'SDR';
  1591. break;
  1592. }
  1593. }
  1594. }
  1595. return track;
  1596. }
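// Illustrative sketch (hypothetical inputs): if the audio configuration
// reports codec 'mp4a.40.2' at 128000 bps and the video configuration
// reports codec 'avc1.42E01E' at 2000000 bps, the resulting variant track
// has codecs 'mp4a.40.2,avc1.42E01E' and bandwidth 2128000.
//
//   const variantTrack = shaka.util.StreamUtils.html5TrackToShakaTrack(
//       video.audioTracks[0], video.videoTracks[0]);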
  1597. /**
  1598. * Determines if the given variant is playable.
  1599. * @param {!shaka.extern.Variant} variant
  1600. * @return {boolean}
  1601. */
  1602. static isPlayable(variant) {
  1603. return variant.allowedByApplication &&
  1604. variant.allowedByKeySystem &&
  1605. variant.disabledUntilTime == 0;
  1606. }
  1607. /**
  1608. * Filters out unplayable variants.
  1609. * @param {!Array<!shaka.extern.Variant>} variants
  1610. * @return {!Array<!shaka.extern.Variant>}
  1611. */
  1612. static getPlayableVariants(variants) {
  1613. return variants.filter((variant) => {
  1614. return shaka.util.StreamUtils.isPlayable(variant);
  1615. });
  1616. }
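// Illustrative sketch (assuming a parsed manifest): only variants allowed
// by both the application and the key system, and not temporarily
// disabled, survive the filter.
//
//   const playable = shaka.util.StreamUtils.getPlayableVariants(
//       manifest.variants);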
  1617. /**
  1618. * Chooses streams according to the given config.
  1619. * Works both for Stream and Track types due to their similarities.
  1620. *
  1621. * @param {!Array<!shaka.extern.Stream>|!Array<!shaka.extern.Track>} streams
  1622. * @param {string} preferredLanguage
  1623. * @param {string} preferredRole
  1624. * @param {boolean} preferredForced
  1625. * @return {!Array<!shaka.extern.Stream>|!Array<!shaka.extern.Track>}
  1626. */
  1627. static filterStreamsByLanguageAndRole(
  1628. streams, preferredLanguage, preferredRole, preferredForced) {
  1629. const LanguageUtils = shaka.util.LanguageUtils;
  1630. /** @type {!Array<!shaka.extern.Stream>|!Array<!shaka.extern.Track>} */
  1631. let chosen = streams;
  1632. // Start with the set of primary streams.
  1633. /** @type {!Array<!shaka.extern.Stream>|!Array<!shaka.extern.Track>} */
  1634. const primary = streams.filter((stream) => {
  1635. return stream.primary;
  1636. });
  1637. if (primary.length) {
  1638. chosen = primary;
  1639. }
  1640. // Now reduce the set to one language. This covers both arbitrary language
  1641. // choice and the reduction of the "primary" stream set to one language.
  1642. const firstLanguage = chosen.length ? chosen[0].language : '';
  1643. chosen = chosen.filter((stream) => {
  1644. return stream.language == firstLanguage;
  1645. });
  1646. // Find the streams that best match our language preference. This will
  1647. // override previous selections.
  1648. if (preferredLanguage) {
  1649. const closestLocale = LanguageUtils.findClosestLocale(
  1650. LanguageUtils.normalize(preferredLanguage),
  1651. streams.map((stream) => stream.language));
  1652. // Only replace |chosen| if we found a locale that is close to our
  1653. // preference.
  1654. if (closestLocale) {
  1655. chosen = streams.filter((stream) => {
  1656. const locale = LanguageUtils.normalize(stream.language);
  1657. return locale == closestLocale;
  1658. });
  1659. }
  1660. }
  1661. // Filter by forced preference
  1662. chosen = chosen.filter((stream) => {
  1663. return stream.forced == preferredForced;
  1664. });
  1665. // Now refine the choice based on role preference.
  1666. if (preferredRole) {
  1667. const roleMatches = shaka.util.StreamUtils.filterStreamsByRole_(
  1668. chosen, preferredRole);
  1669. if (roleMatches.length) {
  1670. return roleMatches;
  1671. } else {
  1672. shaka.log.warning('No exact match for the text role could be found.');
  1673. }
  1674. } else {
  1675. // Prefer text streams with no roles, if they exist.
  1676. const noRoleMatches = chosen.filter((stream) => {
  1677. return stream.roles.length == 0;
  1678. });
  1679. if (noRoleMatches.length) {
  1680. return noRoleMatches;
  1681. }
  1682. }
  1683. // Either there was no role preference, or it could not be satisfied.
  1684. // Choose an arbitrary role, if there are any, and filter out any other
  1685. // roles. This ensures we never adapt between roles.
  1686. const allRoles = chosen.map((stream) => {
  1687. return stream.roles;
  1688. }).reduce(shaka.util.Functional.collapseArrays, []);
  1689. if (!allRoles.length) {
  1690. return chosen;
  1691. }
  1692. return shaka.util.StreamUtils.filterStreamsByRole_(chosen, allRoles[0]);
  1693. }
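// Illustrative walkthrough (hypothetical streams): given text streams
// {language: 'en', roles: []}, {language: 'en', roles: ['caption']} and
// {language: 'fr', roles: []}, none of them primary or forced, calling
// with preferredLanguage 'en', preferredRole '' and preferredForced false
// first narrows the set to the 'en' streams and then prefers the one with
// no roles, so only the first stream is returned.
//
//   const chosen = shaka.util.StreamUtils.filterStreamsByLanguageAndRole(
//       manifest.textStreams, 'en', '', /* preferredForced= */ false);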
  1694. /**
  1695. * Filter Streams by role.
  1696. * Works both for Stream and Track types due to their similarities.
  1697. *
  1698. * @param {!Array<!shaka.extern.Stream>|!Array<!shaka.extern.Track>} streams
  1699. * @param {string} preferredRole
  1700. * @return {!Array<!shaka.extern.Stream>|!Array<!shaka.extern.Track>}
  1701. * @private
  1702. */
  1703. static filterStreamsByRole_(streams, preferredRole) {
  1704. return streams.filter((stream) => {
  1705. return stream.roles.includes(preferredRole);
  1706. });
  1707. }
  1708. /**
  1709. * Checks if the given stream is an audio stream.
  1710. *
  1711. * @param {shaka.extern.Stream} stream
  1712. * @return {boolean}
  1713. */
  1714. static isAudio(stream) {
  1715. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1716. return stream.type == ContentType.AUDIO;
  1717. }
  1718. /**
  1719. * Checks if the given stream is a video stream.
  1720. *
  1721. * @param {shaka.extern.Stream} stream
  1722. * @return {boolean}
  1723. */
  1724. static isVideo(stream) {
  1725. const ContentType = shaka.util.ManifestParserUtils.ContentType;
  1726. return stream.type == ContentType.VIDEO;
  1727. }
  1728. /**
  1729. * Get all non-null streams in the variant as an array.
  1730. *
  1731. * @param {shaka.extern.Variant} variant
  1732. * @return {!Array<shaka.extern.Stream>}
  1733. */
  1734. static getVariantStreams(variant) {
  1735. const streams = [];
  1736. if (variant.audio) {
  1737. streams.push(variant.audio);
  1738. }
  1739. if (variant.video) {
  1740. streams.push(variant.video);
  1741. }
  1742. return streams;
  1743. }
  1744. /**
1745. * Indicates whether any of the variant's streams are marked fastSwitching.
  1746. *
  1747. * @param {shaka.extern.Variant} variant
  1748. * @return {boolean}
  1749. */
  1750. static isFastSwitching(variant) {
  1751. if (variant.audio && variant.audio.fastSwitching) {
  1752. return true;
  1753. }
  1754. if (variant.video && variant.video.fastSwitching) {
  1755. return true;
  1756. }
  1757. return false;
  1758. }
  1759. /**
1760. * Set the best matching I-frame stream as the trick-mode video of the original stream.
  1761. *
  1762. * @param {!shaka.extern.Stream} stream
  1763. * @param {!Array<!shaka.extern.Stream>} iFrameStreams
  1764. */
  1765. static setBetterIFrameStream(stream, iFrameStreams) {
  1766. if (!iFrameStreams.length) {
  1767. return;
  1768. }
  1769. const validStreams = iFrameStreams.filter((iFrameStream) =>
  1770. shaka.util.MimeUtils.getNormalizedCodec(stream.codecs) ==
  1771. shaka.util.MimeUtils.getNormalizedCodec(iFrameStream.codecs))
  1772. .sort((a, b) => {
  1773. if (!a.bandwidth || !b.bandwidth || a.bandwidth == b.bandwidth) {
  1774. return (a.width || 0) - (b.width || 0);
  1775. }
  1776. return a.bandwidth - b.bandwidth;
  1777. });
  1778. stream.trickModeVideo = validStreams[0];
  1779. if (validStreams.length > 1) {
  1780. const sameResolutionStream = validStreams.find((iFrameStream) =>
  1781. stream.width == iFrameStream.width &&
  1782. stream.height == iFrameStream.height);
  1783. if (sameResolutionStream) {
  1784. stream.trickModeVideo = sameResolutionStream;
  1785. }
  1786. }
  1787. }
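// Illustrative sketch (hypothetical inputs): for a 1920x1080 avc1 stream
// and I-frame streams of 640x360, 1280x720 and 1920x1080 sharing the same
// normalized codec, the lowest-bandwidth match is assigned first and then
// replaced by the 1920x1080 stream because it matches the original
// resolution exactly.
//
//   shaka.util.StreamUtils.setBetterIFrameStream(stream, iFrameStreams);
//   shaka.log.debug('trick-mode video:', stream.trickModeVideo);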
  1788. /**
1789. * Returns a summary string for a variant, listing the attribute values of
1790. * its audio and/or video streams, for log printing.
  1791. * @param {shaka.extern.Variant} variant
  1792. * @return {string}
  1793. * @private
  1794. */
  1795. static getVariantSummaryString_(variant) {
  1796. const summaries = [];
  1797. if (variant.audio) {
  1798. summaries.push(shaka.util.StreamUtils.getStreamSummaryString_(
  1799. variant.audio));
  1800. }
  1801. if (variant.video) {
  1802. summaries.push(shaka.util.StreamUtils.getStreamSummaryString_(
  1803. variant.video));
  1804. }
  1805. return summaries.join(', ');
  1806. }
  1807. /**
1808. * Returns a summary string for an audio or video stream, for log printing.
  1809. * @param {shaka.extern.Stream} stream
  1810. * @return {string}
  1811. * @private
  1812. */
  1813. static getStreamSummaryString_(stream) {
  1814. // Accepted parameters for Chromecast can be found (internally) at
  1815. // go/cast-mime-params
  1816. if (shaka.util.StreamUtils.isAudio(stream)) {
  1817. return 'type=audio' +
  1818. ' codecs=' + stream.codecs +
1819. ' bandwidth=' + stream.bandwidth +
  1820. ' channelsCount=' + stream.channelsCount +
  1821. ' audioSamplingRate=' + stream.audioSamplingRate;
  1822. }
  1823. if (shaka.util.StreamUtils.isVideo(stream)) {
  1824. return 'type=video' +
  1825. ' codecs=' + stream.codecs +
  1826. ' bandwidth=' + stream.bandwidth +
  1827. ' frameRate=' + stream.frameRate +
  1828. ' width=' + stream.width +
  1829. ' height=' + stream.height;
  1830. }
  1831. return 'unexpected stream type';
  1832. }
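// Illustrative output (single line, wrapped here for width): a video stream
// with codecs 'avc1.42E01E', bandwidth 2000000, frameRate 30, width 1280
// and height 720 yields:
//   type=video codecs=avc1.42E01E bandwidth=2000000 frameRate=30
//   width=1280 height=720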
  1833. /**
  1834. * Clears underlying decoding config cache.
  1835. */
  1836. static clearDecodingConfigCache() {
  1837. shaka.util.StreamUtils.decodingConfigCache_.clear();
  1838. }
  1839. /**
  1840. * Check if we should show text on screen automatically.
  1841. *
  1842. * @param {?shaka.extern.Stream} audioStream
  1843. * @param {shaka.extern.Stream} textStream
  1844. * @param {!shaka.extern.PlayerConfiguration} config
  1845. * @return {boolean}
  1846. */
  1847. static shouldInitiallyShowText(audioStream, textStream, config) {
  1848. const AutoShowText = shaka.config.AutoShowText;
  1849. if (config.autoShowText == AutoShowText.NEVER) {
  1850. return false;
  1851. }
  1852. if (config.autoShowText == AutoShowText.ALWAYS) {
  1853. return true;
  1854. }
  1855. const LanguageUtils = shaka.util.LanguageUtils;
  1856. /** @type {string} */
  1857. const preferredTextLocale =
  1858. LanguageUtils.normalize(config.preferredTextLanguage);
  1859. /** @type {string} */
  1860. const textLocale = LanguageUtils.normalize(textStream.language);
  1861. if (config.autoShowText == AutoShowText.IF_PREFERRED_TEXT_LANGUAGE) {
  1862. // Only the text language match matters.
  1863. return LanguageUtils.areLanguageCompatible(
  1864. textLocale,
  1865. preferredTextLocale);
  1866. }
  1867. if (config.autoShowText == AutoShowText.IF_SUBTITLES_MAY_BE_NEEDED) {
  1868. if (!audioStream) {
  1869. return false;
  1870. }
  1871. /* The text should automatically be shown if the text is
  1872. * language-compatible with the user's text language preference, but not
  1873. * compatible with the audio. These are cases where we deduce that
  1874. * subtitles may be needed.
  1875. *
  1876. * For example:
1877. *   preferred | chosen | chosen |
1878. *     text    |  text  | audio  | show
1879. *   -----------------------------------
1880. *    en-CA    |   en   |   jp   | true
1881. *    en       |  en-US |   fr   | true
1882. *    fr-CA    |  en-US |   jp   | false
1883. *    en-CA    |  en-US |  en-US | false
  1884. *
  1885. */
  1886. /** @type {string} */
  1887. const audioLocale = LanguageUtils.normalize(audioStream.language);
  1888. return (
  1889. LanguageUtils.areLanguageCompatible(textLocale, preferredTextLocale) &&
  1890. !LanguageUtils.areLanguageCompatible(audioLocale, textLocale));
  1891. }
  1892. shaka.log.alwaysWarn('Invalid autoShowText setting!');
  1893. return false;
  1894. }
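// Illustrative sketch (hypothetical config and streams): with autoShowText
// set to IF_SUBTITLES_MAY_BE_NEEDED, preferredTextLanguage 'en', an 'en-US'
// text stream and a 'ja' audio stream, this returns true: the text matches
// the preference but the audio does not match the text.
//
//   const show = shaka.util.StreamUtils.shouldInitiallyShowText(
//       audioStream, textStream, player.getConfiguration());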
  1895. };
  1896. /**
  1897. * A cache of results from mediaCapabilities.decodingInfo, indexed by the
  1898. * (stringified) decodingConfig.
  1899. *
  1900. * @type {Map<string, !MediaCapabilitiesDecodingInfo>}
  1901. * @private
  1902. */
  1903. shaka.util.StreamUtils.decodingConfigCache_ = new Map();
  1904. /** @private {number} */
  1905. shaka.util.StreamUtils.nextTrackId_ = 0;
  1906. /**
  1907. * @enum {string}
  1908. */
  1909. shaka.util.StreamUtils.DecodingAttributes = {
  1910. SMOOTH: 'smooth',
  1911. POWER: 'powerEfficient',
  1912. };
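// Illustrative note: these values mirror boolean fields on
// MediaCapabilitiesDecodingInfo and are the values accepted by the player's
// preferredDecodingAttributes configuration, e.g. (hypothetical usage):
//
//   player.configure('preferredDecodingAttributes', ['smooth']);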
  1913. /**
  1914. * @private {!Map<string, boolean>}
  1915. */
  1916. shaka.util.StreamUtils.supportedImageMimeTypes_ = new Map()
  1917. .set('image/svg+xml', true)
  1918. .set('image/png', true)
  1919. .set('image/jpeg', true)
  1920. .set('image/jpg', true);
  1921. /**
  1922. * @const {string}
  1923. * @private
  1924. */
  1925. // cspell: disable-next-line
  1926. shaka.util.StreamUtils.minWebPImage_ = 'data:image/webp;base64,UklGRjoAAABXRU' +
  1927. 'JQVlA4IC4AAACyAgCdASoCAAIALmk0mk0iIiIiIgBoSygABc6WWgAA/veff/0PP8bA//LwY' +
  1928. 'AAA';
  1929. /**
  1930. * @const {string}
  1931. * @private
  1932. */
  1933. // cspell: disable-next-line
  1934. shaka.util.StreamUtils.minAvifImage_ = 'data:image/avif;base64,AAAAIGZ0eXBhdm' +
  1935. 'lmAAAAAGF2aWZtaWYxbWlhZk1BMUIAAADybWV0YQAAAAAAAAAoaGRscgAAAAAAAAAAcGljd' +
  1936. 'AAAAAAAAAAAAAAAAGxpYmF2aWYAAAAADnBpdG0AAAAAAAEAAAAeaWxvYwAAAABEAAABAAEA' +
  1937. 'AAABAAABGgAAAB0AAAAoaWluZgAAAAAAAQAAABppbmZlAgAAAAABAABhdjAxQ29sb3IAAAA' +
  1938. 'AamlwcnAAAABLaXBjbwAAABRpc3BlAAAAAAAAAAIAAAACAAAAEHBpeGkAAAAAAwgICAAAAA' +
  1939. 'xhdjFDgQ0MAAAAABNjb2xybmNseAACAAIAAYAAAAAXaXBtYQAAAAAAAAABAAEEAQKDBAAAA' +
  1940. 'CVtZGF0EgAKCBgANogQEAwgMg8f8D///8WfhwB8+ErK42A=';
  1941. /**
  1942. * @const {!Map<string, string>}
  1943. * @private
  1944. */
  1945. shaka.util.StreamUtils.minImage_ = new Map()
  1946. .set('image/webp', shaka.util.StreamUtils.minWebPImage_)
  1947. .set('image/avif', shaka.util.StreamUtils.minAvifImage_);