add channel type constant
davidkim9 committed Dec 15, 2017
1 parent 54717c7 commit ea1ae42
Showing 7 changed files with 53 additions and 27 deletions.
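This commit replaces hard-coded 'audio'/'video' string keys with constants from a new src/media-channels.js module, and swaps the single frag.audioOnly flag for a per-fragment contentTypes Set. A minimal sketch of the resulting pattern (illustrative only, not taken from the diff; the frag object and the import path, assumed relative to src/, are stand-ins):

import * as MediaChannels from './media-channels';

// Stand-in fragment object; real fragments come from the playlist loader.
const frag = { sn: 1, level: 0 };

// Stream controllers tag a fragment with every elementary stream type parsed
// out of it, instead of setting a single audioOnly boolean.
frag.contentTypes = frag.contentTypes || new Set();
frag.contentTypes.add(MediaChannels.AUDIO);
frag.contentTypes.add(MediaChannels.VIDEO);

// Consumers key their lookups off the same constants.
if (frag.contentTypes.has(MediaChannels.AUDIO)) {
  // e.g. inspect sourceBuffer[MediaChannels.AUDIO] time ranges
}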
16 changes: 10 additions & 6 deletions src/controller/audio-stream-controller.js
@@ -13,6 +13,7 @@ import {ErrorDetails} from '../errors';
import {logger} from '../utils/logger';
import { findFragWithCC } from '../utils/discontinuities';
import {FragmentState} from '../helper/fragment-tracker';
+ import * as MediaChannels from '../media-channels';

const State = {
STOPPED : 'STOPPED',
@@ -341,14 +342,12 @@ class AudioStreamController extends EventHandler {
} else {
logger.log(`Loading ${frag.sn}, cc: ${frag.cc} of [${trackDetails.startSN} ,${trackDetails.endSN}],track ${trackId}, currentTime:${pos},bufferEnd:${bufferEnd.toFixed(3)}`);
// Check if fragment is not loaded
- let ftState = this.fragmentTracker.getState(frag);
- if(ftState === FragmentState.NOT_LOADED) {
+ if(this.fragmentTracker.getState(frag) === FragmentState.NOT_LOADED) {
this.fragCurrent = frag;
this.startFragRequested = true;
if (!isNaN(frag.sn)) {
this.nextLoadPosition = frag.start + frag.duration;
}
- frag.audioOnly = true;
hls.trigger(Event.FRAG_LOADING, {frag: frag});
this.state = State.FRAG_LOADING;
}
@@ -617,12 +616,12 @@ class AudioStreamController extends EventHandler {
let tracks = data.tracks, track;

// delete any video track found on audio demuxer
- if (tracks.video) {
- delete tracks.video;
+ if (tracks[MediaChannels.VIDEO]) {
+ delete tracks[MediaChannels.VIDEO];
}

// include levelCodec in audio and video tracks
- track = tracks.audio;
+ track = tracks[MediaChannels.AUDIO];
if(track) {
track.levelCodec = track.codec;
track.id = data.id;
@@ -664,6 +663,11 @@ class AudioStreamController extends EventHandler {
data.endDTS = data.startDTS + fragCurrent.duration;
}

+ if(!fragCurrent.contentTypes) {
+ fragCurrent.contentTypes = new Set();
+ }
+ fragCurrent.contentTypes.add(data.type);

logger.log(`parsed ${data.type},PTS:[${data.startPTS.toFixed(3)},${data.endPTS.toFixed(3)}],DTS:[${data.startDTS.toFixed(3)}/${data.endDTS.toFixed(3)}],nb:${data.nb}`);
LevelHelper.updateFragPTSDTS(track.details,fragCurrent,data.startPTS,data.endPTS);

7 changes: 4 additions & 3 deletions src/controller/buffer-controller.js
@@ -7,6 +7,7 @@ import EventHandler from '../event-handler';
import {logger} from '../utils/logger';
import {ErrorTypes, ErrorDetails} from '../errors';
import {getMediaSource} from '../helper/mediasource-helper';
+ import * as MediaChannels from '../media-channels';

const MediaSource = getMediaSource();

@@ -48,7 +49,7 @@ class BufferController extends EventHandler {

onLevelPtsUpdated(data) {
let type = data.type;
- let audioTrack = this.tracks.audio;
+ let audioTrack = this.tracks[MediaChannels.AUDIO];

// Adjusting `SourceBuffer.timestampOffset` (desired point in the timeline where the next frames should be appended)
// in Chrome browser when we detect MPEG audio container and time delta between level PTS and `SourceBuffer.timestampOffset`
@@ -58,7 +59,7 @@ class BufferController extends EventHandler {
// More info here: https://github.com/video-dev/hls.js/issues/332#issuecomment-257986486

if (type === 'audio' && audioTrack && audioTrack.container === 'audio/mpeg') { // Chrome audio mp3 track
- let audioBuffer = this.sourceBuffer.audio;
+ let audioBuffer = this.sourceBuffer[MediaChannels.AUDIO];
let delta = Math.abs(audioBuffer.timestampOffset - data.start);

// adjust timestamp offset if time delta is greater than 100ms
@@ -203,7 +204,7 @@ class BufferController extends EventHandler {
onSBUpdateEnd() {
// update timestampOffset
if (this.audioTimestampOffset) {
- let audioBuffer = this.sourceBuffer.audio;
+ let audioBuffer = this.sourceBuffer[MediaChannels.AUDIO];
logger.warn('change mpeg audio timestamp offset from ' + audioBuffer.timestampOffset + ' to ' + this.audioTimestampOffset);
audioBuffer.timestampOffset = this.audioTimestampOffset;
delete this.audioTimestampOffset;
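As the comment in the hunk above explains, the MPEG-audio workaround nudges the audio SourceBuffer's timestampOffset when the level PTS drifts. A condensed sketch of that flow using the new constant (illustrative; the real controller also defers the change while the SourceBuffer is busy updating):

let audioBuffer = this.sourceBuffer[MediaChannels.AUDIO];
let delta = Math.abs(audioBuffer.timestampOffset - data.start);
if (delta > 0.1) {
  // remember the desired offset; onSBUpdateEnd applies it once the buffer is idle
  this.audioTimestampOffset = data.start;
}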
20 changes: 12 additions & 8 deletions src/controller/stream-controller.js
@@ -13,7 +13,7 @@ import TimeRanges from '../utils/timeRanges';
import {ErrorTypes, ErrorDetails} from '../errors';
import {logger} from '../utils/logger';
import { alignDiscontinuities } from '../utils/discontinuities';

+ import * as MediaChannels from '../media-channels';

const State = {
STOPPED : 'STOPPED',
@@ -511,7 +511,6 @@ class StreamController extends EventHandler {
if(frag.backtracked || fragState === FragmentState.NOT_LOADED) {
frag.autoLevel = this.hls.autoLevelEnabled;
frag.bitrateTest = this.bitrateTest;
- frag.audioOnly = false;

this.hls.trigger(Event.FRAG_LOADING, {frag: frag});
// lazy demuxer init, as this could take some time ... do it during frag loading
@@ -1092,19 +1091,24 @@
data.id === 'main' &&
fragNew.sn === fragCurrent.sn &&
fragNew.level === fragCurrent.level &&
- !(data.type === 'audio' && this.altAudio) && // filter out main audio if audio track is loaded through audio stream controller
+ !(data.type === MediaChannels.AUDIO && this.altAudio) && // filter out main audio if audio track is loaded through audio stream controller
this.state === State.PARSING) {
var level = this.levels[this.level],
frag = fragCurrent;
- if (isNaN(data.endPTS)) {
- data.endPTS = data.startPTS + fragCurrent.duration;
- data.endDTS = data.startDTS + fragCurrent.duration;
- }
+ if (isNaN(data.endPTS)) {
+ data.endPTS = data.startPTS + fragCurrent.duration;
+ data.endDTS = data.startDTS + fragCurrent.duration;
+ }

+ if(!frag.contentTypes) {
+ frag.contentTypes = new Set();
+ }
+ frag.contentTypes.add(data.type);

logger.log(`Parsed ${data.type},PTS:[${data.startPTS.toFixed(3)},${data.endPTS.toFixed(3)}],DTS:[${data.startDTS.toFixed(3)}/${data.endDTS.toFixed(3)}],nb:${data.nb},dropped:${data.dropped || 0}`);

// Detect gaps in a fragment and try to fix it by finding a keyframe in the previous fragment (see _findFragments)
- if(data.type === 'video') {
+ if(data.type === MediaChannels.VIDEO) {
frag.dropped = data.dropped;
if (frag.dropped) {
if (!frag.backtracked) {
4 changes: 1 addition & 3 deletions src/helper/fragment-tracker.js
@@ -66,17 +66,15 @@ export class FragmentTracker extends EventHandler {
* @param {Object} fragment Check the fragment against all sourceBuffers loaded
*/
detectPartialFragments(fragment) {
- let fragmentBuffered;
let fragKey = this.getFragmentKey(fragment);
let fragmentEntity = this.fragments[fragKey];
fragmentEntity.buffered = true;
let timeRange;
for(let type in this.timeRanges) {
if (this.timeRanges.hasOwnProperty(type)) {
- if((fragment.audioOnly === true && type === 'audio') || fragment.audioOnly === false) {
+ if(fragment.contentTypes.has(type) === true) {
timeRange = this.timeRanges[type];
// Check for malformed fragments
- fragmentBuffered = [];
// Gaps need to be calculated for each type
fragmentEntity.range[type] = this.getBufferedTimes(fragment.startPTS, fragment.endPTS, timeRange);
}
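Taken together with the controller changes above, detectPartialFragments now walks the per-type TimeRanges and only computes buffered ranges for channels the fragment actually produced. A condensed sketch of that loop (same names as the diff, simplified for illustration):

for (let type in this.timeRanges) {
  if (this.timeRanges.hasOwnProperty(type) && fragment.contentTypes.has(type)) {
    // gaps are calculated separately for each media channel
    fragmentEntity.range[type] = this.getBufferedTimes(fragment.startPTS, fragment.endPTS, this.timeRanges[type]);
  }
}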
17 changes: 17 additions & 0 deletions src/media-channels.js
@@ -0,0 +1,17 @@
/**
* Audio channel constant for source buffers and tracks
* eg. sourceBuffer[MediaChannels.AUDIO] instead of sourceBuffer.audio
* @constant
* @default
* @type {string}
*/
export const AUDIO = 'audio';

/**
* Video channel constant for source buffers and tracks
* eg. sourceBuffer[MediaChannels.VIDEO] instead of sourceBuffer.video
* @constant
* @default
* @type {string}
*/
export const VIDEO = 'video';
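Because the module exports plain strings, the constants are drop-in replacements for the old hard-coded keys. A short usage sketch (the relative import path assumes a caller one directory below src/, as in the controllers above):

import * as MediaChannels from '../media-channels';

// Bracket access with the constant is equivalent to the old dot access:
// tracks[MediaChannels.AUDIO] === tracks.audio
console.log(MediaChannels.AUDIO); // 'audio'
console.log(MediaChannels.VIDEO); // 'video'

const tracks = {};
tracks[MediaChannels.AUDIO] = { codec: 'mp4a.40.2', container: 'audio/mp4' };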
5 changes: 3 additions & 2 deletions src/remux/mp4-remuxer.js
@@ -6,6 +6,7 @@
import AAC from '../helper/aac';
import Event from '../events';
import {logger} from '../utils/logger';
+ import * as MediaChannels from '../media-channels';
import MP4 from '../remux/mp4-generator';
import {ErrorTypes, ErrorDetails} from '../errors';

@@ -128,7 +129,7 @@ class MP4Remuxer {
audioTrack.codec = 'mp3';
}
}
- tracks.audio = {
+ tracks[MediaChannels.AUDIO] = {
container : container,
codec : audioTrack.codec,
initSegment : !audioTrack.isAAC && typeSupported.mpeg ? new Uint8Array() : MP4.initSegment([audioTrack]),
@@ -147,7 +148,7 @@
// we use input time scale straight away to avoid rounding issues on frame duration / cts computation
const inputTimeScale = videoTrack.inputTimeScale;
videoTrack.timescale = inputTimeScale;
- tracks.video = {
+ tracks[MediaChannels.VIDEO] = {
container : 'video/mp4',
codec : videoTrack.codec,
initSegment : MP4.initSegment([videoTrack]),
11 changes: 6 additions & 5 deletions tests/unit/helper/fragment-tracker.js
@@ -3,6 +3,7 @@ import Event from "../../../src/events";
const assert = require('assert');

import {FragmentTracker, FragmentState} from '../../../src/helper/fragment-tracker';
+ import * as MediaChannels from '../../../src/media-channels';
import Hls from '../../../src/hls';

function createMockBuffer(buffered) {
@@ -25,7 +26,7 @@ describe('FragmentTracker', () => {
endPTS: 1,
sn: 1,
level: 1,
- audioOnly: false
+ contentTypes: new Set([MediaChannels.AUDIO, MediaChannels.VIDEO])
};
hls.trigger(Event.FRAG_LOADED, { frag: fragment });

@@ -73,7 +74,7 @@ describe('FragmentTracker', () => {
endPTS: 1,
sn: 1,
level: 0,
- audioOnly: false
+ contentTypes: new Set([MediaChannels.AUDIO, MediaChannels.VIDEO])
};
hls.trigger(Event.FRAG_LOADED, { frag: fragment });
};
@@ -170,7 +171,7 @@ describe('FragmentTracker', () => {
endPTS: 1,
sn: 1,
level: 1,
- audioOnly: false
+ contentTypes: new Set([MediaChannels.AUDIO, MediaChannels.VIDEO])
};
hls.trigger(Event.FRAG_LOADED, { frag: fragment });

@@ -201,7 +202,7 @@ describe('FragmentTracker', () => {
endPTS: 1,
sn: 1,
level: 1,
- audioOnly: false
+ contentTypes: new Set([MediaChannels.AUDIO, MediaChannels.VIDEO])
};
hls.trigger(Event.FRAG_LOADED, { frag: fragment });
hls.trigger(Event.BUFFER_APPENDED, {
@@ -231,7 +232,7 @@ describe('FragmentTracker', () => {
endPTS: 1,
sn: 1,
level: 1,
- audioOnly: true
+ contentTypes: new Set([MediaChannels.AUDIO])
};
hls.trigger(Event.FRAG_LOADED, { frag: fragment });
hls.trigger(Event.BUFFER_APPENDED, {
