Skip to content

Commit

Permalink
add audio-only support to fragment tracker
Browse files Browse the repository at this point in the history
  • Loading branch information
David Kim authored and David Kim committed Dec 8, 2017
1 parent 515cd2e commit d0e690b
Show file tree
Hide file tree
Showing 3 changed files with 122 additions and 54 deletions.
2 changes: 1 addition & 1 deletion src/controller/stream-controller.js
Original file line number Diff line number Diff line change
Expand Up @@ -503,7 +503,7 @@ class StreamController extends EventHandler {
logger.log(`Loading ${frag.sn} of [${levelDetails.startSN} ,${levelDetails.endSN}],level ${level}, currentTime:${pos.toFixed(3)},bufferEnd:${bufferEnd.toFixed(3)}`);
// Check if fragment is attempting to load or already loaded with bad PTS
let ftState = this.fragmentTracker.getState(frag);
if(frag.backtracked === true || (ftState !== FragmentTrackerState.LOADING_BUFFER && ftState !== FragmentTrackerState.PARTIAL)) {
if(ftState !== FragmentTrackerState.LOADING_BUFFER && ftState !== FragmentTrackerState.PARTIAL) {
frag.autoLevel = this.hls.autoLevelEnabled;
frag.bitrateTest = this.bitrateTest;

Expand Down
66 changes: 34 additions & 32 deletions src/helper/fragment-tracker.js
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ import Event from '../events';

const bufferPadding = 0.2;
function getFragmentKey(fragment) {
return fragment.level + '_' + fragment.sn;
return `${fragment.type}_${fragment.level}_${fragment.sn}`;
}

export const FragmentTrackerState = {
Expand Down Expand Up @@ -91,43 +91,45 @@ export class FragmentTracker extends EventHandler {
let fragKey = getFragmentKey(fragment);
for(let type in this.timeRanges) {
if (this.timeRanges.hasOwnProperty(type)) {
let timeRange = this.timeRanges[type];

// Check for malformed fragments
fragmentGaps = [];
for (let i = 0; i < timeRange.length; i++) {
startTime = timeRange.start(i) - bufferPadding;
endTime = timeRange.end(i) + bufferPadding;

if (fragment.startPTS >= startTime && fragment.endPTS <= endTime) {
// Fragment is entirely contained in buffer
// No need to check the other timeRange times since it's completely playable
break;
} else if (fragment.startPTS < endTime && fragment.endPTS > startTime) {
// Check for intersection with buffer
// Get playable sections of the fragment
fragmentGaps.push({
startPTS: Math.max(fragment.startPTS, timeRange.start(i)),
endPTS: Math.min(fragment.endPTS, timeRange.end(i))
});
if(fragment.type === 'main' || fragment.type === type) {
let timeRange = this.timeRanges[type];

// Check for malformed fragments
fragmentGaps = [];
for (let i = 0; i < timeRange.length; i++) {
startTime = timeRange.start(i) - bufferPadding;
endTime = timeRange.end(i) + bufferPadding;
if (fragment.startPTS >= startTime && fragment.endPTS <= endTime) {
// Fragment is entirely contained in buffer
// No need to check the other timeRange times since it's completely playable
break;
} else if (fragment.startPTS < endTime && fragment.endPTS > startTime) {
// Check for intersection with buffer
// Get playable sections of the fragment
fragmentGaps.push({
startPTS: Math.max(fragment.startPTS, timeRange.start(i)),
endPTS: Math.min(fragment.endPTS, timeRange.end(i))
});
}
}
}

if (fragmentGaps.length > 0) {
if(this.config.debug) {
let fragmentGapString = '';
for (let key in fragmentGaps) {
let time = fragmentGaps[key];
fragmentGapString += `[${time.startPTS}, ${time.endPTS}]`;
if (fragmentGaps.length > 0) {
if(this.config.debug) {
let fragmentGapString = '';
for (let key in fragmentGaps) {
let time = fragmentGaps[key];
fragmentGapString += `[${time.startPTS}, ${time.endPTS}]`;
}
logger.warn(`fragment-tracker: fragment with malformed PTS detected(${type}), level: ${fragment.level} sn: ${fragment.sn} startPTS: ${fragment.startPTS} endPTS: ${fragment.endPTS} loadedPTS: ${fragmentGapString}`);
}
logger.warn(`fragment-tracker: fragment with malformed PTS detected(${type}), level: ${fragment.level} sn: ${fragment.sn} startPTS: ${fragment.startPTS} endPTS: ${fragment.endPTS} loadedPTS: ${fragmentGapString}`);
}

if(!this.partialFragmentTimes[type]) {
this.partialFragmentTimes[type] = {};
if(!this.partialFragmentTimes[type]) {
this.partialFragmentTimes[type] = {};
}
this.partialFragmentTimes[type][fragKey] = fragmentGaps;
this.partialFragments[fragKey] = fragment;
}
this.partialFragmentTimes[type][fragKey] = fragmentGaps;
this.partialFragments[fragKey] = fragment;
}
}
}
Expand Down
108 changes: 87 additions & 21 deletions tests/unit/helper/fragment-tracker.js
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ function createMockBuffer(buffered) {
};
}

describe.only('FragmentTracker', () => {
describe('FragmentTracker', () => {
describe('getPartialFragment', () => {
let hls, fragmentTracker, fragment, buffered, partialFragment;

Expand All @@ -25,6 +25,7 @@ describe.only('FragmentTracker', () => {
endPTS: 1,
sn: 1,
level: 1,
type: 'main'
};
hls.trigger(Event.FRAG_LOADED, { frag: fragment });

Expand All @@ -35,10 +36,12 @@ describe.only('FragmentTracker', () => {
},
]);

hls.trigger(Event.BUFFER_APPENDED, {timeRanges: {
hls.trigger(Event.BUFFER_APPENDED, {
timeRanges: {
video: buffered,
audio: buffered
}});
}
});

hls.trigger(Event.FRAG_BUFFERED, { frag: fragment });

Expand All @@ -58,7 +61,7 @@ describe.only('FragmentTracker', () => {
});

describe('getState', () => {
let hls, fragmentTracker, fragment, segment, buffered;
let hls, fragmentTracker, fragment, buffered;

hls = new Hls({});
fragmentTracker = new FragmentTracker(hls);
Expand All @@ -68,10 +71,7 @@ describe.only('FragmentTracker', () => {
endPTS: 1,
sn: 1,
level: 1,
};
segment = {
type: 'video',
fragment: fragment
type: 'main'
};
hls.trigger(Event.FRAG_LOADED, { frag: fragment });

Expand All @@ -86,10 +86,12 @@ describe.only('FragmentTracker', () => {
endPTS: 1
},
]);
hls.trigger(Event.BUFFER_APPENDED, {timeRanges: {
hls.trigger(Event.BUFFER_APPENDED, {
timeRanges: {
video: buffered,
audio: buffered
}});
}
});

hls.trigger(Event.FRAG_BUFFERED, { frag: fragment });

Expand All @@ -103,10 +105,12 @@ describe.only('FragmentTracker', () => {
endPTS: 2
},
]);
hls.trigger(Event.BUFFER_APPENDED, {timeRanges: {
hls.trigger(Event.BUFFER_APPENDED, {
timeRanges: {
video: buffered,
audio: buffered
}});
}
});

hls.trigger(Event.FRAG_BUFFERED, { frag: fragment });

Expand All @@ -120,10 +124,12 @@ describe.only('FragmentTracker', () => {
endPTS: 2
},
]);
hls.trigger(Event.BUFFER_APPENDED, {timeRanges: {
hls.trigger(Event.BUFFER_APPENDED, {
timeRanges: {
video: buffered,
audio: buffered
}});
}
});
hls.trigger(Event.FRAG_BUFFERED, { frag: fragment });

assert.strictEqual(fragmentTracker.getState(fragment), FragmentTrackerState.PARTIAL);
Expand All @@ -135,15 +141,34 @@ describe.only('FragmentTracker', () => {
endPTS: 2
},
]);
hls.trigger(Event.BUFFER_APPENDED, {timeRanges: {
hls.trigger(Event.BUFFER_APPENDED, {
timeRanges: {
video: buffered,
audio: buffered
}});
}
});
assert.strictEqual(fragmentTracker.getState(fragment), FragmentTrackerState.NONE);
});
});

describe('onFragBuffered', () => {
let hls, fragmentTracker, fragment;

hls = new Hls({});
fragmentTracker = new FragmentTracker(hls);

it('supports audio buffer', () => {
hls.trigger(Event.BUFFER_APPENDED, {timeRanges: {
fragment = {
startPTS: 0,
endPTS: 1,
sn: 1,
level: 1,
type: 'main'
};
hls.trigger(Event.FRAG_LOADED, { frag: fragment });

hls.trigger(Event.BUFFER_APPENDED, {
timeRanges: {
video: createMockBuffer([
{
startPTS: 0,
Expand All @@ -156,14 +181,24 @@ describe.only('FragmentTracker', () => {
endPTS: 2
},
])
}});
}
});
hls.trigger(Event.FRAG_BUFFERED, { frag: fragment });

assert.strictEqual(fragmentTracker.getState(fragment), FragmentTrackerState.PARTIAL);
});

it('supports video', () => {
hls.trigger(Event.BUFFER_APPENDED, {timeRanges: {
it('supports video buffer', () => {
fragment = {
startPTS: 0,
endPTS: 1,
sn: 1,
level: 1,
type: 'main'
};
hls.trigger(Event.FRAG_LOADED, { frag: fragment });
hls.trigger(Event.BUFFER_APPENDED, {
timeRanges: {
video: createMockBuffer([
{
startPTS: 0.5,
Expand All @@ -176,10 +211,41 @@ describe.only('FragmentTracker', () => {
endPTS: 2
},
])
}});
}
});
hls.trigger(Event.FRAG_BUFFERED, { frag: fragment });

assert.strictEqual(fragmentTracker.getState(fragment), FragmentTrackerState.PARTIAL);
});

it('supports audio only buffer', () => {
fragment = {
startPTS: 0,
endPTS: 1,
sn: 1,
level: 1,
type: 'audio'
};
hls.trigger(Event.FRAG_LOADED, { frag: fragment });
hls.trigger(Event.BUFFER_APPENDED, {
timeRanges: {
video: createMockBuffer([
{
startPTS: 0.5,
endPTS: 2
},
]),
audio: createMockBuffer([
{
startPTS: 0,
endPTS: 2
},
])
}
});
hls.trigger(Event.FRAG_BUFFERED, { frag: fragment });

assert.strictEqual(fragmentTracker.getState(fragment), FragmentTrackerState.NONE);
});
});
});

0 comments on commit d0e690b

Please sign in to comment.