Shared memory rewrite

Thulinma 2014-04-04 19:50:40 +02:00
parent afcddbfca6
commit cd2fe225c5
81 changed files with 7775 additions and 5411 deletions


@@ -220,7 +220,7 @@ TAB_SIZE = 2
# "Side Effects:". You can put \n's in the value part of an alias to insert
# newlines.
ALIASES =
ALIASES = "api=\xrefitem api \"API call\" \"API calls\""
# This tag can be used to specify a number of word-keyword mappings (TCL only).
# A mapping has the form "name=value". For example adding "class=itcl::class"
@@ -794,7 +794,7 @@ EXCLUDE_SYMLINKS = NO
# Note that the wildcards are matched against the file with absolute path, so to
# exclude all test directories for example use the pattern */test/*
EXCLUDE_PATTERNS = */.git/*
EXCLUDE_PATTERNS = */.git/* */tinythread.*
# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names
# (namespaces, classes, functions, etc.) that should be excluded from the

Makefile

@@ -23,7 +23,7 @@ LDLIBS = -lmist -lrt
.DEFAULT_GOAL := all
all: controller buffers connectors analysers converters
all: MistConnHTTP controller analysers inputs outputs
DOXYGEN := $(shell doxygen -v 2> /dev/null)
ifdef DOXYGEN
@@ -33,71 +33,15 @@ $(warning Doxygen not installed - not building source documentation.)
endif
controller: MistController
MistController: override LDLIBS += $(THREADLIB)
MistController: src/controller/server.html.h src/controller/*
$(CXX) $(LDFLAGS) $(CPPFLAGS) src/controller/*.cpp $(LDLIBS) -o $@
buffers: MistPlayer
MistPlayer: src/buffer/player.cpp
$(CXX) $(LDFLAGS) $(CPPFLAGS) $^ $(LDLIBS) -o $@
buffers: MistBuffer
MistBuffer: override LDLIBS += $(THREADLIB)
MistBuffer: src/buffer/buffer.cpp src/buffer/buffer_stream.h src/buffer/buffer_stream.cpp
$(CXX) $(LDFLAGS) $(CPPFLAGS) src/buffer/buffer.cpp src/buffer/buffer_stream.cpp $(LDLIBS) -o $@
connectors: MistConnRaw
MistConnRaw: src/connectors/conn_raw.cpp
$(CXX) $(LDFLAGS) $(CPPFLAGS) $^ $(LDLIBS) -o $@
connectors: MistConnRTMP
MistConnRTMP: src/connectors/conn_rtmp.cpp
$(CXX) $(LDFLAGS) $(CPPFLAGS) $^ $(LDLIBS) -o $@
connectors: MistConnHTTP
MistConnHTTP: override LDLIBS += $(THREADLIB)
MistConnHTTP: src/connectors/conn_http.cpp src/connectors/embed.js.h src/connectors/icon.h
$(CXX) $(LDFLAGS) $(CPPFLAGS) $< $(LDLIBS) -o $@
connectors: MistConnHTTPProgressiveFLV
MistConnHTTPProgressiveFLV: src/connectors/conn_http_progressive_flv.cpp
$(CXX) $(LDFLAGS) $(CPPFLAGS) $^ $(LDLIBS) -o $@
connectors: MistConnHTTPProgressiveMP3
MistConnHTTPProgressiveMP3: src/connectors/conn_http_progressive_mp3.cpp
$(CXX) $(LDFLAGS) $(CPPFLAGS) $^ $(LDLIBS) -o $@
connectors: MistConnHTTPProgressiveMP4
MistConnHTTPProgressiveMP4: src/connectors/conn_http_progressive_mp4.cpp
$(CXX) $(LDFLAGS) $(CPPFLAGS) $^ $(LDLIBS) -o $@
connectors: MistConnHTTPProgressiveOGG
MistConnHTTPProgressiveOGG: src/connectors/conn_http_progressive_ogg.cpp src/converters/oggconv.cpp
$(CXX) $(LDFLAGS) $(CPPFLAGS) $^ $(LDLIBS) -o $@
connectors: MistConnHTTPDynamic
MistConnHTTPDynamic: src/connectors/conn_http_dynamic.cpp
$(CXX) $(LDFLAGS) $(CPPFLAGS) $^ $(LDLIBS) -o $@
connectors: MistConnHTTPSmooth
MistConnHTTPSmooth: src/connectors/conn_http_smooth.cpp src/connectors/xap.h
$(CXX) $(LDFLAGS) $(CPPFLAGS) $< $(LDLIBS) -o $@
connectors: MistConnHTTPLive
MistConnHTTPLive: src/connectors/conn_http_live.cpp
$(CXX) $(LDFLAGS) $(CPPFLAGS) $^ $(LDLIBS) -o $@
connectors: MistConnHTTPSRT
MistConnHTTPSRT: src/connectors/conn_http_srt.cpp
$(CXX) $(LDFLAGS) $(CPPFLAGS) $^ $(LDLIBS) -o $@
connectors: MistConnHTTPJSON
MistConnHTTPJSON: src/connectors/conn_http_json.cpp
$(CXX) $(LDFLAGS) $(CPPFLAGS) $^ $(LDLIBS) -o $@
connectors: MistConnTS
MistConnTS: src/connectors/conn_ts.cpp
$(CXX) $(LDFLAGS) $(CPPFLAGS) $^ $(LDLIBS) -o $@
analysers: MistAnalyserRTMP
MistAnalyserRTMP: src/analysers/rtmp_analyser.cpp
$(CXX) $(LDFLAGS) $(CPPFLAGS) $^ $(LDLIBS) -o $@
@@ -134,14 +78,6 @@ converters: MistFLV2DTSC
MistFLV2DTSC: src/converters/flv2dtsc.cpp
$(CXX) $(LDFLAGS) $(CPPFLAGS) $^ $(LDLIBS) -o $@
converters: MistOGG2DTSC
MistOGG2DTSC: src/converters/ogg2dtsc.cpp
$(CXX) $(LDFLAGS) $(CPPFLAGS) $^ $(LDLIBS) -o $@
converters: MistDTSC2OGG
MistDTSC2OGG: src/converters/dtsc2ogg.cpp src/converters/oggconv.cpp
$(CXX) $(LDFLAGS) $(CPPFLAGS) $^ $(LDLIBS) -o $@
converters: MistDTSCFix
MistDTSCFix: src/converters/dtscfix.cpp
$(CXX) $(LDFLAGS) $(CPPFLAGS) $^ $(LDLIBS) -o $@
@@ -162,8 +98,98 @@ converters: MistDTSC2SRT
MistDTSC2SRT: src/converters/dtsc2srt.cpp
$(CXX) $(LDFLAGS) $(CPPFLAGS) $^ $(LDLIBS) -o $@
inputs: MistInDTSC
MistInDTSC: override LDLIBS += $(THREADLIB)
MistInDTSC: override CPPFLAGS += "-DINPUTTYPE=\"input_dtsc.h\""
MistInDTSC: src/input/mist_in.cpp src/input/input.cpp src/input/input_dtsc.cpp
$(CXX) $(LDFLAGS) $(CPPFLAGS) $^ $(LDLIBS) -o $@
inputs: MistInFLV
MistInFLV: override LDLIBS += $(THREADLIB)
MistInFLV: override CPPFLAGS += "-DINPUTTYPE=\"input_flv.h\""
MistInFLV: src/input/mist_in.cpp src/input/input.cpp src/input/input_flv.cpp
$(CXX) $(LDFLAGS) $(CPPFLAGS) $^ $(LDLIBS) -o $@
inputs: MistInOGG
MistInOGG: override LDLIBS += $(THREADLIB)
MistInOGG: override CPPFLAGS += "-DINPUTTYPE=\"input_ogg.h\""
MistInOGG: src/input/mist_in.cpp src/input/input.cpp src/input/input_ogg.cpp
$(CXX) $(LDFLAGS) $(CPPFLAGS) $^ $(LDLIBS) -o $@
inputs: MistInBuffer
MistInBuffer: override LDLIBS += $(THREADLIB)
MistInBuffer: override CPPFLAGS += "-DINPUTTYPE=\"input_buffer.h\""
MistInBuffer: src/input/mist_in.cpp src/input/input.cpp src/input/input_buffer.cpp
$(CXX) $(LDFLAGS) $(CPPFLAGS) $^ $(LDLIBS) -o $@
outputs: MistOutFLV
MistOutFLV: override LDLIBS += $(THREADLIB)
MistOutFLV: override CPPFLAGS += "-DOUTPUTTYPE=\"output_progressive_flv.h\""
MistOutFLV: src/output/mist_out.cpp src/output/output.cpp src/output/output_progressive_flv.cpp
$(CXX) $(LDFLAGS) $(CPPFLAGS) $^ $(LDLIBS) -o $@
outputs: MistOutMP4
MistOutMP4: override LDLIBS += $(THREADLIB)
MistOutMP4: override CPPFLAGS += "-DOUTPUTTYPE=\"output_progressive_mp4.h\""
MistOutMP4: src/output/mist_out.cpp src/output/output.cpp src/output/output_progressive_mp4.cpp
$(CXX) $(LDFLAGS) $(CPPFLAGS) $^ $(LDLIBS) -o $@
outputs: MistOutMP3
MistOutMP3: override LDLIBS += $(THREADLIB)
MistOutMP3: override CPPFLAGS += "-DOUTPUTTYPE=\"output_progressive_mp3.h\""
MistOutMP3: src/output/mist_out.cpp src/output/output.cpp src/output/output_progressive_mp3.cpp
$(CXX) $(LDFLAGS) $(CPPFLAGS) $^ $(LDLIBS) -o $@
outputs: MistOutRTMP
MistOutRTMP: override LDLIBS += $(THREADLIB)
MistOutRTMP: override CPPFLAGS += "-DOUTPUTTYPE=\"output_rtmp.h\""
MistOutRTMP: src/output/mist_out.cpp src/output/output.cpp src/output/output_rtmp.cpp
$(CXX) $(LDFLAGS) $(CPPFLAGS) $^ $(LDLIBS) -o $@
outputs: MistOutRaw
MistOutRaw: override LDLIBS += $(THREADLIB)
MistOutRaw: override CPPFLAGS += "-DOUTPUTTYPE=\"output_raw.h\""
MistOutRaw: src/output/mist_out.cpp src/output/output.cpp src/output/output_raw.cpp
$(CXX) $(LDFLAGS) $(CPPFLAGS) $^ $(LDLIBS) -o $@
outputs: MistOutTS
MistOutTS: override LDLIBS += $(THREADLIB)
MistOutTS: override CPPFLAGS += "-DOUTPUTTYPE=\"output_ts.h\""
MistOutTS: src/output/mist_out.cpp src/output/output.cpp src/output/output_ts.cpp
$(CXX) $(LDFLAGS) $(CPPFLAGS) $^ $(LDLIBS) -o $@
outputs: MistOutHSS
MistOutHSS: override LDLIBS += $(THREADLIB)
MistOutHSS: override CPPFLAGS += "-DOUTPUTTYPE=\"output_hss.h\""
MistOutHSS: src/output/mist_out.cpp src/output/output.cpp src/output/output_hss.cpp
$(CXX) $(LDFLAGS) $(CPPFLAGS) $^ $(LDLIBS) -o $@
outputs: MistOutHLS
MistOutHLS: override LDLIBS += $(THREADLIB)
MistOutHLS: override CPPFLAGS += "-DOUTPUTTYPE=\"output_hls.h\""
MistOutHLS: src/output/mist_out.cpp src/output/output.cpp src/output/output_hls.cpp
$(CXX) $(LDFLAGS) $(CPPFLAGS) $^ $(LDLIBS) -o $@
outputs: MistOutHDS
MistOutHDS: override LDLIBS += $(THREADLIB)
MistOutHDS: override CPPFLAGS += "-DOUTPUTTYPE=\"output_hds.h\""
MistOutHDS: src/output/mist_out.cpp src/output/output.cpp src/output/output_hds.cpp
$(CXX) $(LDFLAGS) $(CPPFLAGS) $^ $(LDLIBS) -o $@
outputs: MistOutSRT
MistOutSRT: override LDLIBS += $(THREADLIB)
MistOutSRT: override CPPFLAGS += "-DOUTPUTTYPE=\"output_srt.h\""
MistOutSRT: src/output/mist_out.cpp src/output/output.cpp src/output/output_srt.cpp
$(CXX) $(LDFLAGS) $(CPPFLAGS) $^ $(LDLIBS) -o $@
outputs: MistOutJSON
MistOutJSON: override LDLIBS += $(THREADLIB)
MistOutJSON: override CPPFLAGS += "-DOUTPUTTYPE=\"output_json.h\""
MistOutJSON: src/output/mist_out.cpp src/output/output.cpp src/output/output_json.cpp
$(CXX) $(LDFLAGS) $(CPPFLAGS) $^ $(LDLIBS) -o $@
BUILT_SOURCES=controller/server.html.h connectors/embed.js.h
lspSOURCES=lsp/jquery.js lsp/placeholder.js lsp/md5.js lsp/main.js lsp/pages.js lsp/tablesort.js
lspSOURCES=lsp/plugins/jquery.js lsp/plugins/placeholder.js lsp/plugins/md5.js lsp/main.js lsp/pages.js lsp/plugins/tablesort.js lsp/plugins/jquery.flot.min.js lsp/plugins/jquery.flot.time.min.js lsp/plugins/jquery.flot.crosshair.min.js
lspDATA=lsp/header.html lsp/main.css lsp/footer.html
JAVA := $(shell which java 2> /dev/null)
@@ -201,7 +227,7 @@ clean:
rm -f *.o Mist* sourcery src/controller/server.html src/connectors/embed.js.h src/controller/server.html.h
rm -rf ./docs
install: controller buffers connectors analysers converters
install: all
install ./Mist* $(DESTDIR)$(bindir)
uninstall:

File diff suppressed because one or more lines are too long


@@ -1,4 +1,4 @@
<html>
<head>
<meta http-equiv='content-type' content='text/html;charset=utf-8' />
<title>MistServer Manager</title>
<title>MistServer MI</title>

lsp/jquery.js (vendored)

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long


@@ -14,6 +14,8 @@ function consolelog() {
}
}
var ih = false;
function confirmDelete(question){
return confirm(question);
}
@@ -56,6 +58,151 @@ function formatTime(date){
('00' + d.getSeconds()).slice(-2)
].join(':');
}
/**
* Format a time duration to something like "2 days, 00:00:00.000"
* @param ms the duration to format in milliseconds
*/
function formatDuration(ms) {
var secs = Math.floor(ms / 1000), mins = 0;
ms = ms % 1000;
if (secs >= 60) {
mins = Math.floor(secs / 60);
secs = secs % 60;
}
if (mins >= 60) {
var hours = Math.floor(mins / 60);
mins = mins % 60;
}
var string = ('00'+mins).slice(-2)+':'+('00'+secs).slice(-2)+'.'+('000'+ms).slice(-3);
if (hours >= 24) {
var days = Math.floor(hours / 24);
hours = hours % 24;
}
if (hours > 0) {
string = ('00'+hours).slice(-2)+':'+string;
}
if (days > 0) {
string = days+' day'+(days > 1 ? 's' : '')+', '+string
}
return string;
}
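For reference, a quick usage sketch of the new formatDuration() helper; the input values below are illustrative and not part of the commit:
formatDuration(500);      // "00:00.500"
formatDuration(3723000);  // "01:02:03.000" (1 h, 2 min, 3 s)
formatDuration(90061000); // "1 day, 01:01:01.000"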
/**
* Capitalize the first letter
* @param string the string
*/
function capFirstChar(string) {
if (string.length <= 0) { return ''; }
return string[0].toUpperCase() + string.slice(1);
}
/**
* Flot tick generator for bandwidth
* @param axis the axis
*/
function flotTicksBandwidthAxis(axis) {
var range = axis.max - axis.min;
var delta = range / 4;
var start = axis.min;
if (axis.max < 1024) { // unit: bytes/s
if (delta > 100) { delta = Math.floor(delta/100)*100; start = Math.floor(start/100)*100; } // to lowest 100 bytes/s
else if (delta > 10) { delta = Math.floor(delta/10)*10; start = Math.floor(start/10)*10; } // to lowest 10 bytes/s
}
else if (axis.max < 1048576) { //unit: kiB/s
if (delta > 102400) { delta = Math.floor(delta/102400)*102400; start = Math.floor(start/102400)*102400; } //to lowest 100 kiB/s
else if (delta > 10240) { delta = Math.floor(delta/10240)*10240; start = Math.floor(start/10240)*10240; } //to lowest 10 kiB/s
else if (delta > 1024) { delta = Math.floor(delta/1024)*1024; start = Math.floor(start/1024)*1024; } //to lowest 1 kiB/s
else { delta = Math.floor(delta/102.4)*102.4; start = Math.floor(start/102.4)*102.4; } //to lowest 0.1 kiB/s
}
else { //unit: miB/s
if (delta > 104857600) { delta = Math.floor(delta/104857600)*104857600; start = Math.floor(start/104857600)*104857600; } //to lowest 100 miB/s
else if (delta > 10485760) { delta = Math.floor(delta/10485760)*10485760; start = Math.floor(start/10485760)*10485760; } //to lowest 10 miB/s
else if (delta > 1048576) { delta = Math.floor(delta/1048576)*1048576; start = Math.floor(start/1048576)*1048576; } //to lowest 1 miB/s
else { delta = Math.floor(delta/104857.6)*104857.6; start = Math.floor(start/104857.6)*104857.6; } //to lowest 0.1 miB/s
}
var out = [];
for (var i = start; i <= axis.max; i += delta) {
out.push(i);
}
return out;
}
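As a worked example of the tick generator (values illustrative, not from the commit): an axis running from 0 to 5000 bytes/s falls in the kiB/s branch, the raw step of 1250 is rounded down to 1024, and ticks land on whole kibibytes:
flotTicksBandwidthAxis({min: 0, max: 5000}); // returns [0, 1024, 2048, 3072, 4096]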
/**
* Flot axis formatter for bandwidth
* @param val the value
* @param axis the axis
*/
function flotFormatBandwidthAxis(val,axis) {
if (val < 0) { var sign = '-'; }
else { var sign = ''; }
val = Math.abs(val);
if (val < 1024) { return sign+Math.round(val)+' bytes/s'; } // 0 bytes/s through 1023 bytes/s
if (val < 10235) { return sign+(val/1024).toFixed(2)+' kiB/s'; } // 1.00 kiB/s through 9.99 kiB/s
if (val < 102449) { return sign+(val/1024).toFixed(1)+' kiB/s'; } // 10.0 kiB/s through 99.9 kiB/s
if (val < 1048064) { return sign+Math.round(val/1024)+' kiB/s'; } // 100 kiB/s through 1023 kiB/s
if (val < 10480518) { return sign+(val/1048576).toFixed(2)+' miB/s'; } // 1.00 miB/s through 9.99 miB/s
if (val < 104805172) { return sign+(val/1048576).toFixed(1)+' miB/s'; } // 10.0 miB/s through 99.9 miB/s
return sign+Math.round(val/1048576)+' miB/s'; // 100 miB/s and up
}
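A few sample outputs of the formatter above (inputs are illustrative, in bytes/s):
flotFormatBandwidthAxis(512);      // "512 bytes/s"
flotFormatBandwidthAxis(2048);     // "2.00 kiB/s"
flotFormatBandwidthAxis(-1572864); // "-1.50 miB/s"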
/**
* Converts the statistics data into something flot understands
* @param stats the statistics.totals object
* @param islive true if the statistics are for a live stream (values are then averaged per 5 seconds)
*/
function convertStatisticsToFlotFormat(stats,islive) {
var plotdata = [
{ label: 'Viewers', data: []},
{ label: 'Bandwidth (Up)', data: [], yaxis: 2},
{ label: 'Bandwidth (Down)', data: [], yaxis: 2}
];
var oldtimestamp = 0;
var i = 0, up = 0, down = 0;
for (var timestamp in stats) {
if (islive) {
i++;
up += stats[timestamp].up;
down += stats[timestamp].down;
//average over 5 seconds to prevent super spiky unreadable graph
if ((i % 5) == 0) {
plotdata[0].data.push([Number(timestamp)*1000,stats[timestamp].count]);
plotdata[1].data.push([Number(timestamp)*1000,up/5]);
plotdata[2].data.push([Number(timestamp)*1000,down/5]);
up = 0;
down = 0;
}
}
else {
var dt = timestamp - oldtimestamp;
if (stats[oldtimestamp]) {
var up = (stats[timestamp].up - stats[oldtimestamp].up)/dt;
var down = (stats[timestamp].down - stats[oldtimestamp].down)/dt;
}
else {
var up = stats[timestamp].up;
var down = stats[timestamp].down;
}
plotdata[0].data.push([Number(timestamp)*1000,stats[timestamp].count]);
plotdata[1].data.push([Number(timestamp)*1000,up]);
plotdata[2].data.push([Number(timestamp)*1000,down]);
oldtimestamp = timestamp;
}
}
for (var timestamp in stats) {
var dt = timestamp - oldtimestamp;
plotdata[0].data.push([Number(timestamp)*1000,stats[timestamp].count]);
if (stats[oldtimestamp]) {
var up = (stats[timestamp].up - stats[oldtimestamp].up)/dt;
var down = (stats[timestamp].down - stats[oldtimestamp].down)/dt;
}
else {
var up = stats[timestamp].up;
var down = stats[timestamp].down;
}
plotdata[1].data.push([Number(timestamp)*1000,up]);
plotdata[2].data.push([Number(timestamp)*1000,down]);
oldtimestamp = timestamp;
}
return plotdata;
}
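A minimal sketch of how this converter is fed, assuming the totals object is keyed by unix timestamps in seconds and carries the count/up/down fields used above (the sample numbers are made up):
var totals = {
  1396600000: {count: 2, up: 1024, down: 204800},
  1396600010: {count: 3, up: 2048, down: 409600}
};
var plotdata = convertStatisticsToFlotFormat(totals, false); // non-live: per-second deltas
// plotdata[0] holds the viewer count series, plotdata[1]/[2] the up/down bandwidth,
// all with x values in milliseconds, in the series format flot's $.plot() expects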
/**
* Check if an URL points to a live datastream or a recorded file
* @param url the url in question
@@ -136,9 +283,10 @@ function applyInput(){
//apply the inputs
$('input.isSetting,select.isSetting').each(function(){
var objpath = findObjPath($(this));
if ($(this).val() == '') {
if (($(this).val() == '') || ($(this).val() == 0)) {
eval('delete '+objpath+';');
}
else {
@@ -163,6 +311,37 @@ function findObjPath($element) {
}
}
function ihAddBalloons() {
var page = settings.ih.pages[settings.currentpage];
if (!page) { return; }
//something with pageinfo
if (page.pageinfo) {
$('#page').prepend(
$('<div>').addClass('ih-balloon').addClass('pageinfo').html(page.pageinfo)
);
}
for (inputid in page.inputs) {
$('#'+inputid).parent().prepend(
$('<div>').addClass('ih-balloon').addClass('inputinfo').attr('data-for',inputid).html(page.inputs[inputid]).hide()
);
$('#'+inputid).focus(function(){
$('.ih-balloon[data-for='+$(this).attr('id')+']').show();
$('.ih-balloon.pageinfo').hide();
}).blur(function(){
$('.ih-balloon[data-for='+$(this).attr('id')+']').hide();
$('.ih-balloon.pageinfo').show();
});
}
$('#page label').each(function(){
$(this)
});
}
function ihMakeBalloon(contents,forid) {
return $('<div>').addClass('ih-balloon').attr('data-for',forid).html(contents).hide();
}
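A hypothetical call of the balloon helper (the input id and text below are made up for illustration):
ihMakeBalloon('The port this protocol listens on.', 'settings-port');
// returns a hidden <div class="ih-balloon" data-for="settings-port"> wrapping the given HTML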
function getData(callBack,sendData,timeOut,doShield){
timeOut = timeOut | 30000;
var data = {};
@@ -283,6 +462,81 @@ function getData(callBack,sendData,timeOut,doShield){
var jqxhr = $.ajax(obj);
}
function getWikiData(url,callBack) {
var wikiHost = 'http://rework.mistserver.org'; //must be changed when rework goes live
$('#message').removeClass('red').text('Connecting to the MistServer wiki..').append(
$('<br>')
).append(
$('<a>').text('Cancel request').click(function(){
jqxhr.abort();
})
);
var obj = {
'url': wikiHost+url,
'type': 'GET',
'crossDomain': true,
'data': {
'skin': 'plain'
},
'error':function(jqXHR,textStatus,errorThrown){
switch (textStatus) {
case 'timeout':
textStatus = $('<i>').text('The connection timed out. ');
break;
case 'abort':
textStatus = $('<i>').text('The connection was aborted. ');
break;
default:
textStatus = $('<i>').text(textStatus+'. ').css('text-transform','capitalize');
}
$('#message').addClass('red').text('An error occurred while attempting to communicate with the MistServer wiki:').append(
$('<br>')
).append(
textStatus
).append(
$('<a>').text('Send server request again').click(function(){
getWikiData(url,callBack);
})
);
},
'success': function(returnedData){
$('#message').text('Wiki data received');
//convert to DOM elements
//returnedData = $.parseHTML(returnedData);
returnedData = $(returnedData);
//fix broken slash-links in the imported data
returnedData.find('a[href]').each(function(){
if ((this.hostname == '') || (this.hostname == undefined)) {
$(this).attr('href',wikiHost+$(this).attr('href'));
}
if (!$(this).attr('target')) {
$(this).attr('target','_blank');
}
}).find('img[src]').each(function(){
var a = $('<a>').attr('href',$(this).attr('src'));
if ((a.hostname == '') || (a.hostname == undefined)) {
$(this).attr('src',wikiHost+$(this).attr('src'));
}
});
consolelog('['+(new Date).toTimeString().split(' ')[0]+']','Received wiki data:',returnedData);
if (callBack) {
callBack(returnedData);
}
$('#message').text('Last communication with the MistServer wiki at '+formatTime((new Date).getTime()/1000));
}
};
var jqxhr = $.ajax(obj);
}
function saveAndReload(tabName){
var sendData = $.extend(true,{},settings.settings);
delete sendData.logs;
@@ -345,12 +599,8 @@ function updateOverview() {
var streams = 0;
var streamsOnline = 0;
for (var index in data.statistics) {
if (data.statistics[index].curr) {
for (viewer in data.statistics[index].curr) {
viewers++;
}
}
if (data.clients && data.clients.data) {
viewers = data.clients.data.length;
}
for (var index in data.streams) {
@@ -363,7 +613,9 @@
$('#cur_streams_online').text(streamsOnline+'/'+streams+' online');
$('#cur_num_viewers').text(seperateThousands(viewers,' '));
$('#settings-config-time').text(formatDateLong(data.config.time));
});
settings.settings.statistics = data.statistics;
},{clients: {}});
}
function updateProtocols() {
getData(function(data){
@@ -380,6 +632,9 @@
function displayProtocolSettings(theProtocol) {
var capabilities = settings.settings.capabilities.connectors[theProtocol.connector];
if (!capabilities) {
return '';
}
var settingsList = [];
for (var index in capabilities.required) {
if ((theProtocol[index]) && (theProtocol[index] != '')) {
@@ -478,20 +733,31 @@ function buildProtocolParameterFields(data,required,objpath) {
return $container.html();
}
function updateStreams() {
var streamlist = [];
for (var stream in settings.settings.streams) {
streamlist.push(stream);
}
getData(function(data){
var datafields = {};
for (var index in data.clients.fields) {
datafields[data.clients.fields[index]] = index;
}
var viewers = {};
for (var index in data.clients.data) {
if (viewers[data.clients.data[index][datafields['stream']]]) {
viewers[data.clients.data[index][datafields['stream']]]++;
}
else {
viewers[data.clients.data[index][datafields['stream']]] = 1;
}
}
for (var index in data.streams) {
$('#status-of-'+index).html(formatStatus(data.streams[index]))
$('#viewers-of-'+index).text(seperateThousands(viewers[index],' '));
}
for (var index in data.statistics) {
var viewers = 0;
if (data.statistics[index].curr) {
for (var jndex in data.statistics[index].curr) {
viewers++;
}
}
$('#viewers-of-'+index).text(seperateThousands(viewers,' '));
}
});
settings.settings.statistics = data.statistics;
},{clients:{}});
}
function filterTable() {
var displayRecorded = $('#stream-filter-recorded').is(':checked');
@@ -886,12 +1152,19 @@ function conversionSelectInput(theFiles) {
applyInput();
var extension = settings.settings.conversion.convert._new_.output.split('.');
if (extension[extension.length-1] != 'dtsc') {
extension.push('dtsc');
settings.settings.conversion.convert._new_.output = extension.join('.');
}
settings.settings.conversion.convert._new_.output = settings.settings.conversion.convert._new_.outputdir.replace(/\/$/,'')+'/'+settings.settings.conversion.convert._new_.output;
delete settings.settings.conversion.convert._new_.outputdir;
if ((settings.settings.conversion.convert._new_.video) && (settings.settings.conversion.convert._new_.video.fps)) {
settings.settings.conversion.convert._new_.fpks = Math.floor(settings.settings.conversion.convert._new_.fps * 1000);
}
settings.settings.conversion.convert['c_'+(new Date).getTime()] = settings.settings.conversion.convert._new_;
delete settings.settings.conversion.convert._new_;
saveAndReload('conversion');
@@ -1019,10 +1292,85 @@ function updateServerstats() {
},{capabilities:true});
}
function buildstreamembed(streamName,embedbase) {
$('#liststreams .button.current').removeClass('current')
$('#liststreams .button').filter(function(){
return $(this).text() == streamName;
}).addClass('current');
$('#subpage').append(
$('<div>').addClass('input_container').html(
$('<label>').text('The info embed URL is:').append(
$('<input>').attr('type','text').attr('readonly','readonly').val(embedbase+'info_'+streamName+'.js')
)
).append(
$('<label>').text('The embed URL is:').append(
$('<input>').attr('type','text').attr('readonly','readonly').val(embedbase+'embed_'+streamName+'.js')
)
).append(
$('<label>').text('The embed code is:').css('overflow','hidden').append(
$('<textarea>').val('<div>\n <script src="'+embedbase+'embed_'+streamName+'.js"></' + 'script>\n</div>')
)
)
).append(
$('<span>').attr('id','listprotocols').text('Loading..')
).append(
$('<p>').text('Preview:')
).append(
$('<div>').attr('id','preview-container')
);
// jQuery doesn't work -> use DOM magic
var script = document.createElement('script');
script.src = embedbase+'embed_'+streamName+'.js';
script.onload = function(){
var priority = mistvideo[streamName].source;
if (priority.length > 0) {
priority.sort(function(a,b){
return b.priority - a.priority;
});
var $table = $('<table>').html(
$('<tr>').html(
$('<th>').text('URL')
).append(
$('<th>').text('Type')
).append(
$('<th>').text('Priority')
)
);
for (var i in priority) {
$table.append(
$('<tr>').html(
$('<td>').text(priority[i].url)
).append(
$('<td>').text(priority[i].type)
).append(
$('<td>').addClass('align-center').text(priority[i].priority)
)
);
}
$('#listprotocols').html($table);
}
else {
$('#listprotocols').html('No data in info embed file.');
}
}
document.getElementById('preview-container').appendChild( script );
}
$(function(){
$('#menu div.button').click(function(){
if ((settings.settings.LTS != 1) && ($(this).hasClass('LTS-only'))) { return; }
$('#logo > a').click(function(){
if ($.isEmptyObject(settings.settings)) {
showTab('login')
}
else {
showTab('overview');
}
});
$('#menu div.button').click(function(e){
//if ((settings.settings.LTS != 1) && ($(this).hasClass('LTS-only'))) { return; }
showTab($(this).text().toLowerCase());
e.stopPropagation();
})
$('body').on('keydown',function(e){
switch (e.which) {
@@ -1101,6 +1449,41 @@ $(function(){
$(this).val(v);
this.setSelectionRange(curpos,curpos);
});
$('.expandbutton').click(function(){
$(this).toggleClass('active');
});
$('#ih-button').click(function(){
if (ih) {
$('.ih-balloon').remove();
}
else {
getWikiData('/wiki/Integrated_Help',function(data){
settings.ih = {
raw: data.find('#mw-content-text').contents(),
pages: {}
}
settings.ih.raw.filter('.page[data-pagename]').each(function(){
var pagename = $(this).attr('data-pagename').replace(' ','_');
settings.ih.pages[pagename] = {
raw: $(this).contents(),
pageinfo: $(this).find('.page-description').html(),
inputs: {}
}
$(this).children('.input-description[data-inputid]').each(function(){
settings.ih.pages[pagename].inputs[$(this).attr('data-inputid')] = $(this).html();
});
});
consolelog('New integrated help data:',settings.ih);
ihAddBalloons();
});
}
ih = !ih;
$(this).toggleClass('active');
});
});
$(window).on('hashchange', function(e) {


@@ -6,6 +6,7 @@ var defaults = {
};
function showTab(tabName,streamName) {
settings.currentpage = tabName.replace(' ','_');
ignoreHashChange = true;
location.hash = location.hash.split('&')[0]+'&'+tabName+(streamName ? '@'+streamName : '');
@@ -14,9 +15,10 @@ function showTab(tabName,streamName) {
$('#menu .button').removeClass('current').filter(function(i){
return $(this).text().toLowerCase() == tabName;
}).addClass('current');
}).addClass('current').parents('.expandbutton').addClass('active');
$('#page').html('');
$('#tooltip').remove();
clearInterval(theInterval);
$('#menu').css('visibility', 'visible');
@@ -121,6 +123,10 @@ function showTab(tabName,streamName) {
saveAndReload('overview');
})
).append(
$('<button>').text('Cancel').addClass('escape-to-cancel').click(function(){
showTab('login');
})
)
);
break;
@@ -434,8 +440,12 @@ function showTab(tabName,streamName) {
$('<td>').attr('id','status-of-'+index).html(formatStatus(theStream))
).append(
$('<td>').html(
$('<button>').text('Embed').click(function(){
showTab('embed',$(this).parent().parent().data('stream'))
$('<button>').text('Preview').click(function(){
showTab('preview',$(this).parent().parent().data('stream'))
})
).append(
$('<button>').text('Info').click(function(){
showTab('streaminfo',$(this).parent().parent().data('stream'))
})
)
).append(
@@ -497,7 +507,7 @@ function showTab(tabName,streamName) {
$('<label>').text('Buffer time:').addClass('live-only').attr('for','settings-streams-'+streamName+'-DVR').append(
$('<span>').addClass('unit').text('[ms]')
).append(
$('<input>').attr('type','text').attr('id','settings-streams-'+streamName+'-DVR').attr('placeholder','2 keyframes').addClass('isSetting').addClass('').addClass('validate-positive-integer')
$('<input>').attr('type','text').attr('id','settings-streams-'+streamName+'-DVR').attr('placeholder','30000').addClass('isSetting').addClass('').addClass('validate-positive-integer')
)
).append(
$('<label>').text('Record to:').addClass('live-only').addClass('LTS-only').attr('for','settings-streams-'+streamName+'-record').attr('title','The path to the file to record to. Leave this field blank if you do not wish to record to file.').append(
@@ -516,7 +526,7 @@ function showTab(tabName,streamName) {
$('<p>').text('Encrypt this stream')
).append(
$('<div>').addClass('description').text(
'To enable encryption, the Licene Acquisition URL must be entered, as well as either the content key or the key ID and seed.'
'To enable encryption, the Licence Acquisition URL must be entered, as well as either the content key or the key ID and seed.'
)
).append(
$('<label>').text('Licence Acquisition URL:').attr('for','settings-streams-'+streamName+'-la_url').append(
@@ -578,7 +588,131 @@ function showTab(tabName,streamName) {
})
break;
case 'embed':
case 'streaminfo':
var meta = settings.settings.streams[streamName].meta;
if (!meta) {
$('#page').html('No info available for stream "'+streamName+'".');
}
else {
$meta = $('<table>').css('width','auto');
if (meta.live) {
$meta.html(
$('<tr>').html(
$('<td>').text('Type:')
).append(
$('<td>').text('Live')
)
);
}
else {
$meta.html(
$('<tr>').html(
$('<td>').text('Type:')
).append(
$('<td>').text('Pre-recorded (VoD)')
)
);
}
for (var index in meta.tracks) {
var track = meta.tracks[index];
if (track.type == '') { continue; }
var $table = $('<table>').html(
$('<tr>').html(
$('<td>').text('Type:')
).append(
$('<td>').text(capFirstChar(track.type))
)
).append(
$('<tr>').html(
$('<td>').text('Codec:')
).append(
$('<td>').text(track.codec)
)
).append(
$('<tr>').html(
$('<td>').text('Duration:')
).append(
$('<td>').html(
formatDuration(track.lastms-track.firstms)+'<br>(from '+formatDuration(track.firstms)+' to '+formatDuration(track.lastms)+')'
)
)
).append(
$('<tr>').html(
$('<td>').text('Average bitrate:')
).append(
$('<td>').text(Math.round(track.bps/1024)+' KiB/s')
)
);
if (track.height) {
$table.append(
$('<tr>').html(
$('<td>').text('Size:')
).append(
$('<td>').text(track.width+'x'+track.height+' px')
)
);
}
if (track.fpks) {
$table.append(
$('<tr>').html(
$('<td>').text('Framerate:')
).append(
$('<td>').text(track.fpks/1000+' fps')
)
);
}
if (track.channels) {
$table.append(
$('<tr>').html(
$('<td>').text('Channels:')
).append(
$('<td>').text(track.channels)
)
);
}
if (track.rate) {
$table.append(
$('<tr>').html(
$('<td>').text('Samplerate:')
).append(
$('<td>').text(seperateThousands(track.rate,' ')+' Hz')
)
);
}
$meta.append(
$('<tr>').html(
$('<td>').text(capFirstChar(index)+':')
).append(
$('<td>').html(
$table
)
)
);
}
$('#page').html(
$('<p>').text('Detailed information about stream "'+streamName+'"')
).append(
$('<div>').css({'width':'100%','display':'table','table-layout':'fixed','min-height':'300px'}).html(
$('<div>').css('display','table-row').html(
$('<div>').attr('id','info-stream-meta').css({'display':'table-cell','max-width':'50%','overflow':'auto'}).html(
$meta
)
).append(
$('<div>').attr('id','info-stream-statistics').css({'display':'table-cell','text-align':'center','min-height':'200px'})
)
)
);
}
$('#page').append(
$('<button>').text('Back').addClass('escape-to-cancel').click(function(){
showTab('streams');
})
);
break;
case 'preview':
var httpConnector = false;
for (var index in settings.settings.config.protocols) {
if ((settings.settings.config.protocols[index].connector == 'HTTP') || (settings.settings.config.protocols[index].connector == 'HTTP.exe')) {
@@ -586,37 +720,29 @@ function showTab(tabName,streamName) {
}
}
if (httpConnector) {
var embedbase = 'http://'+parseURL(settings.server).host+':'+(httpConnector.port ? httpConnector.port : 8080)+'/';
$('#page').html(
$('<div>').addClass('input_container').html(
$('<p>').text('Embed info for stream "'+streamName+'"')
$('<div>').addClass('table').html(
$('<div>').addClass('row').html(
$('<div>').addClass('cell').attr('id','liststreams').addClass('menu')
).append(
$('<label>').text('The info embed URL is:').append(
$('<input>').attr('type','text').attr('readonly','readonly').val(embedbase+'info_'+streamName+'.js')
$('<div>').addClass('cell').attr('id','subpage').css('padding-left','1em')
)
).append(
$('<label>').text('The embed URL is:').append(
$('<input>').attr('type','text').attr('readonly','readonly').val(embedbase+'embed_'+streamName+'.js')
)
).append(
$('<label>').text('The embed code is:').css('overflow','hidden').append(
$('<textarea>').val('<div>\n <script src="'+embedbase+'embed_'+streamName+'.js"></' + 'script>\n</div>')
)
).append(
$('<button>').text('Back').addClass('escape-to-cancel').click(function(){
showTab('streams');
})
)
).append(
$('<p>').text('Preview:')
).append(
$('<div>').attr('id','preview-container')
);
var embedbase = 'http://'+parseURL(settings.server).host+':'+(httpConnector.port ? httpConnector.port : 8080)+'/';
// jQuery doesn't work -> use DOM magic
var script = document.createElement('script');
script.src = embedbase+'embed_'+streamName+'.js';
document.getElementById('preview-container').appendChild( script );
for (var s in settings.settings.streams) {
if (!streamName) {
streamName = s;
}
$('#liststreams').append(
$('<div>').addClass('button').text(settings.settings.streams[s].name).click(function(){
buildstreamembed($(this).text());
})
);
}
buildstreamembed(streamName,embedbase);
}
else {
$('#page').html(
@@ -627,6 +753,7 @@ function showTab(tabName,streamName) {
case 'limits':
var $tbody = $('<tbody>');
$('#page').html(
$('<div>').addClass('LTS-only').html(
$('<div>').addClass('description').text('This is an overview of the limits that have been configured on MistServer.')
).append(
$('<table>').html(
@@ -650,6 +777,7 @@ function showTab(tabName,streamName) {
$('<button>').text('New').click(function(){
showTab('edit limit','_new_');
})
)
);
for (var index in settings.settings.config.limits) {
@@ -1041,6 +1169,551 @@ function showTab(tabName,streamName) {
}
$('#logs-refresh-every').val(defaults.logRefreshing[1]);
break;
case 'statistics':
var graphs = {};
var plot;
$('#page').html(
$('<div>').addClass('description').text('Here, you can select all kinds of data, and view them in a graph.')
).append(
$('<div>').addClass('input_container').html(
$('<p>').text('Select the data to display')
).append(
$('<label>').text('Add to graph:').append(
$('<select>').attr('id','graphid').html(
$('<option>').text('New graph').val('new')
).change(function(){
if ($(this).val() == 'new') {
$('#graphtype').removeAttr('disabled');
}
else {
$('#graphtype').attr('disabled','disabled');
//set to correct type
}
})
)
).append(
$('<label>').text('Graph x-axis type:').append(
$('<select>').attr('id','graphtype').html(
$('<option>').text('Time line').val('time')
).append(
$('<option>').text('Map').val('coords')
).change(function(){
$('#dataset option').hide();
$('#dataset option.axis_'+$(this).val()).show();
$('#dataset').val( $('#dataset option.axis_'+$(this).val()).first().val());
})
)
).append(
$('<label>').text('Select data set:').append(
$('<select>').attr('id','dataset').html(
$('<option>').text('Viewers').val('clients').addClass('axis_time')
).append(
$('<option>').text('Bandwidth (up)').val('upbps').addClass('axis_time')
).append(
$('<option>').text('Bandwidth (down)').val('downbps').addClass('axis_time')
).append(
$('<option>').text('% CPU').val('cpuload').addClass('axis_time')
).append(
$('<option>').text('Memory load').val('memload').addClass('axis_time')
).append(
$('<option>').text('Viewer location').val('coords').addClass('axis_coords')
).change(function(){
switch ($(this).val()) {
case 'clients':
case 'upbps':
case 'downbps':
$('#dataset-details .replace-dataset').text('amount of viewers')
$('#dataset-details').show();
break;
default:
$('#dataset-details').hide();
}
})
)
).append(
$('<div>').attr('id','dataset-details').addClass('checklist').css({
'padding':'0.5em 0 0 40%',
'font-size':'0.9em'
}).html('Show <span class=replace-dataset></span> for:').append(
$('<label>').text('The total').prepend(
$('<input>').attr('type','radio').attr('name','cumutype').attr('checked','checked').val('all')
)
).append(
$('<label>').text('The stream ').append(
$('<select>').addClass('stream cumuval')
).prepend(
$('<input>').attr('type','radio').attr('name','cumutype').val('stream')
)
).append(
$('<label>').text('The protocol ').append(
$('<select>').addClass('protocol cumuval')
).prepend(
$('<input>').attr('type','radio').attr('name','cumutype').val('protocol')
)
)
).append(
$('<button>').text('Add data set').click(function(){
//the graph
if ($('#graphid').val() == 'new') {
var graph = {};
graph.id = $('#graphid').val();
graph.type = $('#graphtype').val();
graph.id = 'graph_'+($('#graphcontainer .graph').length+1);
graph.datasets = [];
graphs[graph.id] = graph;
$('#graphcontainer').append(
$('<div>').attr('id',graph.id).addClass('graph-item').html(
$('<div>').addClass('legend')
).append(
$('<div>').addClass('graph')
)
);
$('#graphid').append(
$('<option>').text(graph.id)
).val(graph.id).trigger('change');
}
else {
var graph = graphs[$('#graphid').val()];
}
//the dataset itself
var d = {
display: true,
type: $('#dataset').val(),
label: '',
yaxistype: 'amount',
data: [],
lines: { show: true },
points: { show: false }
};
switch (d.type) {
case 'cpuload':
d.label = 'CPU load';
d.yaxistype = 'percentage';
break;
case 'memload':
d.label = 'Memory load';
d.yaxistype = 'percentage';
break;
case 'upbps':
case 'downbps':
case 'clients':
d.cumutype = $('#dataset-details input[name=cumutype]:checked').val();
d.yaxistype = 'bytespersec';
if (d.cumutype == 'all') {
switch (d.type) {
case 'clients':
d.label = 'Total viewers';
d.yaxistype = 'amount';
break;
case 'upbps':
d.label = 'Total bandwidth (up)';
break;
case 'downbps':
d.label = 'Total bandwidth (down)';
break;
}
}
else {
var which = $('#dataset-details.cumuval.'+d.cumutype).val();
if (d.cumutype == 'stream') {
d.stream = which;
}
else if (d.cumutype == 'protocol') {
d.protocol = which;
}
switch (d.type) {
case 'clients':
d.label = 'Viewers ('+d.stream+')';
d.yaxistype = 'amount';
break;
case 'upbps':
d.label = 'Bandwidth (up) ('+d.stream+')';
break;
case 'downbps':
d.label = 'Bandwidth (down) ('+d.stream+')';
break;
}
}
break;
}
graph.datasets.push(d);
getPlotData();
})
)/*.append(
$('<p>').text('Switch data display type').css('clear','both')
).append(
$('<label>').text('Show data in a:').append(
$('<select>').html(
$('<option>').text('graph')
).append(
$('<option>').text('table')
)
)
)*/
).append(
$('<div>').attr('id','graphcontainer')
);
for (var i in settings.settings.streams) {
$('#dataset-details .cumuval.stream').append(
$('<option>').text(settings.settings.streams[i].name).val(i)
);
}
for (var i in settings.settings.config.protocols) {
$('#dataset-details .cumuval.protocol').append(
$('<option>').text(settings.settings.config.protocols[i].connector)
);
}
$('#graphtype').trigger('change');
var lastitem = null;
var $tooltip = $('<div>').attr('id','tooltip');
$('body').append($tooltip);
$('.graph').live('plothover',function(e,pos,item){
if (item) {
var pos;
if (item.pageX > ($(window).width() / 2)) {
pos.left = 'auto';
pos.right = $(window).width() - item.pageX + 8+'px';
}
else {
pos.left = item.pageX + 8+'px';
pos.right = 'auto';
}
if (item.pageY > ($(window).height() / 2)) {
pos.top = 'auto';
pos.bottom = $(window).height() - item.pageY + 8+'px';
}
else {
pos.top = item.pageY + 8+'px';
pos.bottom = 'auto';
}
$tooltip.css({
'left': pos.left,
'top': pos.top,
'right': pos.right,
'bottom': pos.bottom
}).html(
$('<p>').text(item.series.label).prepend(
$('<div>').css({
'background-color': item.series.color,
'width': '20px',
'height': '20px',
'display': 'inline-block',
'margin': '0 0.5em'
})
)
).append(
$('<table>').html(
$('<tr>').html(
$('<td>').text('Time:')
).append(
$('<td>').text(item.series.xaxis.tickFormatter(item.datapoint[0],item.series.xaxis))
)
).append(
$('<tr>').html(
$('<td>').text(item.series.label+':')
).append(
$('<td>').text(item.series.yaxis.tickFormatter(item.datapoint[1],item.series.yaxis))
)
)
).fadeIn();
}
else {
$('#tooltip').hide();
}
});
theInterval = setInterval(function(){
getPlotData();
},10000);
function getPlotData() {
getData(function(data){
for (var j in graphs) {
for (var i in graphs[j].datasets) {
graphs[j].datasets[i] = findDataset(graphs[j].datasets[i],data);
}
drawGraph(graphs[j]);
}
},{capabilities:true,totals:{}});
}
function findDataset(dataobj,sourcedata) {
var now = sourcedata.config.time;
switch (dataobj.type) {
case 'cpuload':
//remove any data older than 10 minutes
var removebefore = false;
for (var i in dataobj.data) {
if (dataobj.data[i][0] < (now-600)*1000) {
removebefore = Number(i)+1;
}
}
if (removebefore !== false) {
dataobj.data.splice(0,removebefore);
}
dataobj.data.push([now*1000,sourcedata.capabilities.load.one]);
break;
case 'memload':
//remove any data older than 10 minutes
var removebefore = false;
for (var i in dataobj.data) {
if (dataobj.data[i][0] < (now-600)*1000) {
removebefore = Number(i)+1;
}
}
if (removebefore !== false) {
dataobj.data.splice(0,removebefore);
}
dataobj.data.push([now*1000,sourcedata.capabilities.load.memory]);
break;
case 'upbps':
case 'downbps':
case 'clients':
//todo: depending on the stream..
if (!sourcedata.totals || !sourcedata.totals.data) {
dataobj.data.push([(now-600)*1000,0]);
dataobj.data.push([now*1000,0]);
}
else {
var fields = {};
for (var index in sourcedata.totals.fields) {
fields[sourcedata.totals.fields[index]] = index;
}
var time = sourcedata.totals.start;
dataobj.data = [];
if (time > now-590) {
//prepend data with 0
dataobj.data.push([(now-600)*1000,0]);
dataobj.data.push([time*1000-1,0]);
}
var index = 0;
dataobj.data.push([[time*1000,sourcedata.totals.data[index][fields[dataobj.type]]]]);
for (var i in sourcedata.totals.interval) {
if ((i % 2) == 1) {
//fill gaps with 0
time += sourcedata.totals.interval[i][1];
dataobj.data.push([time*1000,0]);
}
else {
for (var j = 0; j < sourcedata.totals.interval[i][0]; j++) {
time += sourcedata.totals.interval[i][1];
index++;
dataobj.data.push([time*1000,sourcedata.totals.data[index][fields[dataobj.type]]]);
}
if (i < sourcedata.totals.interval.length-1) {
dataobj.data.push([time*1000+1,0]);
}
}
}
if (now > time + 10) {
//append data with 0
dataobj.data.push([time*1000+1,0]);
dataobj.data.push([now*1000,0]);
}
}
break;
}
return dataobj;
}
function drawGraph(graph){
var datasets = graph.datasets;
if (datasets.length < 1) {
$('#'+graph.id).children('.graph,.legend').html('');
return;
}
var yaxes = [];
var yaxesTemplates = {
percentage: {
name: 'percentage',
color: 'black',
display: false,
tickColor: 0,
tickDecimals: 0,
tickFormatter: function(val,axis){
return val.toFixed(axis.tickDecimals) + '%';
},
tickLength: 0,
min: 0
},
amount: {
name: 'amount',
color: 'black',
display: false,
tickColor: 0,
tickDecimals: 0,
tickFormatter: function(val,axis){
return seperateThousands(val.toFixed(axis.tickDecimals),' ');
},
tickLength: 0,
min: 0
},
bytespersec: {
name: 'bytespersec',
color: 'black',
display: false,
tickColor: 0,
tickDecimals: 1,
tickFormatter: function(val,axis){
var suffix = ['bytes','KiB','MiB','GiB','TiB','PiB'];
if (val == 0) {
val = val+' '+suffix[0];
}
else {
var exponent = Math.floor(Math.log(Math.abs(val)) / Math.log(1024));
if (exponent < 0) {
val = val.toFixed(axis.tickDecimals)+' '+suffix[0];
}
else {
val = Math.round(val / Math.pow(1024,exponent) * Math.pow(10,axis.tickDecimals)) / Math.pow(10,axis.tickDecimals) +' '+suffix[exponent];
}
}
return val + '/s';
},
tickLength: 0,
ticks: function(axis,a,b,c,d){
//taken from flot source code (function setupTickGeneration),
//modified to think in multiples of 1024 by Carina van der Meer for DDVTECH
// heuristic based on the model a*sqrt(x) fitted to
// some data points that seemed reasonable
var noTicks = 0.3 * Math.sqrt($('.graph').first().height());
var delta = (axis.max - axis.min) / noTicks,
exponent = Math.floor(Math.log(Math.abs(delta)) / Math.log(1024)),
correcteddelta = delta / Math.pow(1024,exponent),
dec = -Math.floor(Math.log(correcteddelta) / Math.LN10),
maxDec = axis.tickDecimals;
if (maxDec != null && dec > maxDec) {
dec = maxDec;
}
var magn = Math.pow(10, -dec),
norm = correcteddelta / magn, // norm is between 1.0 and 10.0
size;
if (norm < 1.5) {
size = 1;
} else if (norm < 3) {
size = 2;
// special case for 2.5, requires an extra decimal
if (norm > 2.25 && (maxDec == null || dec + 1 <= maxDec)) {
size = 2.5;
++dec;
}
} else if (norm < 7.5) {
size = 5;
} else {
size = 10;
}
size *= magn;
size = size * Math.pow(1024,exponent);
if (axis.minTickSize != null && size < axis.minTickSize) {
size = axis.minTickSize;
}
axis.delta = delta;
axis.tickDecimals = Math.max(0, maxDec != null ? maxDec : dec);
axis.tickSize = size;
var ticks = [],
start = axis.tickSize * Math.floor(axis.min / axis.tickSize),
i = 0,
v = Number.NaN,
prev;
do {
prev = v;
v = start + i * axis.tickSize;
ticks.push(v);
++i;
} while (v < axis.max && v != prev);
return ticks;
},
min: 0
}
};
var xaxistemplates = {
time: {
name: 'time',
mode: 'time',
timezone: 'browser',
ticks: 5
}
}
var plotsets = [];
for (var i in datasets) {
if (datasets[i].display) {
if (yaxesTemplates[datasets[i].yaxistype].display === false) {
yaxes.push(yaxesTemplates[datasets[i].yaxistype]);
yaxesTemplates[datasets[i].yaxistype].display = yaxes.length;
}
datasets[i].yaxis = yaxesTemplates[datasets[i].yaxistype].display;
datasets[i].color = Number(i);
plotsets.push(datasets[i]);
}
}
if (yaxes[0]) { yaxes[0].color = 0; }
plot = $.plot(
$('#'+graph.id+' .graph'),
plotsets,
{
legend: {show: false},
xaxis: xaxistemplates[graph.type],
yaxes: yaxes,
grid: {
hoverable: true,
borderWidth: {top: 0, right: 0, bottom: 1, left: 1},
color: 'black',
backgroundColor: {colors: ['#fff','#ededed']}
}
}
);
$('#'+graph.id+' .legend').html(
$('<div>').addClass('legend-list').addClass('checklist')
);
var plotdata = plot.getOptions();
for (var i in datasets) {
var $checkbox = $('<input>').attr('type','checkbox').data('dataset-index',i).click(function(){
if ($(this).is(':checked')) {
datasets[$(this).data('dataset-index')].display = true;
}
else {
datasets[$(this).data('dataset-index')].display = false;
}
drawGraph($(this).parents('.graph-item'));
});
if (datasets[i].display) {
$checkbox.attr('checked','checked');
}
$('#'+graph.id+' .legend-list').append(
$('<label>').html(
$checkbox
).append(
$('<div>').addClass('series-color').css('background-color',plotdata.colors[datasets[i].color % plotdata.colors.length])
).append(
datasets[i].label
)
);
}
if (datasets.length > 0) {
$('#'+graph.id+' .legend').append(
$('<button>').text('Clear all').click(function(){
var graph = graphs[$(this).parents('.graph-item').attr('id')];
graph.datasets = [];
drawGraph(graph);
}).css({'float':'none'})
);
}
}
break;
case 'server stats':
var $cont = $('<div>').addClass('input_container');
@@ -1094,6 +1767,55 @@ function showTab(tabName,streamName) {
$('#page').html($cont);
break;
case 'email for help':
var config = $.extend({},settings.settings);
delete config.statistics;
config = JSON.stringify(config);
$('#page').html(
$('<div>').addClass('description').html(
'You can use this form to email MistServer support if you\'re having difficulties.<br>'
).append(
'A copy of your server config file will automatically be included.'
)
).append(
$('<div>').addClass('input_container').html(
$('<form>').html(
$('<label>').text('Your name:').append(
$('<input>').attr('type','text').attr('name','name')
)
).append(
$('<input>').attr('type','hidden').attr('name','company').val('-')
).append(
$('<label>').text('Your email address:').append(
$('<input>').attr('type','email').attr('name','email')
)
).append(
$('<input>').attr('type','hidden').attr('name','subject').val('Integrated Help')
).append(
$('<label>').text('Your message:').append(
$('<textarea>').attr('name','message').height('20em')
)
).append(
$('<label>').text('Your config file:').append(
$('<textarea>').attr('name','configfile').attr('readonly','readonly').css({'height':'20em','font-size':'0.7em'}).val(config)
)
).append(
$('<button>').text('Send').click(function(e){
var data = $(this).parents('form').serialize();
$.ajax({
type: 'POST',
url: 'http://mistserver.org/contact_us?skin=plain',
data: data,
success: function(d) {
$('#page').html(d);
}
});
e.preventDefault();
})
)
)
);
break;
case 'disconnect':
showTab('login');
$('#connection').addClass('red').removeClass('green').text('Disconnected');
@@ -1110,17 +1832,32 @@
if ((settings.credentials.authstring) && (!settings.settings.LTS)) {
$('.LTS-only input').add('.LTS-only select').add('.LTS-only button').attr('disabled','disabled');
$('.LTS-only, .LTS-only p, .LTS-only label, .LTS-only button ').css('color','#b4b4b4');
//$('.LTS-only, .LTS-only p, .LTS-only label, .LTS-only button').css('color','#b4b4b4');
$('.LTS-only, .LTS-only > *').filter(':not(.LTSstuff_done)').each(function(){
var t = [];
if ($(this).attr('title')) {
t.push($(this).attr('title'));
}
t.push('This is feature is only available in the LTS version.');
t.push('This feature is only available in the LTS version.');
$(this).attr('title',t.join(' ')).addClass('LTSstuff_done');
});
$('#page .LTS-only').prepend(
$('<a>').text('Upgrade to LTS').attr('target','_blank').attr('href','http://mistserver.org/products/MistServer LTS').addClass('fakebutton')
);
$('.linktoReleaseNotes.notedited').each(function(){
$(this).attr('href',$(this).attr('href')+'/'+settings.settings.config.version.split('-')[0]).removeClass('.notedited');
});
}
else if (settings.settings.LTS) {
$('.LTS-only').removeClass('LTS-only');
$('.linktoTnC.notLTSlink').attr('href','http://mistserver.org/wiki/MistServerLTS_license').removeClass('notLTSlink');
$('.linktoReleaseNotes.notedited').each(function(){
$(this).attr('href',$(this).attr('href')+'/'+settings.settings.config.version.split('-')[0]+'LTS').removeClass('.notedited');
});
}
if (ih) {
ihAddBalloons();
}
}


@@ -0,0 +1 @@
(function($){var options={crosshair:{mode:null,color:"rgba(170, 0, 0, 0.80)",lineWidth:1}};function init(plot){var crosshair={x:-1,y:-1,locked:false};plot.setCrosshair=function setCrosshair(pos){if(!pos)crosshair.x=-1;else{var o=plot.p2c(pos);crosshair.x=Math.max(0,Math.min(o.left,plot.width()));crosshair.y=Math.max(0,Math.min(o.top,plot.height()))}plot.triggerRedrawOverlay()};plot.clearCrosshair=plot.setCrosshair;plot.lockCrosshair=function lockCrosshair(pos){if(pos)plot.setCrosshair(pos);crosshair.locked=true};plot.unlockCrosshair=function unlockCrosshair(){crosshair.locked=false};function onMouseOut(e){if(crosshair.locked)return;if(crosshair.x!=-1){crosshair.x=-1;plot.triggerRedrawOverlay()}}function onMouseMove(e){if(crosshair.locked)return;if(plot.getSelection&&plot.getSelection()){crosshair.x=-1;return}var offset=plot.offset();crosshair.x=Math.max(0,Math.min(e.pageX-offset.left,plot.width()));crosshair.y=Math.max(0,Math.min(e.pageY-offset.top,plot.height()));plot.triggerRedrawOverlay()}plot.hooks.bindEvents.push(function(plot,eventHolder){if(!plot.getOptions().crosshair.mode)return;eventHolder.mouseout(onMouseOut);eventHolder.mousemove(onMouseMove)});plot.hooks.drawOverlay.push(function(plot,ctx){var c=plot.getOptions().crosshair;if(!c.mode)return;var plotOffset=plot.getPlotOffset();ctx.save();ctx.translate(plotOffset.left,plotOffset.top);if(crosshair.x!=-1){var adj=plot.getOptions().crosshair.lineWidth%2===0?0:.5;ctx.strokeStyle=c.color;ctx.lineWidth=c.lineWidth;ctx.lineJoin="round";ctx.beginPath();if(c.mode.indexOf("x")!=-1){var drawX=Math.round(crosshair.x)+adj;ctx.moveTo(drawX,0);ctx.lineTo(drawX,plot.height())}if(c.mode.indexOf("y")!=-1){var drawY=Math.round(crosshair.y)+adj;ctx.moveTo(0,drawY);ctx.lineTo(plot.width(),drawY)}ctx.stroke()}ctx.restore()});plot.hooks.shutdown.push(function(plot,eventHolder){eventHolder.unbind("mouseout",onMouseOut);eventHolder.unbind("mousemove",onMouseMove)})}$.plot.plugins.push({init:init,options:options,name:"crosshair",version:"1.0"})})(jQuery);

lsp/plugins/jquery.flot.min.js (vendored, new file)

File diff suppressed because one or more lines are too long

lsp/plugins/jquery.flot.time.min.js (vendored, new file)

File diff suppressed because one or more lines are too long

lsp/plugins/jquery.js (vendored, executable file)

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long


@@ -3,10 +3,12 @@
#include <string>
#include <iostream>
#include <sstream>
#include <mist/dtsc.h>
#include <mist/json.h>
#include <mist/config.h>
#include <mist/defines.h>
///\brief Holds everything unique to the analysers.
namespace Analysers {
@@ -17,11 +19,34 @@ namespace Analysers {
///\return The return code of the analyser.
int analyseDTSC(Util::Config conf){
DTSC::File F(conf.getString("filename"));
std::cout << F.getMeta().toJSON().toPrettyString() << std::endl;
std::stringstream meta;
F.getMeta().toPrettyString(meta,0, 0x03);
std::cout << meta.str() << std::endl;
int bPos = 0;
F.seek_bpos(0);
F.parseNext();
while (F.getJSON()){
std::cout << F.getJSON().toPrettyString() << std::endl;
JSON::Value tmp;
std::string tmpStr;
while (F.getPacket()){
tmpStr = std::string(F.getPacket().getData(), F.getPacket().getDataLen());
switch (F.getPacket().getVersion()){
case DTSC::DTSC_V1: {
unsigned int i = 8;
JSON::fromDTMI((const unsigned char*)tmpStr.data(), tmpStr.size(), i, tmp);
break;
}
case DTSC::DTSC_V2: {
unsigned int i = 8;
JSON::fromDTMI2((const unsigned char*)tmpStr.data(), tmpStr.size(), i, tmp);
break;
}
default:
DEBUG_MSG(DLVL_WARN,"Invalid dtsc packet @ bpos %d", bPos);
break;
}
std::cout << tmp.toPrettyString() << std::endl;
bPos = F.getBytePos();
F.parseNext();
}
return 0;


@@ -10,7 +10,73 @@
#include <mist/theora.h>
namespace Analysers{
int analyseOGG(){
std::string Opus_prettyPacket(char * part,int len){
if (len < 1){
return "Invalid packet (0 byte length)";
}
std::stringstream r;
char config = part[0] >> 3;
char code = part[0] & 3;
if ((part[0] & 4) == 4){r << "Stereo, ";}else{r << "Mono, ";}
if (config < 14){
r << "SILK, ";
if (config < 4){r << "NB, ";}
if (config < 8 && config > 3){r << "MB, ";}
if (config < 14 && config > 7){r << "WB, ";}
if (config % 4 == 0){r << "10ms";}
if (config % 4 == 1){r << "20ms";}
if (config % 4 == 2){r << "40ms";}
if (config % 4 == 3){r << "60ms";}
}
if (config < 16 && config > 13){
r << "Hybrid, ";
if (config < 14){r << "SWB, ";}else{r << "FB, ";}
if (config % 2 == 0){r << "10ms";}else{r << "20ms";}
}
if (config > 15){
r << "CELT, ";
if (config < 20){r << "NB, ";}
if (config < 24 && config > 19){r << "WB, ";}
if (config < 28 && config > 23){r << "SWB, ";}
if (config > 27){r << "FB, ";}
if (config % 4 == 0){r << "2.5ms";}
if (config % 4 == 1){r << "5ms";}
if (config % 4 == 2){r << "10ms";}
if (config % 4 == 3){r << "20ms";}
}
if (code == 0){
r << ": 1 packet (" << (len-1) << "b)";
return r.str();
}
if (code == 1){
r << ": 2 packets (" << ((len-1)/2) << "b / " << ((len-1)/2) << "b)";
return r.str();
}
if (code == 2){
if (len < 2){
return "Invalid packet (code 2 must be > 1 byte long)";
}
if (part[1] < 252){
r << ": 2 packets (" << (int)part[1] << "b / " << (int)(len-2-part[1]) << "b)";
}else{
int ilen = part[1] + part[2]*4;
r << ": 2 packets (" << ilen << "b / " << (int)(len-3-ilen) << "b)";
}
return r.str();
}
//code 3
bool VBR = (part[1] & 128) == 128;
bool pad = (part[1] & 64) == 64;
bool packets = (part[1] & 63);
r << ": " << packets << " packets (VBR = " << VBR << ", padding = " << pad << ")";
return r.str();
}
int analyseOGG(int argc, char ** argv){
Util::Config conf = Util::Config(argv[0], PACKAGE_VERSION);
conf.addOption("pages", JSON::fromString("{\"long\":\"pages\", \"short\":\"p\", \"long_off\":\"nopages\", \"short_off\":\"P\", \"default\":0, \"help\":\"Enable/disable printing of Ogg pages\"}"));
conf.parseArgs(argc, argv);
std::map<int,std::string> sn2Codec;
std::string oggBuffer;
OGG::Page oggPage;
@@ -22,17 +88,94 @@ namespace Analysers{
}
//while OGG::page check function read
while (oggPage.read(oggBuffer)){//reading ogg to string
oggPage.setInternalCodec("");
if (oggPage.getHeaderType() & 0x02){
//print the Ogg page details, if requested
if (conf.getBool("pages")){
std::cout << oggPage.toPrettyString() << std::endl;
}
//attempt to detect codec if this is the first page of a stream
if (oggPage.getHeaderType() & OGG::BeginOfStream){
if (memcmp("theora",oggPage.getFullPayload() + 1,6) == 0){
sn2Codec[oggPage.getBitstreamSerialNumber()] = "theora";
sn2Codec[oggPage.getBitstreamSerialNumber()] = "Theora";
}
if (memcmp("vorbis",oggPage.getFullPayload() + 1,6) == 0){
sn2Codec[oggPage.getBitstreamSerialNumber()] = "vorbis";
sn2Codec[oggPage.getBitstreamSerialNumber()] = "Vorbis";
}
if (memcmp("OpusHead",oggPage.getFullPayload(),8) == 0){
sn2Codec[oggPage.getBitstreamSerialNumber()] = "Opus";
}
if (sn2Codec[oggPage.getBitstreamSerialNumber()] != ""){
std::cout << "Bitstream " << oggPage.getBitstreamSerialNumber() << " recognized as " << sn2Codec[oggPage.getBitstreamSerialNumber()] << std::endl;
}else{
std::cout << "Bitstream " << oggPage.getBitstreamSerialNumber() << " could not be recognized as any known codec" << std::endl;
}
}
if (sn2Codec[oggPage.getBitstreamSerialNumber()] == "Theora"){
std::cout << "Theora data" << std::endl;
int offset = 0;
for (unsigned int i = 0; i < oggPage.getSegmentTableDeque().size(); i++){
theora::header tmpHeader;
int len = oggPage.getSegmentTableDeque()[i];
if (tmpHeader.read(oggPage.getFullPayload()+offset,len)){
std::cout << tmpHeader.toPrettyString(2);
}
theora::frame tmpFrame;
if (tmpFrame.read(oggPage.getFullPayload()+offset,len)){
std::cout << tmpFrame.toPrettyString(2);
}
offset += len;
}
}else if(sn2Codec[oggPage.getBitstreamSerialNumber()] == "Vorbis"){
std::cout << "Vorbis data" << std::endl;
int offset = 0;
for (unsigned int i = 0; i < oggPage.getSegmentTableDeque().size(); i++){
vorbis::header tmpHeader;
int len = oggPage.getSegmentTableDeque()[i];
if (tmpHeader.read(oggPage.getFullPayload()+offset,len)){
std::cout << tmpHeader.toPrettyString(2);
}
offset += len;
}
}else if(sn2Codec[oggPage.getBitstreamSerialNumber()] == "Opus"){
std::cout << "Opus data" << std::endl;
int offset = 0;
for (unsigned int i = 0; i < oggPage.getSegmentTableDeque().size(); i++){
int len = oggPage.getSegmentTableDeque()[i];
char * part = oggPage.getFullPayload() + offset;
if (len >= 8 && memcmp(part, "Opus", 4) == 0){
if (memcmp(part, "OpusHead", 8) == 0){
std::cout << " Version: " << (int)(part[8]) << std::endl;
std::cout << " Channels: " << (int)(part[9]) << std::endl;
std::cout << " Pre-skip: " << (int)(part[10] + (part[11] << 8)) << std::endl;
std::cout << " Orig. sample rate: " << (int)(part[12] + (part[13] << 8) + (part[14] << 16) + (part[15] << 24)) << std::endl;
std::cout << " Gain: " << (int)(part[16] + (part[17] << 8)) << std::endl;
std::cout << " Channel map: " << (int)(part[18]) << std::endl;
if (part[18] > 0){
std::cout << " Channel map family " << (int)(part[18]) << " not implemented - output incomplete" << std::endl;
}
}
if (memcmp(part, "OpusTags", 8) == 0){
unsigned int vendor_len = part[8] + (part[9]<<8) + (part[10]<<16) + (part[11]<<24);
std::cout << " Vendor: " << std::string(part+12, vendor_len) << std::endl;
char * str_data = part+12+vendor_len;
unsigned int strings = str_data[0] + (str_data[1]<<8) + (str_data[2]<<16) + (str_data[3]<<24);
std::cout << " Tags: (" << strings << ")" << std::endl;
str_data += 4;
for (unsigned int j = 0; j < strings; j++){
unsigned int strlen = str_data[0] + (str_data[1]<<8) + (str_data[2]<<16) + (str_data[3]<<24);
str_data += 4;
std::cout << " [" << j << "] " << std::string(str_data, strlen) << std::endl;
str_data += strlen;
}
}
}else{
std::cout << " " << Opus_prettyPacket(part,len) << std::endl;
}
offset += len;
}
}
oggPage.setInternalCodec(sn2Codec[oggPage.getBitstreamSerialNumber()]);
std::cout << oggPage.toPrettyString() << std::endl;
}
}
return 0;
@ -40,8 +183,6 @@ namespace Analysers{
}
int main(int argc, char ** argv){
Util::Config conf = Util::Config(argv[0], PACKAGE_VERSION);
conf.parseArgs(argc, argv);
return Analysers::analyseOGG();
return Analysers::analyseOGG(argc, argv);
}


@ -1,345 +0,0 @@
/// \file buffer.cpp
/// Contains the main code for the Buffer.
#include <fcntl.h>
#include <iostream>
#include <string>
#include <vector>
#include <cstdlib>
#include <cstdio>
#include <string.h>
#include <unistd.h>
#include <signal.h>
#include <sstream>
#include <sys/time.h>
#include <mist/config.h>
#include <mist/timing.h>
#include "buffer_stream.h"
#include <mist/stream.h>
#include <mist/defines.h>
/// Holds all code unique to the Buffer.
namespace Buffer {
volatile bool buffer_running = true; ///< Set to false when shutting down.
Stream * thisStream = 0;
Socket::Server SS; ///< The server socket.
///\brief A function running in a thread to send all statistics.
///\param empty A null pointer.
void handleStats(void * empty){
#if defined(_TTHREAD_POSIX_) && defined(WITH_THREADNAMES) && !(defined(__FreeBSD__) || defined(__APPLE__) || defined(__MACH__) || defined(_WIN32) || defined(__CYGWIN__))
pthread_setname_np(pthread_self(), "StatsHandler");
#endif
if (empty != 0){
return;
}
std::string double_newline = "\n\n";
Socket::Connection StatsSocket = Socket::Connection(Util::getTmpFolder() + "statistics", true);
while (buffer_running){
Util::sleep(1000); //sleep one second
if ( !StatsSocket.connected()){
StatsSocket = Socket::Connection(Util::getTmpFolder() + "statistics", true);
}
if (StatsSocket.connected()){
StatsSocket.SendNow(Stream::get()->getStats());
StatsSocket.SendNow(double_newline);
if (StatsSocket.spool()){
//Got a response.
buffer_running = false;
}
}
}
StatsSocket.close();
}
///\brief A function to handle input data.
///\param conn A socket reference.
void handlePushIn(Socket::Connection & conn){
#if defined(_TTHREAD_POSIX_) && defined(WITH_THREADNAMES) && !(defined(__FreeBSD__) || defined(__APPLE__) || defined(__MACH__) || defined(_WIN32) || defined(__CYGWIN__))
pthread_setname_np(pthread_self(), "Push Input");
#endif
conn.setBlocking(true);
int sockNo = 0;
while (buffer_running && conn.connected()){
while (thisStream->parsePacket(conn)){
//do nothing while parsing
}
Util::sleep(10);//sleep to prevent high CPU usage
}
conn.close();
if (buffer_running){
thisStream->endStream();
}
long long int wait_time = Util::getMS();
while (Util::getMS() - wait_time < thisStream->metadata.bufferWindow){
Util::sleep(thisStream->metadata.bufferWindow - (Util::getMS() - wait_time));
}
thisStream->removeSocket(sockNo);
}
///\brief A function running a thread to handle input data through stdin.
///Automatically slows down to realtime playback.
///\param empty A null pointer.
void handleStdin(void * empty){
if (empty != 0){
return;
}
#if defined(_TTHREAD_POSIX_) && defined(WITH_THREADNAMES) && !(defined(__FreeBSD__) || defined(__APPLE__) || defined(__MACH__) || defined(_WIN32) || defined(__CYGWIN__))
pthread_setname_np(pthread_self(), "Standard Input");
#endif
long long int timeDiff = 0; //difference between local time and stream time
unsigned int lastPacket = 0; //last parsed packet timestamp
std::string inBuffer;
char charBuffer[1024 * 10];
unsigned int charCount;
long long int now;
while (std::cin.good() && buffer_running){
//slow down packet receiving to real-time
now = Util::getMS();
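//parse the next packet once local time (minus the offset) catches up to the last packet timestamp, or resync when clock and stream drift more than 15 seconds apart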
if (((now - timeDiff) >= lastPacket) || (lastPacket - (now - timeDiff) > 15000)){
if (thisStream->parsePacket(inBuffer)){
lastPacket = thisStream->getTime();
if ((now - timeDiff - lastPacket) > 15000 || (now - timeDiff - lastPacket < -15000)){
timeDiff = now - lastPacket;
}
}else{
std::cin.read(charBuffer, 1024 * 10);
charCount = std::cin.gcount();
inBuffer.append(charBuffer, charCount);
}
}else{
Util::sleep(std::min(15LL, lastPacket - (now - timeDiff)));
}
}
buffer_running = false;
}
///\brief A function running in a thread to handle a new user connection.
///\param v_usr The user that is connected.
void handleUser(void * v_usr){
std::set<int> allowedTracks;
user * usr = (user*)v_usr;
thisStream->addUser(usr);
#if DEBUG >= 5
std::cerr << "Thread launched for user " << usr->sID << ", socket number " << usr->S.getSocket() << std::endl;
#endif
#if defined(_TTHREAD_POSIX_) && defined(WITH_THREADNAMES) && !(defined(__FreeBSD__) || defined(__APPLE__) || defined(__MACH__) || defined(_WIN32) || defined(__CYGWIN__))
pthread_setname_np(pthread_self(), usr->sID.c_str());
#endif
usr->myRing = thisStream->getRing();
thisStream->sendMeta(usr->S);
while (usr->S.connected()){
if (usr->myRing->playCount){
if (usr->myRing->waiting){
Stream::get()->waitForData();
if ( !Stream::get()->isNewest(usr->myRing->b, allowedTracks)){
usr->myRing->waiting = false;
usr->myRing->b = Stream::get()->getNext(usr->myRing->b, allowedTracks);
if ((Stream::get()->getPacket(usr->myRing->b).isMember("keyframe") && (usr->myRing->playCount > 0)) || (usr->playUntil && usr->playUntil <= Stream::get()->getPacket(usr->myRing->b)["time"].asInt())){
usr->myRing->playCount--;
if (usr->myRing->playCount < 1 || usr->playUntil <= Stream::get()->getPacket(usr->myRing->b)["time"].asInt()){
usr->myRing->playCount = 0;
JSON::Value pausemark;
pausemark["trackid"] = 0ll;
pausemark["mark"] = "pause";
pausemark["time"] = Stream::get()->getPacket(usr->myRing->b)["time"].asInt();
pausemark.sendTo(usr->S);
}
}
}
}else{
//complete a send
Stream::get()->sendPacket(usr->myRing->b, usr->S);
if ( !usr->S.connected()){break;}
//switch to next buffer
if (Stream::get()->isNewest(usr->myRing->b, allowedTracks)){
//no next buffer? go in waiting mode.
usr->myRing->waiting = true;
}else{
usr->myRing->b = Stream::get()->getNext(usr->myRing->b, allowedTracks);
if ((Stream::get()->getPacket(usr->myRing->b).isMember("keyframe") && (usr->myRing->playCount > 0)) || (usr->playUntil && usr->playUntil <= Stream::get()->getPacket(usr->myRing->b)["time"].asInt())){
usr->myRing->playCount--;
if (usr->myRing->playCount < 1 || usr->playUntil <= Stream::get()->getPacket(usr->myRing->b)["time"].asInt()){
usr->myRing->playCount = 0;
JSON::Value pausemark;
pausemark["trackid"] = 0ll;
pausemark["mark"] = "pause";
pausemark["time"] = Stream::get()->getPacket(usr->myRing->b)["time"].asInt();
pausemark.sendTo(usr->S);
}
}
}
}
}
if (usr->S.spool()){
while (usr->S.Received().size()){
//delete anything that doesn't end with a newline
if ( !usr->S.Received().get().empty() && *(usr->S.Received().get().rbegin()) != '\n'){
usr->S.Received().get().clear();
continue;
}
usr->S.Received().get().resize(usr->S.Received().get().size() - 1);
if ( !usr->S.Received().get().empty()){
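//commands are single-character prefixed lines: P = push, S = stats, t = track selection, s = second-seek, p = play, o = once-play, q = quit-playing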
switch (usr->S.Received().get()[0]){
case 'P': { //Push
if (thisStream->checkWaitingIP(usr->S.Received().get().substr(2))){
usr->S.Received().get().clear();
Socket::Connection tmp = usr->S;
usr->S = Socket::Connection( -1);
thisStream->removeUser(usr);
thisStream->dropRing(usr->myRing);
delete usr;
return handlePushIn(tmp);
}else{
usr->Disconnect("Push denied - invalid IP address!");
}
break;
}
case 'S': { //Stats
usr->tmpStats = Stats(usr->S.Received().get().substr(2));
unsigned int secs = usr->tmpStats.conntime - usr->lastStats.conntime;
if (secs < 1){
secs = 1;
}
usr->curr_up = (usr->tmpStats.up - usr->lastStats.up) / secs;
usr->curr_down = (usr->tmpStats.down - usr->lastStats.down) / secs;
usr->lastStats = usr->tmpStats;
thisStream->saveStats(usr->sID, usr->tmpStats);
thisStream->sendMeta(usr->S);
break;
}
case 't': {
if (usr->S.Received().get().size() >= 3){
allowedTracks.clear();
std::string tmp = usr->S.Received().get().substr(2);
while (tmp != ""){
allowedTracks.insert(atoi(tmp.substr(0,tmp.find(' ')).c_str()));
if (tmp.find(' ') != std::string::npos){
tmp.erase(0,tmp.find(' ')+1);
}else{
tmp = "";
}
}
}
break;
}
case 's': { //second-seek
unsigned int ms = JSON::Value(usr->S.Received().get().substr(2)).asInt();
usr->myRing->waiting = false;
usr->myRing->starved = false;
usr->myRing->b = thisStream->msSeek(ms, allowedTracks);
if (usr->myRing->playCount > 0){
usr->myRing->playCount = 0;
}
break;
}
case 'p': { //play
usr->myRing->playCount = -1;
if (usr->S.Received().get().size() >= 2){
usr->playUntil = atoi(usr->S.Received().get().substr(2).c_str());
}else{
usr->playUntil = 0;
}
break;
}
case 'o': { //once-play
if (usr->myRing->playCount >= 0){
usr->myRing->playCount++;
}
break;
}
case 'q': { //quit-playing
usr->myRing->playCount = 0;
break;
}
}
usr->S.Received().get().clear();
}
}
}
if (usr->myRing->waiting || !usr->myRing->playCount){
Util::sleep(300); //sleep 300ms
}
}
usr->Disconnect("Socket closed.");
thisStream->dropRing(usr->myRing);
thisStream->removeUser(usr);
delete usr;
}
///\brief Starts a loop, waiting for connections to send data to.
///\param argc The number of arguments to the program.
///\param argv The arguments to the program.
///\return The return code of the buffer.
int Start(int argc, char ** argv){
Util::Config conf = Util::Config(argv[0], PACKAGE_VERSION);
conf.addOption("stream_name",
JSON::fromString("{\"arg_num\":1, \"arg\":\"string\", \"help\":\"Name of the stream this buffer will be providing.\"}"));
conf.addOption("awaiting_ip",
JSON::fromString(
"{\"arg_num\":2, \"arg\":\"string\", \"default\":\"\", \"help\":\"IP address to expect incoming data from. This will completely disable reading from standard input if used.\"}"));
conf.addOption("reportstats",
JSON::fromString("{\"default\":0, \"help\":\"Report stats to a controller process.\", \"short\":\"s\", \"long\":\"reportstats\"}"));
conf.addOption("time",
JSON::fromString(
"{\"default\":20000, \"arg\": \"integer\", \"help\":\"Buffer a specied amount of time in ms.\", \"short\":\"t\", \"long\":\"time\"}"));
conf.parseArgs(argc, argv);
std::string name = conf.getString("stream_name");
SS = Util::Stream::makeLive(name);
if ( !SS.connected()){
perror("Could not create stream socket");
return 1;
}
SS.setBlocking(false);
conf.activate();
#if defined(_TTHREAD_POSIX_) && defined(WITH_THREADNAMES) && !(defined(__FreeBSD__) || defined(__APPLE__) || defined(__MACH__) || defined(_WIN32) || defined(__CYGWIN__))
pthread_setname_np(pthread_self(), "Main accepter");
#endif
thisStream = Stream::get();
thisStream->setName(name);
thisStream->setBufferTime(conf.getInteger("time"));
Socket::Connection incoming;
Socket::Connection std_input(fileno(stdin));
if (conf.getBool("reportstats")){
tthread::thread StatsThread(handleStats, 0);
StatsThread.detach();
}
std::string await_ip = conf.getString("awaiting_ip");
if (await_ip == ""){
tthread::thread StdinThread(handleStdin, 0);
StdinThread.detach();
}else{
thisStream->setWaitingIP(await_ip);
}
unsigned int userId = 0;
SS.setBlocking(true);
while (buffer_running && SS.connected() && conf.is_active){
//check for new connections, accept them if there are any
//starts a thread for every accepted connection
incoming = SS.accept(true);
if (incoming.connected()){
tthread::thread thisUser(handleUser, (void *)new user(incoming, ++userId));
thisUser.detach();
}
} //main loop
// disconnect listener
buffer_running = false;
SS.close();
delete thisStream;
return 0;
}
} //Buffer namespace
///\brief Entry point for Buffer, simply calls Buffer::Start().
int main(int argc, char ** argv){
return Buffer::Start(argc, argv);
} //main


@ -1,350 +0,0 @@
/// \file buffer_stream.cpp
/// Contains definitions for buffer streams.
#include "buffer_stream.h"
#include <mist/timing.h>
#include <mist/defines.h>
#include <stdlib.h>
namespace Buffer {
static JSON::Value ctrl_log;
void Stream::Log(std::string type, std::string message){
JSON::Value l;
l.append(type);
l.append(message);
ctrl_log.append(l);
}
/// Stores the singleton reference.
Stream * Stream::ref = 0;
/// Returns a reference to the singleton instance of this class.
/// \return A reference to the class.
Stream * Stream::get(){
static tthread::mutex creator;
if (ref == 0){
//prevent creating two at the same time
creator.lock();
if (ref == 0){
ref = new Stream();
ref->metadata.live = true;
}
creator.unlock();
}
return ref;
}
/// Creates a new DTSC::Stream object, private function so only one instance can exist.
Stream::Stream() : DTSC::Stream(5){}
/// Do cleanup on delete.
Stream::~Stream(){
tthread::lock_guard<tthread::recursive_mutex> guard(stats_mutex);
if (users.size() > 0){
for (usersIt = users.begin(); usersIt != users.end(); usersIt++){
if (( * *usersIt).S.connected()){
( * *usersIt).S.close();
}
}
}
moreData.notify_all();
}
/// Calculate and return the current statistics.
/// \return The current statistics in JSON format.
std::string & Stream::getStats(){
static std::string ret;
long long int now = Util::epoch();
unsigned int tot_up = 0, tot_down = 0, tot_count = 0;
tthread::lock_guard<tthread::recursive_mutex> guard(stats_mutex);
if (users.size() > 0){
for (usersIt = users.begin(); usersIt != users.end(); usersIt++){
tot_down += ( * *usersIt).curr_down;
tot_up += ( * *usersIt).curr_up;
tot_count++;
}
}
Storage["totals"]["down"] = tot_down;
Storage["totals"]["up"] = tot_up;
Storage["totals"]["count"] = tot_count;
Storage["totals"]["now"] = now;
Storage["buffer"] = name;
rw_mutex.lock();
Storage["meta"] = metadata.toJSON();
rw_mutex.unlock();
if (Storage["meta"].isMember("tracks")){
for (JSON::ObjIter oIt = Storage["meta"]["tracks"].ObjBegin(); oIt != Storage["meta"]["tracks"].ObjEnd(); ++oIt){
oIt->second.removeMember("fragments");
oIt->second.removeMember("keys");
oIt->second.removeMember("parts");
oIt->second.removeMember("idheader");
oIt->second.removeMember("commentheader");
}
}
Storage["ctrl_log"] = ctrl_log;
ctrl_log.null();
ret = Storage.toString();
Storage["log"].null();
return ret;
}
/// Set the IP address to accept push data from.
/// \param ip The new IP to accept push data from.
void Stream::setWaitingIP(std::string ip){
waiting_ip = ip;
}
///\brief Check if this is the IP address to accept push data from.
///\param push_request The IP address to check, followed by a space and the password to check.
///\return True if it is the correct address or password, false otherwise.
bool Stream::checkWaitingIP(std::string push_request){
std::string ip = push_request.substr(0, push_request.find(' '));
std::string pass = push_request.substr(push_request.find(' ') + 1);
if (waiting_ip.length() > 0 && waiting_ip[0] == '@'){
if (pass == waiting_ip.substr(1)){
return true;
}else{
Log("BUFF", "Push to stream " + name + " denied, incorrect password: "+pass);
return false;
}
}else{
if (ip == waiting_ip || ip == "::ffff:" + waiting_ip){
return true;
}else{
Log("BUFF", "Push to stream " + name + " denied, wrong IP: "+ip+" != (::ffff:)"+waiting_ip);
return false;
}
}
}
/// Stores intermediate statistics.
/// \param username The name of the user.
/// \param stats The final statistics to store.
void Stream::saveStats(std::string username, Stats & stats){
tthread::lock_guard<tthread::recursive_mutex> guard(stats_mutex);
Storage["curr"][username]["connector"] = stats.connector;
Storage["curr"][username]["up"] = stats.up;
Storage["curr"][username]["down"] = stats.down;
Storage["curr"][username]["conntime"] = stats.conntime;
Storage["curr"][username]["host"] = stats.host;
Storage["curr"][username]["start"] = Util::epoch() - stats.conntime;
}
/// Stores final statistics.
/// \param username The name of the user.
/// \param stats The final statistics to store.
/// \param reason The reason for disconnecting.
void Stream::clearStats(std::string username, Stats & stats, std::string reason){
tthread::lock_guard<tthread::recursive_mutex> guard(stats_mutex);
if (Storage["curr"].isMember(username)){
Storage["curr"].removeMember(username);
#if DEBUG >= 4
std::cout << "Disconnected user " << username << ": " << reason << ". " << stats.connector << " transferred " << stats.up << " up and "
<< stats.down << " down in " << stats.conntime << " seconds to " << stats.host << std::endl;
#endif
}
Storage["log"][username]["connector"] = stats.connector;
Storage["log"][username]["up"] = stats.up;
Storage["log"][username]["down"] = stats.down;
Storage["log"][username]["conntime"] = stats.conntime;
Storage["log"][username]["host"] = stats.host;
Storage["log"][username]["start"] = Util::epoch() - stats.conntime;
}
/// The deletion callback override that will disconnect users
/// who are currently receiving a tag that is being deleted.
void Stream::deletionCallback(DTSC::livePos deleting){
tthread::lock_guard<tthread::recursive_mutex> guard(stats_mutex);
for (usersIt = users.begin(); usersIt != users.end(); usersIt++){
if ((*usersIt)->myRing->playCount && (*usersIt)->myRing->b == deleting){
(*usersIt)->Disconnect("Buffer underrun");
}
}
}
/// Sets the buffer name.
/// \param n The new name of the buffer.
void Stream::setName(std::string n){
name = n;
}
void Stream::sendPacket(DTSC::livePos & num, Socket::Connection & S){
rw_mutex.lock();
if (!getPacket(num) && buffers.size()){
DEBUG_MSG(DLVL_DEVEL, "Oh noes, ran out of packets! Resetting to beginning...");
num = buffers.rbegin()->first;
}
getPacket(num).sendTo(S);
rw_mutex.unlock();
}
/// parsePacket override that will lock the rw_mutex during parsing.
bool Stream::parsePacket(std::string & buffer){
rw_mutex.lock();
bool ret = DTSC::Stream::parsePacket(buffer);
rw_mutex.unlock();
if (ret){
rw_change.notify_all();
moreData.notify_all();
}
return ret;
}
/// getNext override that will lock the rw_mutex during checking.
DTSC::livePos Stream::getNext(DTSC::livePos & pos, std::set<int> & allowedTracks){
tthread::lock_guard<tthread::mutex> guard(rw_mutex);
return DTSC::Stream::getNext(pos, allowedTracks);
}
/// endStream override that will lock the rw_mutex
void Stream::endStream(){
tthread::lock_guard<tthread::mutex> guard(rw_mutex);
return DTSC::Stream::endStream();
}
/// Removes a track and all related buffers from the stream.
void Stream::removeTrack(int trackId){
rw_mutex.lock();
metadata.tracks.erase(trackId);
rw_mutex.unlock();
std::set<DTSC::livePos> toDelete;
for (std::map<DTSC::livePos, JSON::Value >::iterator it = buffers.begin(); it != buffers.end(); it++){
if (it->first.trackID == (unsigned long long int)trackId){
toDelete.insert(it->first);
}
}
while (toDelete.size()){
deletionCallback(*toDelete.begin());
buffers.erase(*toDelete.begin());
keyframes[trackId].erase(*toDelete.begin());
toDelete.erase(toDelete.begin());
}
}
/// Calls removeTrack on all tracks that were streaming from this socket number.
void Stream::removeSocket(int sockNo){
std::set<int> toDelete;
std::map<int,DTSC::Track>::iterator it;
rw_mutex.lock();
for (it = metadata.tracks.begin(); it != metadata.tracks.end(); ++it){
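//pushed tracks carry the source socket number in the upper bits of their track ID, so match on (sockNo << 16) to find this socket's tracks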
if ((it->first & (sockNo << 16)) == (sockNo << 16)){
toDelete.insert(it->first);
Log("BUFF", "Stream "+name+" lost input for track: "+ it->second.getIdentifier());
}
}
rw_mutex.unlock();
while (toDelete.size()){
removeTrack(*toDelete.begin());
toDelete.erase(toDelete.begin());
}
}
/// parsePacket override that will lock the rw_mutex during parsing.
bool Stream::parsePacket(Socket::Connection & c){
bool ret = false;
if (!c.spool()){
return ret;
}
rw_mutex.lock();
while (DTSC::Stream::parsePacket(c.Received())){
ret = true;
}
rw_mutex.unlock();
if (ret){
rw_change.notify_all();
moreData.notify_all();
}
return ret;
}
/// Metadata sender that locks the rw_mutex during sending.
void Stream::sendMeta(Socket::Connection & s){
if (metadata){
rw_mutex.lock();
DTSC::Meta tmpMeta = metadata;
rw_mutex.unlock();
tmpMeta.send(s);
}
}
/// Add a user to the userlist.
/// \param newUser The user to be added.
void Stream::addUser(user * newUser){
tthread::lock_guard<tthread::recursive_mutex> guard(stats_mutex);
users.insert(newUser);
}
/// Removes a user from the userlist.
/// \param oldUser The user to be removed.
void Stream::removeUser(user * oldUser){
tthread::lock_guard<tthread::recursive_mutex> guard(stats_mutex);
users.erase(oldUser);
}
/// Blocks the thread until new data is available.
void Stream::waitForData(){
tthread::lock_guard<tthread::recursive_mutex> guard(stats_mutex);
moreData.wait(stats_mutex);
}
///Creates a new user from a newly connected socket.
///Also prints "User connected" text to stdout.
///\param fd A connection to the user.
///\param ID Unique ID of the user.
user::user(Socket::Connection fd, long long ID){
sID = JSON::Value(ID).asString();
S = fd;
curr_up = 0;
curr_down = 0;
myRing = 0;
} //constructor
///Disconnects the current user. Doesn't do anything if already disconnected.
///Prints "Disconnected user" to stdout if disconnect took place.
///\param reason The reason for disconnecting the user.
void user::Disconnect(std::string reason){
S.close();
Stream::get()->clearStats(sID, lastStats, reason);
} //Disconnect
///Default stats constructor.
///Should not be used.
Stats::Stats(){
up = 0;
down = 0;
conntime = 0;
}
///Stats constructor reading a string.
///Reads a stats string and parses it to the internal representation.
///\param s The string of stats.
Stats::Stats(std::string s){
size_t f = s.find(' ');
if (f != std::string::npos){
host = s.substr(0, f);
s.erase(0, f + 1);
}
f = s.find(' ');
if (f != std::string::npos){
connector = s.substr(0, f);
s.erase(0, f + 1);
}
f = s.find(' ');
if (f != std::string::npos){
conntime = atoi(s.substr(0, f).c_str());
s.erase(0, f + 1);
}
f = s.find(' ');
if (f != std::string::npos){
up = atoi(s.substr(0, f).c_str());
s.erase(0, f + 1);
down = atoi(s.c_str());
}
}
}


@ -1,106 +0,0 @@
/// \file buffer_stream.h
/// Contains definitions for buffer streams.
#pragma once
#include <string>
#include <mist/dtsc.h>
#include <mist/json.h>
#include <mist/socket.h>
#include <mist/tinythread.h>
namespace Buffer {
/// Converts a stats line to up, down, host, connector and conntime values.
class Stats{
public:
unsigned int up;///<The number of bytes sent upstream.
unsigned int down;///<The number of bytes received downstream.
std::string host;///<The connected host.
std::string connector;///<The connector the user is connected with.
unsigned int conntime;///<The amount of time the user has been connected.
Stats(std::string s);
Stats();
};
///\brief Keeps track of connected users.
///
///Keeps track of which buffer the user currently uses,
///and its connection status.
class user{
public:
DTSC::Ring * myRing; ///< Ring of the buffer for this user.
unsigned int playUntil; ///< Timestamp to play until, or zero if unbounded.
Stats lastStats; ///< Holds last known stats for this connection.
Stats tmpStats; ///< Holds temporary stats for this connection.
std::string sID; ///< Holds the connection ID.
unsigned int curr_up; ///< Holds the current estimated transfer speed up.
unsigned int curr_down; ///< Holds the current estimated transfer speed down.
Socket::Connection S; ///< Connection to user
/// Creates a new user from a newly connected socket.
user(Socket::Connection fd, long long int ID);
/// Disconnects the current user. Doesn't do anything if already disconnected.
void Disconnect(std::string reason);
};
/// Keeps track of a single stream's inputs and outputs, taking care of thread safety and all other related issues.
class Stream : public DTSC::Stream{
public:
/// Get a reference to this Stream object.
static Stream * get();
/// Get the current statistics in JSON format.
std::string & getStats();
/// Set the IP address to accept push data from.
void setWaitingIP(std::string ip);
/// Check if this is the IP address to accept push data from.
bool checkWaitingIP(std::string ip);
/// Sets the current socket for push data.
bool setInput(Socket::Connection S);
/// Gets the current socket for push data.
Socket::Connection & getIPInput();
/// Send a packet while locking the mutex.
void sendPacket(DTSC::livePos & num, Socket::Connection & S);
/// Stores intermediate statistics.
void saveStats(std::string username, Stats & stats);
/// Stores final statistics.
void clearStats(std::string username, Stats & stats, std::string reason);
/// Sets the buffer name.
void setName(std::string n);
/// Add a user to the userlist.
void addUser(user * newUser);
/// Delete a user from the userlist.
void removeUser(user * oldUser);
/// Blocks the thread until new data is available.
void waitForData();
/// Sends the metadata to a specific socket
void sendMeta(Socket::Connection & s);
/// Cleanup function
~Stream();
/// Removes a track and all related buffers from the stream.
void removeTrack(int trackId);
/// Calls removeTrack on all tracks that were streaming from this socket number.
void removeSocket(int sockNo);
/// Thread-safe parsePacket override.
bool parsePacket(std::string & buffer);
/// Thread-safe parsePacket override.
bool parsePacket(Socket::Connection & c);
/// Logs a message to the controller.
void Log(std::string type, std::string message);
DTSC::livePos getNext(DTSC::livePos & pos, std::set<int> & allowedTracks);
void endStream();
private:
void deletionCallback(DTSC::livePos deleting);
tthread::mutex rw_mutex; ///< Mutex for read/write locking.
tthread::condition_variable rw_change; ///< Triggered when reader/writer count changes.
static Stream * ref;
Stream();
JSON::Value Storage; ///< Global storage of data.
std::string waiting_ip; ///< IP address for media push.
Socket::Connection ip_input; ///< Connection used for media push.
tthread::recursive_mutex stats_mutex; ///< Mutex for stats/users modifications.
std::set<user*> users; ///< All connected users.
std::set<user*>::iterator usersIt; ///< Iterator for all connected users.
std::string name; ///< Name for this buffer.
tthread::condition_variable moreData; ///< Triggered when more data becomes available.
};
}
;


@ -1,276 +0,0 @@
/// \file player.cpp
/// Holds all code for the MistPlayer application used for VoD streams.
#include <iostream>//for std::cerr
#include <stdio.h> //for fileno
#include <stdlib.h> //for atoi
#include <sys/time.h>
#include <mist/dtsc.h>
#include <mist/json.h>
#include <mist/config.h>
#include <mist/socket.h>
#include <mist/timing.h>
#include <mist/procs.h>
#include <mist/stream.h>
#include <mist/defines.h>
//under cygwin, recv blocks for ~15ms if no data is available.
//This is a hack to keep performance decent with that bug present.
#ifdef __CYGWIN__
#define CYG_DEFI int cyg_count;
#define CYG_INCR cyg_count++;
#define CYG_LOOP (cyg_count % 20 == 0) &&
#else
#define CYG_DEFI
#define CYG_INCR
#define CYG_LOOP
#endif
///Converts a stats line to up, down, host, connector and conntime values.
class Stats{
public:
unsigned int up;///<The number of bytes sent upstream.
unsigned int down;///<The number of bytes received downstream.
std::string host;///<The connected host.
std::string connector;///<The connector the user is connected with.
unsigned int conntime;///<The amount of time the user has been connected.
///\brief Default stats constructor.
///
///Should not be used.
Stats(){
up = 0;
down = 0;
conntime = 0;
}
;
///\brief Stats constructor reading a string.
///
///Reads a stats string and parses it to the internal representation.
///\param s The string of stats.
Stats(std::string s){
size_t f = s.find(' ');
if (f != std::string::npos){
host = s.substr(0, f);
s.erase(0, f + 1);
}
f = s.find(' ');
if (f != std::string::npos){
connector = s.substr(0, f);
s.erase(0, f + 1);
}
f = s.find(' ');
if (f != std::string::npos){
conntime = atoi(s.substr(0, f).c_str());
s.erase(0, f + 1);
}
f = s.find(' ');
if (f != std::string::npos){
up = atoi(s.substr(0, f).c_str());
s.erase(0, f + 1);
down = atoi(s.c_str());
}
}
};
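//serializes a 64-bit integer as 8 big-endian bytes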
std::string intToBin(long long int number){
std::string result;
result.resize(8);
for (int i = 7; i >= 0; i--){
result[i] = number & 0xFF;
number >>= 8;
}
return result;
}
int main(int argc, char** argv){
Util::Config conf(argv[0], PACKAGE_VERSION);
conf.addOption("filename",
JSON::fromString("{\"arg_num\":1, \"help\":\"Name of the file to write to stdout.\"}"));
conf.addOption("streamname",
JSON::fromString("{\"arg\":\"string\",\"short\":\"s\",\"long\":\"stream\",\"help\":\"The name of the stream that this connector will transmit.\"}"));
conf.parseArgs(argc, argv);
conf.activate();
int playing = 0;
Socket::Connection in_out = Socket::Connection(fileno(stdout), fileno(stdin));
DTSC::File source = DTSC::File(conf.getString("filename"));
if ( !source.getMeta().isFixed()){
std::cerr << "Encountered a non-fixed file." << std::endl;
return 1;
}
std::string streamname = conf.getString("streamname");
source.getMeta().send(in_out);
JSON::Value pausemark;
pausemark["trackid"] = 0ll;
pausemark["mark"] = "pause";
pausemark["time"] = 0ll;
Socket::Connection StatsSocket = Socket::Connection(Util::getTmpFolder() + "statistics", true);
int lastSent = Util::epoch(); //time last packet was sent
JSON::Value last_pack;
bool meta_sent = false;
int playUntil = -1;
long long max_lead_time = 7500;//maximum time in ms that the player can be faster than real-time
long long now, prevTimestamp = 0; //for timing of sending packets
std::set<int> newSelect;
Stats sts;
CYG_DEFI
while (in_out.connected() && (Util::epoch() - lastSent < 60) && conf.is_active){
CYG_INCR
if (CYG_LOOP in_out.spool()){
while (in_out.Received().size()){
//delete anything that doesn't end with a newline
if ( *(in_out.Received().get().rbegin()) != '\n'){
in_out.Received().get().clear();
continue;
}
in_out.Received().get().resize(in_out.Received().get().size() - 1);
if ( !in_out.Received().get().empty()){
DEBUG_MSG(DLVL_HIGH, "Player received: %s", in_out.Received().get().c_str());
switch (in_out.Received().get()[0]){
case 'P': { //Push
#if DEBUG >= 4
std::cerr << "Received push - ignoring (" << in_out.Received().get() << ")" << std::endl;
#endif
in_out.close(); //pushing to VoD makes no sense
break;
}
case 'S': { //Stats
if ( !StatsSocket.connected()){
StatsSocket = Socket::Connection(Util::getTmpFolder() + "statistics", true);
}
if (StatsSocket.connected()){
sts = Stats(in_out.Received().get().substr(2));
JSON::Value json_sts;
json_sts["vod"]["down"] = (long long int)sts.down;
json_sts["vod"]["up"] = (long long int)sts.up;
json_sts["vod"]["time"] = (long long int)sts.conntime;
json_sts["vod"]["host"] = sts.host;
json_sts["vod"]["connector"] = sts.connector;
json_sts["vod"]["filename"] = conf.getString("filename");
json_sts["vod"]["now"] = Util::epoch();
json_sts["vod"]["start"] = Util::epoch() - sts.conntime;
if ( !meta_sent){
json_sts["vod"]["meta"] = source.getMeta().toJSON();
json_sts["vod"]["meta"]["is_fixed"] = 1;
for (JSON::ObjIter oIt = json_sts["vod"]["meta"]["tracks"].ObjBegin(); oIt != json_sts["vod"]["meta"]["tracks"].ObjEnd(); oIt++){
oIt->second.removeMember("keys");
oIt->second.removeMember("fragments");
oIt->second.removeMember("parts");
}
meta_sent = true;
}
StatsSocket.SendNow(json_sts.toString());
StatsSocket.SendNow("\n\n", 2);
StatsSocket.flush();
}
break;
}
case 's': { //second-seek
int ms = JSON::Value(in_out.Received().get().substr(2)).asInt();
source.seek_time(ms);
lastSent = Util::epoch();
prevTimestamp = 0;
playUntil = 0;
break;
}
case 'p': { //play
playing = -1;
lastSent = Util::epoch();
in_out.setBlocking(false);
prevTimestamp = 0;
if (in_out.Received().get().size() >= 2){
playUntil = atoi(in_out.Received().get().substr(2).c_str());
}else{
playUntil = 0;
}
break;
}
case 'o': { //once-play
if (playing <= 0){
playing = 1;
}
prevTimestamp = 0;
++playing;
in_out.setBlocking(false);
break;
}
case 'q': { //quit-playing
if (playing != 0){
DEBUG_MSG(DLVL_HIGH, "Pausemark sent");
pausemark["time"] = source.getJSON()["time"];
pausemark.sendTo(in_out);
}
playing = 0;
in_out.setBlocking(true);
break;
}
case 't': {
newSelect.clear();
std::string tmp = in_out.Received().get().substr(2);
while (tmp != ""){
newSelect.insert(atoi(tmp.substr(0,tmp.find(' ')).c_str()));
if (tmp.find(' ') != std::string::npos){
tmp.erase(0,tmp.find(' ')+1);
}else{
tmp = "";
}
}
source.selectTracks(newSelect);
break;
}
#if DEBUG >= 4
default: {
std::cerr << "MistPlayer received an unknown command: " << in_out.Received().get() << std::endl;
break;
}
#endif
}
in_out.Received().get().clear();
}
}
}
if (playing != 0){
now = Util::getMS();
source.seekNext();
if ( !source.getJSON()){
DEBUG_MSG(DLVL_HIGH, "Seek failed (end of file?) - stopping playback");
playing = 0;
}
if (playing > 0 && source.atKeyframe()){
--playing;
}
if (prevTimestamp == 0){
prevTimestamp = now - source.getJSON()["time"].asInt();
}
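//when playing continuously, throttle sending so playback never runs more than max_lead_time ahead of real time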
if (playing == -1 && playUntil == 0 && source.getJSON()["time"].asInt() > now - prevTimestamp + max_lead_time){
Util::sleep(source.getJSON()["time"].asInt() - (now - prevTimestamp + max_lead_time));
}
if ( playUntil && playUntil <= source.getJSON()["time"].asInt()){
playing = 0;
}
if (playing == 0){
DEBUG_MSG(DLVL_HIGH, "Pausemark sent");
pausemark["time"] = source.getJSON()["time"];
pausemark.sendTo(in_out);
in_out.setBlocking(true);
}else{
lastSent = Util::epoch();
DEBUG_MSG(DLVL_HIGH, "Playing %lliT%lli", source.getJSON()["trackid"].asInt(), source.getJSON()["time"].asInt());
source.getJSON().sendTo(in_out);
}
}else{
Util::sleep(10);
}
}
StatsSocket.close();
in_out.close();
return 0;
}


@ -25,6 +25,7 @@
#include "embed.js.h"
/// Holds everything unique to HTTP Connectors.
namespace Connector_HTTP {
@ -115,14 +116,15 @@ namespace Connector_HTTP {
///Displays a friendly error message.
///\param H The request that was being handled upon timeout.
///\param conn The connection to the client that issued the request.
///\param msg The message to print to the client.
///\return A timestamp indicating when the request was parsed.
long long int proxyHandleTimeout(HTTP::Parser & H, Socket::Connection & conn){
long long int proxyHandleTimeout(HTTP::Parser & H, Socket::Connection & conn, std::string msg){
H.Clean();
H.SetHeader("Server", "mistserver/" PACKAGE_VERSION "/" + Util::Config::libver);
H.SetBody(
"<!DOCTYPE html><html><head><title>Gateway timeout</title></head><body><h1>Gateway timeout</h1>Though the server understood your request and attempted to handle it, somehow handling it took longer than it should. Your request has been cancelled - please try again later.</body></html>");
"<!DOCTYPE html><html><head><title>"+msg+"</title></head><body><h1>"+msg+"</h1>Though the server understood your request and attempted to handle it, somehow handling it took longer than it should. Your request has been cancelled - please try again later.</body></html>");
long long int ret = Util::getMS();
conn.SendNow(H.BuildResponse("504", "Gateway Timeout"));
conn.SendNow(H.BuildResponse("504", msg));
return ret;
}
@ -404,6 +406,7 @@ namespace Connector_HTTP {
H.Clean();
ConnConn * myCConn = 0;
unsigned int counter = 0;
//loop until a connection is available/created
while (!myCConn){
//lock the connection mutex before trying anything
@ -412,6 +415,12 @@ namespace Connector_HTTP {
if ( !connectorConnections.count(uid)){
connectorConnections[uid] = new ConnConn(new Socket::Connection(Util::getTmpFolder() + connector));
connectorConnections[uid]->conn->setBlocking(false); //do not block on spool() with no data
if (!connectorConnections[uid]->conn->spool() && !connectorConnections[uid]->conn->connected()){
//unlock the connection mutex before exiting
connMutex.unlock();
DEBUG_MSG(DLVL_FAIL, "Created new connection (%s) failed - aborting request!", uid.c_str());
return Util::getMS();
}
DEBUG_MSG(DLVL_HIGH, "Created new connection %s", uid.c_str());
}
@ -420,11 +429,17 @@ namespace Connector_HTTP {
myCConn = connectorConnections[uid];
//if the connection is dead, delete it and re-loop
if (!myCConn->conn->spool() && !myCConn->conn->connected()){
counter++;
DEBUG_MSG(DLVL_HIGH, "Resetting existing connection %s", uid.c_str());
connectorConnections.erase(uid);
myCConn->inUse.unlock();
delete myCConn;
myCConn = 0;
if (counter++ > 2){
connMutex.unlock();
DEBUG_MSG(DLVL_FAIL, "Created new connection (%s) failed - aborting request!", uid.c_str());
return Util::getMS();
}
}else{
DEBUG_MSG(DLVL_HIGH, "Using active connection %s", uid.c_str());
}
@ -477,7 +492,7 @@ namespace Connector_HTTP {
myCConn->inUse.unlock();
//unset to only read headers
H.headerOnly = false;
return proxyHandleTimeout(H, conn);
return proxyHandleTimeout(H, conn, "Timeout: fragment too new");
}
myCConn->lastUse = 0;
timeout = 0;
@ -495,9 +510,9 @@ namespace Connector_HTTP {
myCConn->inUse.unlock();
//unset to only read headers
H.headerOnly = false;
return proxyHandleTimeout(H, conn);
return proxyHandleTimeout(H, conn, "Gateway timeout while waiting for response");
}else{
Util::sleep(5);
Util::sleep(100);
}
}
//unset to only read headers
@ -506,7 +521,7 @@ namespace Connector_HTTP {
//failure, disconnect and sent error to user
myCConn->conn->close();
myCConn->inUse.unlock();
return proxyHandleTimeout(H, conn);
return proxyHandleTimeout(H, conn, "Gateway connection dropped");
}else{
long long int ret = Util::getMS();
//success, check type of response
@ -699,6 +714,14 @@ int main(int argc, char ** argv){
Connector_HTTP::capabilities.removeMember((*it).substr(8));
}
}
if ((*it).substr(0, 7) == "MistOut"){
arg_one = Util::getMyPath() + (*it);
conn_args[0] = arg_one.c_str();
Connector_HTTP::capabilities[(*it).substr(7)] = JSON::fromString(Util::Procs::getOutputOf((char**)conn_args));
if (Connector_HTTP::capabilities[(*it).substr(7)].size() < 1){
Connector_HTTP::capabilities.removeMember((*it).substr(7));
}
}
}
return conf.serveThreadedSocket(Connector_HTTP::proxyHandleHTTPConnection);


@ -1,333 +0,0 @@
/// \file conn_http_dynamic.cpp
/// Contains the main code for the HTTP Dynamic Connector
#include <iostream>
#include <sstream>
#include <queue>
#include <cstdlib>
#include <cstdio>
#include <cmath>
#include <unistd.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <getopt.h>
#include <mist/socket.h>
#include <mist/http_parser.h>
#include <mist/json.h>
#include <mist/dtsc.h>
#include <mist/flv_tag.h>
#include <mist/base64.h>
#include <mist/amf.h>
#include <mist/mp4.h>
#include <mist/mp4_adobe.h>
#include <mist/config.h>
#include <sstream>
#include <mist/stream.h>
#include <mist/timing.h>
/// Holds everything unique to HTTP Connectors.
namespace Connector_HTTP {
std::set<int> videoTracks;///< Holds valid video tracks for playback
long long int audioTrack = 0;///< Holds audio track ID for playback
void getTracks(DTSC::Meta & metadata){
videoTracks.clear();
for (std::map<int,DTSC::Track>::iterator it = metadata.tracks.begin(); it != metadata.tracks.end(); it++){
if (it->second.codec == "H264" || it->second.codec == "H263" || it->second.codec == "VP6"){
videoTracks.insert(it->first);
}
if (it->second.codec == "AAC" || it->second.codec == "MP3"){
audioTrack = it->first;
}
}
}
///\brief Builds a bootstrap for use in HTTP Dynamic streaming.
///\param streamName The name of the stream.
///\param trackMeta The current metadata of this track, used to generate the index.
///\param isLive Whether or not the stream is live.
///\param fragnum The index of the current fragment.
///\return The generated bootstrap.
std::string dynamicBootstrap(std::string & streamName, DTSC::Track & trackMeta, bool isLive = false, int fragnum = 0){
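//an HDS bootstrap is an 'abst' box wrapping a segment run table (asrt) and a fragment run table (afrt) that maps fragment numbers to start times and durations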
std::string empty;
MP4::ASRT asrt;
asrt.setUpdate(false);
asrt.setVersion(1);
//asrt.setQualityEntry(empty, 0);
if (isLive){
asrt.setSegmentRun(1, 4294967295ul, 0);
}else{
asrt.setSegmentRun(1, trackMeta.keys.size(), 0);
}
MP4::AFRT afrt;
afrt.setUpdate(false);
afrt.setVersion(1);
afrt.setTimeScale(1000);
//afrt.setQualityEntry(empty, 0);
MP4::afrt_runtable afrtrun;
int i = 0;
for (std::deque<DTSC::Key>::iterator it = trackMeta.keys.begin(); it != trackMeta.keys.end(); it++){
if (it->getLength()){
afrtrun.firstFragment = it->getNumber();
afrtrun.firstTimestamp = it->getTime();
afrtrun.duration = it->getLength();
afrt.setFragmentRun(afrtrun, i);
i++;
}
}
MP4::ABST abst;
abst.setVersion(1);
abst.setBootstrapinfoVersion(1);
abst.setProfile(0);
abst.setUpdate(false);
abst.setTimeScale(1000);
abst.setLive(isLive);
abst.setCurrentMediaTime(trackMeta.lastms);
abst.setSmpteTimeCodeOffset(0);
abst.setMovieIdentifier(streamName);
abst.setSegmentRunTable(asrt, 0);
abst.setFragmentRunTable(afrt, 0);
#if DEBUG >= 8
std::cout << "Sending bootstrap:" << std::endl << abst.toPrettyString(0) << std::endl;
#endif
return std::string((char*)abst.asBox(), (int)abst.boxedSize());
}
///\brief Builds an index file for HTTP Dynamic streaming.
///\param streamName The name of the stream.
///\param metadata The current metadata, used to generate the index.
///\return The index file for HTTP Dynamic Streaming.
std::string dynamicIndex(std::string & streamName, DTSC::Meta & metadata){
if ( !audioTrack){getTracks(metadata);}
std::stringstream Result;
Result << "<?xml version=\"1.0\" encoding=\"utf-8\"?>" << std::endl;
Result << " <manifest xmlns=\"http://ns.adobe.com/f4m/1.0\">" << std::endl;
Result << " <id>" << streamName << "</id>" << std::endl;
Result << " <mimeType>video/mp4</mimeType>" << std::endl;
Result << " <deliveryType>streaming</deliveryType>" << std::endl;
if (metadata.vod){
Result << " <duration>" << metadata.tracks[*videoTracks.begin()].lastms / 1000 << ".000</duration>" << std::endl;
Result << " <streamType>recorded</streamType>" << std::endl;
}else{
Result << " <duration>0.00</duration>" << std::endl;
Result << " <streamType>live</streamType>" << std::endl;
}
for (std::set<int>::iterator it = videoTracks.begin(); it != videoTracks.end(); it++){
Result << " <bootstrapInfo "
"profile=\"named\" "
"id=\"boot" << (*it) << "\" "
"url=\"" << (*it) << ".abst\">"
"</bootstrapInfo>" << std::endl;
}
for (std::set<int>::iterator it = videoTracks.begin(); it != videoTracks.end(); it++){
Result << " <media "
"url=\"" << (*it) << "-\" "
"bitrate=\"" << metadata.tracks[(*it)].bps * 8 << "\" "
"bootstrapInfoId=\"boot" << (*it) << "\" "
"width=\"" << metadata.tracks[(*it)].width << "\" "
"height=\"" << metadata.tracks[(*it)].height << "\">" << std::endl;
Result << " <metadata>AgAKb25NZXRhRGF0YQMAAAk=</metadata>" << std::endl;
Result << " </media>" << std::endl;
}
Result << "</manifest>" << std::endl;
#if DEBUG >= 8
std::cerr << "Sending this manifest:" << std::endl << Result.str() << std::endl;
#endif
return Result.str();
} //BuildManifest
///\brief Main function for the HTTP Dynamic Connector
///\param conn A socket describing the connection to the client.
///\return The exit code of the connector.
int dynamicConnector(Socket::Connection & conn){
FLV::Tag tmp; //temporary tag
DTSC::Stream Strm; //Incoming stream buffer.
HTTP::Parser HTTP_R, HTTP_S; //HTTP Receiver and HTTP Sender.
Socket::Connection ss( -1);
std::string streamname;
bool handlingRequest = false;
int Quality = 0;
int ReqFragment = -1;
long long mstime = 0;
long long mslen = 0;
unsigned int lastStats = 0;
conn.setBlocking(false); //do not block on conn.spool() when no data is available
while (conn.connected()){
if ( !handlingRequest){
if (conn.spool() && HTTP_R.Read(conn)){
#if DEBUG >= 5
std::cout << "Received request: " << HTTP_R.getUrl() << std::endl;
#endif
conn.setHost(HTTP_R.GetHeader("X-Origin"));
streamname = HTTP_R.GetHeader("X-Stream");
if ( !ss){
ss = Util::Stream::getStream(streamname);
if ( !ss.connected()){
HTTP_S.Clean();
HTTP_S.SetBody("No such stream is available on the system. Please try again.\n");
HTTP_S.SendResponse("404", "Not found", conn);
continue;
}
Strm.waitForMeta(ss);
}
if (HTTP_R.url.find(".abst") != std::string::npos){
std::string streamID = HTTP_R.url.substr(streamname.size() + 10);
streamID = streamID.substr(0, streamID.find(".abst"));
HTTP_S.Clean();
HTTP_S.SetBody(dynamicBootstrap(streamname, Strm.metadata.tracks[atoll(streamID.c_str())], Strm.metadata.live));
HTTP_S.SetHeader("Content-Type", "binary/octet");
HTTP_S.SetHeader("Cache-Control", "no-cache");
HTTP_S.SendResponse("200", "OK", conn);
HTTP_R.Clean(); //clean for any possible next requests
continue;
}
if (HTTP_R.url.find("f4m") == std::string::npos){
std::string tmp_qual = HTTP_R.url.substr(HTTP_R.url.find("/", 10) + 1);
Quality = atoi(tmp_qual.substr(0, tmp_qual.find("Seg") - 1).c_str());
int temp;
temp = HTTP_R.url.find("Seg") + 3;
temp = HTTP_R.url.find("Frag") + 4;
ReqFragment = atoi(HTTP_R.url.substr(temp).c_str());
#if DEBUG >= 5
printf("Video track %d, fragment %d\n", Quality, ReqFragment);
#endif
if (!audioTrack){getTracks(Strm.metadata);}
DTSC::Track & vidTrack = Strm.metadata.tracks[Quality];
mstime = 0;
mslen = 0;
for (std::deque<DTSC::Key>::iterator it = vidTrack.keys.begin(); it != vidTrack.keys.end(); it++){
if (it->getNumber() >= ReqFragment){
mstime = it->getTime();
mslen = it->getLength();
if (Strm.metadata.live){
if (it == vidTrack.keys.end() - 2){
HTTP_S.Clean();
HTTP_S.SetBody("Proxy, re-request this in a second or two.\n");
HTTP_S.SendResponse("208", "Ask again later", conn);
HTTP_R.Clean(); //clean for any possible next requests
std::cout << "Fragment after fragment " << ReqFragment << " not available yet" << std::endl;
if (ss.spool()){
while (Strm.parsePacket(ss.Received())){}
}
}
}
break;
}
}
if (HTTP_R.url == "/"){continue;}//Don't continue, but continue instead.
if (Strm.metadata.live){
if (mstime == 0 && ReqFragment > 1){
HTTP_S.Clean();
HTTP_S.SetBody("The requested fragment is no longer kept in memory on the server and cannot be served.\n");
HTTP_S.SendResponse("412", "Fragment out of range", conn);
HTTP_R.Clean(); //clean for any possible next requests
std::cout << "Fragment " << ReqFragment << " too old" << std::endl;
continue;
}
}
std::stringstream sstream;
sstream << "t " << Quality << " " << audioTrack << "\ns " << mstime << "\np " << (mstime + mslen) << "\n";
ss.SendNow(sstream.str().c_str());
HTTP_S.Clean();
HTTP_S.SetHeader("Content-Type", "video/mp4");
HTTP_S.StartResponse(HTTP_R, conn);
//send the bootstrap
std::string bootstrap = dynamicBootstrap(streamname, Strm.metadata.tracks[Quality], Strm.metadata.live, ReqFragment);
HTTP_S.Chunkify(bootstrap, conn);
//send a zero-size mdat, meaning it stretches until end of file.
HTTP_S.Chunkify("\000\000\000\000mdat", 8, conn);
//send init data, if needed.
if (audioTrack > 0){
tmp.DTSCAudioInit(Strm.metadata.tracks[audioTrack]);
tmp.tagTime(mstime);
HTTP_S.Chunkify(tmp.data, tmp.len, conn);
}
if (Quality > 0){
tmp.DTSCVideoInit(Strm.metadata.tracks[Quality]);
tmp.tagTime(mstime);
HTTP_S.Chunkify(tmp.data, tmp.len, conn);
}
handlingRequest = true;
}else{
HTTP_S.Clean();
HTTP_S.SetHeader("Content-Type", "text/xml");
HTTP_S.SetHeader("Cache-Control", "no-cache");
HTTP_S.SetBody(dynamicIndex(streamname, Strm.metadata));
HTTP_S.SendResponse("200", "OK", conn);
}
HTTP_R.Clean(); //clean for any possible next requests
}else{
//sleep for 250ms before next attempt
Util::sleep(250);
}
}
if (ss.connected()){
unsigned int now = Util::epoch();
if (now != lastStats){
lastStats = now;
ss.SendNow(conn.getStats("HTTP_Dynamic").c_str());
}
if (handlingRequest && ss.spool()){
while (Strm.parsePacket(ss.Received())){
if (Strm.lastType() == DTSC::PAUSEMARK){
//send an empty chunk to signify request is done
HTTP_S.Chunkify("", 0, conn);
handlingRequest = false;
}
if (Strm.lastType() == DTSC::VIDEO || Strm.lastType() == DTSC::AUDIO){
//send a chunk with the new data
tmp.DTSCLoader(Strm);
HTTP_S.Chunkify(tmp.data, tmp.len, conn);
}
}
}
if ( !ss.connected()){
break;
}
}
}
conn.close();
ss.SendNow(conn.getStats("HTTP_Dynamic").c_str());
ss.close();
return 0;
} //Connector_HTTP_Dynamic main function
} //Connector_HTTP_Dynamic namespace
///\brief The standard process-spawning main function.
int main(int argc, char ** argv){
Util::Config conf(argv[0], PACKAGE_VERSION);
JSON::Value capa;
capa["desc"] = "Enables HTTP protocol Adobe-specific dynamic streaming (also known as HDS).";
capa["deps"] = "HTTP";
capa["url_rel"] = "/dynamic/$/manifest.f4m";
capa["url_prefix"] = "/dynamic/$/";
capa["socket"] = "http_dynamic";
capa["codecs"][0u][0u].append("H264");
capa["codecs"][0u][0u].append("H263");
capa["codecs"][0u][0u].append("VP6");
capa["codecs"][0u][1u].append("AAC");
capa["codecs"][0u][1u].append("MP3");
capa["methods"][0u]["handler"] = "http";
capa["methods"][0u]["type"] = "flash/11";
capa["methods"][0u]["priority"] = 7ll;
conf.addBasicConnectorOptions(capa);
conf.parseArgs(argc, argv);
if (conf.getBool("json")){
std::cout << capa.toString() << std::endl;
return -1;
}
return conf.serveForkedSocket(Connector_HTTP::dynamicConnector);
} //main


@ -1,200 +0,0 @@
///\file conn_http_json.cpp
///\brief Contains the main code for the HTTP JSON Connector
#include <iostream>
#include <queue>
#include <sstream>
#include <iomanip>
#include <cstdlib>
#include <cstdio>
#include <cmath>
#include <unistd.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <getopt.h>
#include <mist/socket.h>
#include <mist/http_parser.h>
#include <mist/dtsc.h>
#include <mist/flv_tag.h>
#include <mist/amf.h>
#include <mist/config.h>
#include <mist/stream.h>
#include <mist/timing.h>
///\brief Holds everything unique to HTTP Connectors.
namespace Connector_HTTP {
///\brief Main function for the HTTP JSON Connector
///\param conn A socket describing the connection to the client.
///\return The exit code of the connector.
int JSONConnector(Socket::Connection & conn){
DTSC::Stream Strm; //Incoming stream buffer.
HTTP::Parser HTTP_R, HTTP_S;//HTTP Receiver and HTTP Sender.
bool inited = false;//Whether the stream is initialized
Socket::Connection ss( -1);//The Stream Socket, used to connect to the desired stream.
std::string streamname;//Will contain the name of the stream.
unsigned int lastStats = 0;//Indicates the last time that we have sent stats to the server socket.
unsigned int seek_sec = 0;//Seek position in ms
unsigned int seek_byte = 0;//Seek position in bytes
std::stringstream jsondata;
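//packets are collected into jsondata as a single JSON array and sent as one response when a pause mark arrives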
while (conn.connected()){
//Only attempt to parse input when not yet init'ed.
if ( !inited){
if (conn.spool() && HTTP_R.Read(conn)){
#if DEBUG >= 5
std::cout << "Received request: " << HTTP_R.getUrl() << std::endl;
#endif
conn.setHost(HTTP_R.GetHeader("X-Origin"));
streamname = HTTP_R.GetHeader("X-Stream");
int start = 0;
if ( !HTTP_R.GetVar("start").empty()){
start = atoi(HTTP_R.GetVar("start").c_str());
}
if ( !HTTP_R.GetVar("starttime").empty()){
start = atoi(HTTP_R.GetVar("starttime").c_str());
}
if ( !HTTP_R.GetVar("apstart").empty()){
start = atoi(HTTP_R.GetVar("apstart").c_str());
}
if ( !HTTP_R.GetVar("ec_seek").empty()){
start = atoi(HTTP_R.GetVar("ec_seek").c_str());
}
if ( !HTTP_R.GetVar("fs").empty()){
start = atoi(HTTP_R.GetVar("fs").c_str());
}
//under 3 hours we assume seconds, otherwise byte position
if (start < 10800){
seek_byte = start * 1000; //ms, not s
}else{
seek_byte = start * 1000; //divide by 1mbit, then *1000 for ms.
}
// ready4data = true;
HTTP_R.Clean(); //clean for any possible next requests
jsondata.clear();
jsondata << "[";
//we are ready, connect the socket!
if ( !ss.connected()){
ss = Util::Stream::getStream(streamname);
}
if ( !ss.connected()){
#if DEBUG >= 1
fprintf(stderr, "Could not connect to server for %s!\n", streamname.c_str());
#endif
ss.close();
HTTP_S.Clean();
HTTP_S.SetBody("No such stream is available on the system. Please try again.\n");
conn.SendNow(HTTP_S.BuildResponse("404", "Not found"));
//ready4data = false;
inited = false;
continue;
}
//wait until we have a header
while ( !Strm.metadata && ss.connected()){
if (ss.spool()){
Strm.parsePacket(ss.Received()); //read the metadata
}else{
Util::sleep(5);
}
}
seek_sec = seek_byte;
std::stringstream cmd;
cmd << "t";
int tid = -1;
for (std::map<int,DTSC::Track>::iterator it = Strm.metadata.tracks.begin(); it != Strm.metadata.tracks.end(); it++){
if (it->second.type == "meta" ){
if (tid == -1){
tid = it->second.trackID;
}
cmd << " " << it->second.trackID;
}
}
if( cmd.str() == "t" ){
cmd.str("");
cmd.clear();
}
int maxTime = Strm.metadata.tracks[tid].lastms;
cmd << "\ns " << seek_sec << "\np " << maxTime << "\n";
ss.SendNow(cmd.str().c_str(), cmd.str().size());
inited = true;
}
}
if (inited){
unsigned int now = Util::epoch();
if (now != lastStats){
lastStats = now;
ss.SendNow(conn.getStats("HTTP_JSON").c_str());
}
if (ss.spool()){
while (Strm.parsePacket(ss.Received())){
if(Strm.lastType() == DTSC::PAUSEMARK){
HTTP_S.Clean(); //make sure no parts of old requests are left in any buffers
HTTP_S.SetHeader("Content-Type", "application/json"); //Send the correct content-type for FLV files
jsondata << "]";
HTTP_S.SetBody(jsondata.str());
conn.SendNow(HTTP_S.BuildResponse("200", "OK")); //no SetBody = unknown length - this is intentional, we will stream the entire file
inited = false;
jsondata.str(""); // totally do this
jsondata.clear();
break;
}
if (jsondata.str().length() > 1){
jsondata << ",";
}
jsondata << Strm.getPacket().toString();
}
}else{
Util::sleep(1);
}
if ( !ss.connected()){
break;
}
}
}
conn.close();
ss.SendNow(conn.getStats("HTTP_JSON").c_str());
ss.close();
return 0;
} //JSONConnector main function
} //Connector_HTTP namespace
///\brief The standard process-spawning main function.
int main(int argc, char ** argv){
Util::Config conf(argv[0], PACKAGE_VERSION);
JSON::Value capa;
capa["desc"] = "Enables HTTP protocol JSON streaming.";
capa["deps"] = "HTTP";
capa["url_rel"] = "/$.json";
capa["url_match"] = "/$.json";
capa["url_handler"] = "http";
capa["url_type"] = "json";
capa["socket"] = "http_json";
conf.addBasicConnectorOptions(capa);
conf.parseArgs(argc, argv);
if (conf.getBool("json")){
std::cout << capa.toString() << std::endl;
return -1;
}
return conf.serveForkedSocket(Connector_HTTP::JSONConnector);
} //main


@ -1,354 +0,0 @@
/// \file conn_http_live.cpp
/// Contains the main code for the HTTP Live Connector
#include <iostream>
#include <iomanip>
#include <sstream>
#include <queue>
#include <cstdlib>
#include <cstdio>
#include <cmath>
#include <unistd.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <getopt.h>
#include <mist/socket.h>
#include <mist/http_parser.h>
#include <mist/json.h>
#include <mist/dtsc.h>
#include <mist/mp4.h>
#include <mist/mp4_generic.h>
#include <mist/config.h>
#include <sstream>
#include <mist/stream.h>
#include <mist/timing.h>
#include <mist/ts_packet.h>
/// Holds everything unique to HTTP Connectors.
namespace Connector_HTTP {
///\brief Builds an index file for HTTP Live streaming.
///\param metadata The current metadata, used to generate the index.
///\param isLive Whether or not the stream is live.
///\return The index file for HTTP Live Streaming.
std::string liveIndex(DTSC::Meta & metadata, bool isLive){
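//builds the HLS master playlist: one EXT-X-STREAM-INF entry per H264 track, paired with the first AAC track found (if any)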
std::stringstream result;
result << "#EXTM3U\r\n";
int audioId = -1;
std::string audioName;
for (std::map<int,DTSC::Track>::iterator it = metadata.tracks.begin(); it != metadata.tracks.end(); it++){
if (it->second.codec == "AAC"){
audioId = it->first;
audioName = it->second.getIdentifier();
break;
}
}
for (std::map<int,DTSC::Track>::iterator it = metadata.tracks.begin(); it != metadata.tracks.end(); it++){
if (it->second.codec == "H264"){
int bWidth = it->second.bps * 2;
if (audioId != -1){
bWidth += metadata.tracks[audioId].bps * 2;
}
result << "#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=" << bWidth * 10 << "\r\n";
result << it->first;
if (audioId != -1){
result << "_" << audioId;
}
result << "/index.m3u8\r\n";
}
}
#if DEBUG >= 8
std::cerr << "Sending this index:" << std::endl << result.str() << std::endl;
#endif
return result.str();
}
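//For reference, the master playlist generated above looks roughly like this for a stream
//with one H264 track (ID 1) and one AAC track (ID 2); the track IDs are illustrative and
//BANDWIDTH is the rough estimate computed above, not a measured value:
//  #EXTM3U
//  #EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=1570000
//  1_2/index.m3u8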
std::string liveIndex(DTSC::Track & metadata, bool isLive){
std::stringstream result;
//parse single track
int longestFragment = 0;
for (std::deque<DTSC::Fragment>::iterator it = metadata.fragments.begin(); (it + 1) != metadata.fragments.end(); it++){
if (it->getDuration() > longestFragment){
longestFragment = it->getDuration();
}
}
result << "#EXTM3U\r\n"
"#EXT-X-TARGETDURATION:" << (longestFragment / 1000) + 1 << "\r\n"
"#EXT-X-MEDIA-SEQUENCE:" << metadata.missedFrags << "\r\n";
for (std::deque<DTSC::Fragment>::iterator it = metadata.fragments.begin(); it != metadata.fragments.end(); it++){
long long int starttime = metadata.getKey(it->getNumber()).getTime();
if (it != (metadata.fragments.end() - 1)){
result << "#EXTINF:" << ((it->getDuration() + 500) / 1000) << ", no desc\r\n" << starttime << "_" << it->getDuration() + starttime << ".ts\r\n";
}
}
if ( !isLive){
result << "#EXT-X-ENDLIST\r\n";
}
#if DEBUG >= 8
std::cerr << "Sending this index:" << std::endl << result.str() << std::endl;
#endif
return result.str();
} //liveIndex
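//The per-track media playlist produced above follows this pattern; a shortened example
//for a VoD stream with 10-second fragments (numbers are illustrative only):
//  #EXTM3U
//  #EXT-X-TARGETDURATION:11
//  #EXT-X-MEDIA-SEQUENCE:0
//  #EXTINF:10, no desc
//  0_10000.ts
//  #EXT-X-ENDLIST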
///\brief Main function for the HTTP Live Connector
///\param conn A socket describing the connection to the client.
///\return The exit code of the connector.
int liveConnector(Socket::Connection & conn){
DTSC::Stream Strm; //Incoming stream buffer.
HTTP::Parser HTTP_R, HTTP_S; //HTTP Receiver and HTTP Sender.
bool ready4data = false; //Set to true when streaming is to begin.
bool AppleCompat = false; //Set to true when Apple device detected.
Socket::Connection ss( -1);
std::string streamname;
bool handlingRequest = false;
std::string recBuffer = "";
TS::Packet PackData;
int PacketNumber = 0;
long long unsigned int TimeStamp = 0;
unsigned int ThisNaluSize;
char VideoCounter = 0;
char AudioCounter = 0;
long long unsigned int lastVid = 0;
bool IsKeyFrame = false;
MP4::AVCC avccbox;
bool haveAvcc = false;
std::vector<int> fragIndices;
std::string manifestType;
int Segment = -1;
int temp;
int trackID = 0;
int audioTrackID = 0;
unsigned int lastStats = 0;
conn.setBlocking(false); //do not block on conn.spool() when no data is available
while (conn.connected()){
if ( !handlingRequest){
if (conn.spool() && HTTP_R.Read(conn)){
#if DEBUG >= 5
std::cout << "Received request: " << HTTP_R.getUrl() << std::endl;
#endif
conn.setHost(HTTP_R.GetHeader("X-Origin"));
AppleCompat = (HTTP_R.GetHeader("User-Agent").find("Apple") != std::string::npos);
streamname = HTTP_R.GetHeader("X-Stream");
if ( !ss){
ss = Util::Stream::getStream(streamname);
if ( !ss.connected()){
#if DEBUG >= 1
fprintf(stderr, "Could not connect to server!\n");
#endif
HTTP_S.Clean();
HTTP_S.SetBody("No such stream is available on the system. Please try again.\n");
conn.SendNow(HTTP_S.BuildResponse("404", "Not found"));
ready4data = false;
continue;
}
ss.setBlocking(false);
Strm.waitForMeta(ss);
}
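//Two request shapes are handled below, as parsed from the URL: playlist requests that
//contain ".m3u"/".m3u8", and segment requests of the form
//  <videoTrack>[_<audioTrack>]/<startTime>_<endTime>.ts
//where both times are in milliseconds, matching the segment names generated by
//liveIndex() above. This description is derived from the parsing code that follows.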
if (HTTP_R.url.find(".m3u") == std::string::npos){
temp = HTTP_R.url.find("/", 5) + 1;
std::string allTracks = HTTP_R.url.substr(temp, HTTP_R.url.find("/", temp) - temp);
trackID = atoi(allTracks.c_str());
audioTrackID = atoi(allTracks.substr(allTracks.find("_")+1).c_str());
temp = HTTP_R.url.find("/", temp) + 1;
Segment = atoi(HTTP_R.url.substr(temp, HTTP_R.url.find("_", temp) - temp).c_str());
lastVid = Segment * 90;
temp = HTTP_R.url.find("_", temp) + 1;
int frameCount = atoi(HTTP_R.url.substr(temp, HTTP_R.url.find(".ts", temp) - temp).c_str());
if (Strm.metadata.live){
int seekable = Strm.canSeekms(Segment);
if (seekable < 0){
HTTP_S.Clean();
HTTP_S.SetBody("The requested fragment is no longer kept in memory on the server and cannot be served.\n");
conn.SendNow(HTTP_S.BuildResponse("412", "Fragment out of range"));
HTTP_R.Clean(); //clean for any possible next requests
std::cout << "Fragment @ " << Segment << " too old" << std::endl;
continue;
}
if (seekable > 0){
HTTP_S.Clean();
HTTP_S.SetBody("Proxy, re-request this in a second or two.\n");
conn.SendNow(HTTP_S.BuildResponse("208", "Ask again later"));
HTTP_R.Clean(); //clean for any possible next requests
std::cout << "Fragment @ " << Segment << " not available yet" << std::endl;
continue;
}
}
for (unsigned int i = 0; i < allTracks.size(); i++){
if (allTracks[i] == '_'){
allTracks[i] = ' ';
}
}
std::stringstream sstream;
sstream << "t " << allTracks << "\n";
sstream << "s " << Segment << "\n";
sstream << "p " << frameCount << "\n";
ss.SendNow(sstream.str().c_str());
HTTP_S.Clean();
HTTP_S.SetHeader("Content-Type", "video/mp2t");
HTTP_S.StartResponse(HTTP_R, conn);
handlingRequest = true;
}else{
std::string request = HTTP_R.url.substr(HTTP_R.url.find("/", 5) + 1);
if (HTTP_R.url.find(".m3u8") != std::string::npos){
manifestType = "audio/x-mpegurl";
}else{
manifestType = "audio/mpegurl";
}
HTTP_S.Clean();
HTTP_S.SetHeader("Content-Type", manifestType);
HTTP_S.SetHeader("Cache-Control", "no-cache");
std::string manifest;
if (request.find("/") == std::string::npos){
manifest = liveIndex(Strm.metadata, Strm.metadata.live);
}else{
int selectId = atoi(request.substr(0,request.find("/")).c_str());
manifest = liveIndex(Strm.metadata.tracks[selectId], Strm.metadata.live);
}
HTTP_S.SetBody(manifest);
conn.SendNow(HTTP_S.BuildResponse("200", "OK"));
}
ready4data = true;
HTTP_R.Clean(); //clean for any possible next requests
}else{
Util::sleep(250);
}
}
if (ready4data){
unsigned int now = Util::epoch();
if (now != lastStats){
lastStats = now;
ss.SendNow(conn.getStats("HTTP_Live").c_str());
}
if (ss.spool()){
while (Strm.parsePacket(ss.Received())){
if (Strm.lastType() == DTSC::PAUSEMARK){
HTTP_S.Chunkify("", 0, conn);
handlingRequest = false;
}
if ( !haveAvcc){
avccbox.setPayload(Strm.metadata.tracks[trackID].init);
haveAvcc = true;
}
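//The block below muxes each DTSC packet into 188-byte MPEG-TS packets: a PAT/PMT pair is
//injected every 42 packets, video NAL units are rewritten to Annex-B and given a PES
//lead-in (with a PCR stamp on keyframes), audio is prefixed with a header from
//TS::GetAudioHeader plus its own PES lead-in, and the payload is then split over TS
//packets of up to 184 payload bytes with stuffing as needed. Summary only; see the code
//for exact details.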
if (Strm.lastType() == DTSC::VIDEO || Strm.lastType() == DTSC::AUDIO){
Socket::Buffer ToPack;
//write PAT and PMT TS packets
if (PacketNumber % 42 == 0){
PackData.DefaultPAT();
HTTP_S.Chunkify(PackData.ToString(), 188, conn);
PackData.DefaultPMT();
HTTP_S.Chunkify(PackData.ToString(), 188, conn);
PacketNumber += 2;
}
int PIDno = 0;
char * ContCounter = 0;
if (Strm.lastType() == DTSC::VIDEO){
IsKeyFrame = Strm.getPacket().isMember("keyframe");
if (IsKeyFrame){
TimeStamp = (Strm.getPacket()["time"].asInt() * 27000);
}
ToPack.append(avccbox.asAnnexB());
while (Strm.lastData().size() > 4){
ThisNaluSize = (Strm.lastData()[0] << 24) + (Strm.lastData()[1] << 16) + (Strm.lastData()[2] << 8) + Strm.lastData()[3];
Strm.lastData().replace(0, 4, "\000\000\000\001", 4);
if (ThisNaluSize + 4 == Strm.lastData().size()){
ToPack.append(Strm.lastData());
break;
}else{
ToPack.append(Strm.lastData().c_str(), ThisNaluSize + 4);
Strm.lastData().erase(0, ThisNaluSize + 4);
}
}
ToPack.prepend(TS::Packet::getPESVideoLeadIn(0ul, Strm.getPacket()["time"].asInt() * 90));
PIDno = 0x100 - 1 + Strm.getPacket()["trackid"].asInt();
ContCounter = &VideoCounter;
}else if (Strm.lastType() == DTSC::AUDIO){
ToPack.append(TS::GetAudioHeader(Strm.lastData().size(), Strm.metadata.tracks[audioTrackID].init));
ToPack.append(Strm.lastData());
if (AppleCompat){
ToPack.prepend(TS::Packet::getPESAudioLeadIn(ToPack.bytes(1073741824ul), lastVid));
}else{
ToPack.prepend(TS::Packet::getPESAudioLeadIn(ToPack.bytes(1073741824ul), Strm.getPacket()["time"].asInt() * 90));
}
PIDno = 0x100 - 1 + Strm.getPacket()["trackid"].asInt();
ContCounter = &AudioCounter;
IsKeyFrame = false;
}
//initial packet
PackData.Clear();
PackData.PID(PIDno);
PackData.ContinuityCounter(( *ContCounter)++);
PackData.UnitStart(1);
if (IsKeyFrame){
PackData.RandomAccess(1);
PackData.PCR(TimeStamp);
}
unsigned int toSend = PackData.AddStuffing(ToPack.bytes(184));
std::string gonnaSend = ToPack.remove(toSend);
PackData.FillFree(gonnaSend);
HTTP_S.Chunkify(PackData.ToString(), 188, conn);
PacketNumber++;
//rest of packets
while (ToPack.size()){
PackData.Clear();
PackData.PID(PIDno);
PackData.ContinuityCounter(( *ContCounter)++);
toSend = PackData.AddStuffing(ToPack.bytes(184));
gonnaSend = ToPack.remove(toSend);
PackData.FillFree(gonnaSend);
HTTP_S.Chunkify(PackData.ToString(), 188, conn);
PacketNumber++;
}
}
}
}
if ( !ss.connected()){
break;
}
}
}
conn.close();
ss.SendNow(conn.getStats("HTTP_Live").c_str());
ss.close();
#if DEBUG >= 5
fprintf(stderr, "HLS: User %i disconnected.\n", conn.getSocket());
#endif
return 0;
} //HLS_Connector main function
} //Connector_HTTP namespace
///\brief The standard process-spawning main function.
int main(int argc, char ** argv){
Util::Config conf(argv[0], PACKAGE_VERSION);
JSON::Value capa;
capa["desc"] = "Enables HTTP protocol Apple-specific streaming (also known as HLS).";
capa["deps"] = "HTTP";
capa["url_rel"] = "/hls/$/index.m3u8";
capa["url_prefix"] = "/hls/$/";
capa["socket"] = "http_live";
capa["codecs"][0u][0u].append("H264");
capa["codecs"][0u][1u].append("AAC");
capa["methods"][0u]["handler"] = "http";
capa["methods"][0u]["type"] = "html5/application/vnd.apple.mpegurl";
capa["methods"][0u]["priority"] = 9ll;
conf.addBasicConnectorOptions(capa);
conf.parseArgs(argc, argv);
if (conf.getBool("json")){
std::cout << capa.toString() << std::endl;
return -1;
}
return conf.serveForkedSocket(Connector_HTTP::liveConnector);
} //main

View file

@@ -1,217 +0,0 @@
///\file conn_http_progressive_flv.cpp
///\brief Contains the main code for the HTTP Progressive FLV Connector
#include <iostream>
#include <queue>
#include <sstream>
#include <cstdlib>
#include <cstdio>
#include <cmath>
#include <unistd.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <mist/socket.h>
#include <mist/http_parser.h>
#include <mist/dtsc.h>
#include <mist/flv_tag.h>
#include <mist/amf.h>
#include <mist/config.h>
#include <mist/stream.h>
#include <mist/timing.h>
///\brief Holds everything unique to HTTP Connectors.
namespace Connector_HTTP {
///\brief Main function for the HTTP Progressive Connector
///\param conn A socket describing the connection to the client.
///\return The exit code of the connector.
int progressiveConnector(Socket::Connection & conn){
bool progressive_has_sent_header = false;//Indicates whether we have sent a header.
bool ready4data = false; //Set to true when streaming is to begin.
DTSC::Stream Strm; //Incoming stream buffer.
HTTP::Parser HTTP_R, HTTP_S;//HTTP Receiver and HTTP Sender.
bool inited = false;//Whether the stream is initialized
Socket::Connection ss( -1);//The Stream Socket, used to connect to the desired stream.
std::string streamname;//Will contain the name of the stream.
FLV::Tag tag;//Temporary tag buffer.
unsigned int lastStats = 0;//Indicates the last time that we have sent stats to the server socket.
unsigned int seek_sec = 0;//Seek position in ms
unsigned int seek_byte = 0;//Seek position in bytes
int videoID = -1;
int audioID = -1;
while (conn.connected()){
//Only attempt to parse input when not yet init'ed.
if ( !inited){
if (conn.spool() && HTTP_R.Read(conn)){
#if DEBUG >= 5
std::cout << "Received request: " << HTTP_R.getUrl() << std::endl;
#endif
conn.setHost(HTTP_R.GetHeader("X-Origin"));
streamname = HTTP_R.GetHeader("X-Stream");
int start = 0;
if ( !HTTP_R.GetVar("start").empty()){
start = atoi(HTTP_R.GetVar("start").c_str());
}
if ( !HTTP_R.GetVar("starttime").empty()){
start = atoi(HTTP_R.GetVar("starttime").c_str());
}
if ( !HTTP_R.GetVar("apstart").empty()){
start = atoi(HTTP_R.GetVar("apstart").c_str());
}
if ( !HTTP_R.GetVar("ec_seek").empty()){
start = atoi(HTTP_R.GetVar("ec_seek").c_str());
}
if ( !HTTP_R.GetVar("fs").empty()){
start = atoi(HTTP_R.GetVar("fs").c_str());
}
//under 3 hours we assume seconds, otherwise byte position
if (start < 10800){
seek_sec = start * 1000; //ms, not s
seek_byte = 0;
}else{
seek_byte = start; //byte position; converted to ms below using the stream byterate
seek_sec = 0;
}
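//Illustrative example of the heuristic above: a request with start=90 is treated as 90
//seconds (seek_sec = 90000 ms), while start=5000000 is treated as a byte offset and is
//converted to a timestamp further down using the combined track byterate.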
ready4data = true;
HTTP_R.Clean(); //clean for any possible next requests
}
}
if (ready4data){
if ( !inited){
//we are ready, connect the socket!
ss = Util::Stream::getStream(streamname);
if ( !ss.connected()){
#if DEBUG >= 1
fprintf(stderr, "Could not connect to server for %s!\n", streamname.c_str());
#endif
ss.close();
HTTP_S.Clean();
HTTP_S.SetBody("No such stream is available on the system. Please try again.\n");
conn.SendNow(HTTP_S.BuildResponse("404", "Not found"));
ready4data = false;
continue;
}
Strm.waitForMeta(ss);
int byterate = 0;
for (std::map<int,DTSC::Track>::iterator it = Strm.metadata.tracks.begin(); it != Strm.metadata.tracks.end(); it++){
if (videoID == -1 && (it->second.codec == "H264" || it->second.codec == "H263" || it->second.codec == "VP6")){
videoID = it->second.trackID;
}
if (audioID == -1 && (it->second.codec == "AAC" || it->second.codec == "MP3")){
audioID = it->second.trackID;
}
}
if (videoID != -1){
byterate += Strm.metadata.tracks[videoID].bps;
}
if (audioID != -1){
byterate += Strm.metadata.tracks[audioID].bps;
}
if ( !byterate){byterate = 1;}
if (seek_byte){
seek_sec = (seek_byte / byterate) * 1000;
}
std::stringstream cmd;
cmd << "t";
if (videoID != -1){
cmd << " " << videoID;
}
if (audioID != -1){
cmd << " " << audioID;
}
cmd << "\ns " << seek_sec << "\np\n";
ss.SendNow(cmd.str().c_str(), cmd.str().size());
inited = true;
}
unsigned int now = Util::epoch();
if (now != lastStats){
lastStats = now;
ss.SendNow(conn.getStats("HTTP_Progressive_FLV"));
}
if (ss.spool()){
while (Strm.parsePacket(ss.Received())){
if ( !progressive_has_sent_header){
HTTP_S.Clean(); //make sure no parts of old requests are left in any buffers
HTTP_S.SetHeader("Content-Type", "video/x-flv"); //Send the correct content-type for FLV files
//HTTP_S.SetHeader("Transfer-Encoding", "chunked");
HTTP_S.protocol = "HTTP/1.0";
conn.SendNow(HTTP_S.BuildResponse("200", "OK")); //no SetBody = unknown length - this is intentional, we will stream the entire file
conn.SendNow(FLV::Header, 13); //write FLV header
//write metadata
tag.DTSCMetaInit(Strm, Strm.metadata.tracks[videoID], Strm.metadata.tracks[audioID]);
conn.SendNow(tag.data, tag.len);
//write video init data, if needed
if (videoID != -1){
tag.DTSCVideoInit(Strm.metadata.tracks[videoID]);
conn.SendNow(tag.data, tag.len);
}
//write audio init data, if needed
if (audioID != -1){
tag.DTSCAudioInit(Strm.metadata.tracks[audioID]);
conn.SendNow(tag.data, tag.len);
}
progressive_has_sent_header = true;
}
if (Strm.lastType() == DTSC::PAUSEMARK){
conn.close();
}
if (Strm.lastType() == DTSC::INVALID){
#if DEBUG >= 3
fprintf(stderr, "Invalid packet received - closing connection.\n");
#endif
conn.close();
}
if (Strm.lastType() == DTSC::AUDIO || Strm.lastType() == DTSC::VIDEO){
std::string codec = Strm.metadata.tracks[Strm.getPacket()["trackid"].asInt()].codec;
if (codec == "AAC" || codec == "MP3" || codec == "H264" || codec == "H263" || codec == "VP6"){
tag.DTSCLoader(Strm);
conn.SendNow(tag.data, tag.len); //write the tag contents
}
}
}
}else{
Util::sleep(1);
}
if ( !ss.connected()){
break;
}
}
}
conn.close();
ss.SendNow(conn.getStats("HTTP_Progressive_FLV").c_str());
ss.close();
return 0;
} //Progressive_Connector main function
} //Connector_HTTP namespace
///\brief The standard process-spawning main function.
int main(int argc, char ** argv){
Util::Config conf(argv[0], PACKAGE_VERSION);
JSON::Value capa;
capa["desc"] = "Enables HTTP protocol progressive streaming.";
capa["deps"] = "HTTP";
capa["url_rel"] = "/$.flv";
capa["url_match"] = "/$.flv";
capa["socket"] = "http_progressive_flv";
capa["codecs"][0u][0u].append("H264");
capa["codecs"][0u][0u].append("H263");
capa["codecs"][0u][0u].append("VP6");
capa["codecs"][0u][1u].append("AAC");
capa["codecs"][0u][1u].append("MP3");
capa["methods"][0u]["handler"] = "http";
capa["methods"][0u]["type"] = "flash/7";
capa["methods"][0u]["priority"] = 5ll;
conf.addBasicConnectorOptions(capa);
conf.parseArgs(argc, argv);
if (conf.getBool("json")){
std::cout << capa.toString() << std::endl;
return -1;
}
return conf.serveForkedSocket(Connector_HTTP::progressiveConnector);
} //main

View file

@@ -1,184 +0,0 @@
///\file conn_http_progressive_mp3.cpp
///\brief Contains the main code for the HTTP Progressive MP3 Connector
#include <iostream>
#include <queue>
#include <sstream>
#include <cstdlib>
#include <cstdio>
#include <cmath>
#include <unistd.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <mist/socket.h>
#include <mist/http_parser.h>
#include <mist/dtsc.h>
#include <mist/flv_tag.h>
#include <mist/amf.h>
#include <mist/config.h>
#include <mist/stream.h>
#include <mist/timing.h>
///\brief Holds everything unique to HTTP Connectors.
namespace Connector_HTTP {
///\brief Main function for the HTTP Progressive Connector
///\param conn A socket describing the connection to the client.
///\return The exit code of the connector.
int progressiveConnector(Socket::Connection & conn){
bool progressive_has_sent_header = false;//Indicates whether we have sent a header.
bool ready4data = false; //Set to true when streaming is to begin.
DTSC::Stream Strm; //Incoming stream buffer.
HTTP::Parser HTTP_R, HTTP_S;//HTTP Receiver and HTTP Sender.
bool inited = false;//Whether the stream is initialized
Socket::Connection ss( -1);//The Stream Socket, used to connect to the desired stream.
std::string streamname;//Will contain the name of the stream.
FLV::Tag tag;//Temporary tag buffer.
unsigned int lastStats = 0;//Indicates the last time that we have sent stats to the server socket.
unsigned int seek_sec = 0;//Seek position in ms
unsigned int seek_byte = 0;//Seek position in bytes
int audioID = -1;
while (conn.connected()){
//Only attempt to parse input when not yet init'ed.
if ( !inited){
if (conn.spool() && HTTP_R.Read(conn)){
#if DEBUG >= 5
std::cout << "Received request: " << HTTP_R.getUrl() << std::endl;
#endif
conn.setHost(HTTP_R.GetHeader("X-Origin"));
streamname = HTTP_R.GetHeader("X-Stream");
int start = 0;
if ( !HTTP_R.GetVar("start").empty()){
start = atoi(HTTP_R.GetVar("start").c_str());
}
if ( !HTTP_R.GetVar("starttime").empty()){
start = atoi(HTTP_R.GetVar("starttime").c_str());
}
if ( !HTTP_R.GetVar("apstart").empty()){
start = atoi(HTTP_R.GetVar("apstart").c_str());
}
if ( !HTTP_R.GetVar("ec_seek").empty()){
start = atoi(HTTP_R.GetVar("ec_seek").c_str());
}
if ( !HTTP_R.GetVar("fs").empty()){
start = atoi(HTTP_R.GetVar("fs").c_str());
}
//under 3 hours we assume seconds, otherwise byte position
if (start < 10800){
seek_sec = start * 1000; //ms, not s
}else{
seek_byte = start; //byte position; converted to ms below using the stream byterate
}
ready4data = true;
HTTP_R.Clean(); //clean for any possible next requests
}
}
if (ready4data){
if ( !inited){
//we are ready, connect the socket!
ss = Util::Stream::getStream(streamname);
if ( !ss.connected()){
#if DEBUG >= 1
fprintf(stderr, "Could not connect to server for %s!\n", streamname.c_str());
#endif
ss.close();
HTTP_S.Clean();
HTTP_S.SetBody("No such stream is available on the system. Please try again.\n");
conn.SendNow(HTTP_S.BuildResponse("404", "Not found"));
ready4data = false;
continue;
}
Strm.waitForMeta(ss);
int byterate = 0;
for (std::map<int,DTSC::Track>::iterator it = Strm.metadata.tracks.begin(); it != Strm.metadata.tracks.end(); it++){
if (audioID == -1 && it->second.codec == "MP3"){
audioID = it->second.trackID;
}
}
if (audioID != -1){
byterate += Strm.metadata.tracks[audioID].bps;
}
if ( !byterate){byterate = 1;}
if (seek_byte){
seek_sec = (seek_byte / byterate) * 1000;
}
std::stringstream cmd;
cmd << "t";
if (audioID != -1){
cmd << " " << audioID;
}
cmd << "\ns " << seek_sec << "\np\n";
ss.SendNow(cmd.str().c_str(), cmd.str().size());
inited = true;
}
unsigned int now = Util::epoch();
if (now != lastStats){
lastStats = now;
ss.SendNow(conn.getStats("HTTP_Progressive").c_str());
}
if (ss.spool()){
while (Strm.parsePacket(ss.Received())){
if ( !progressive_has_sent_header){
HTTP_S.Clean(); //make sure no parts of old requests are left in any buffers
HTTP_S.SetHeader("Content-Type", "audio/mpeg"); //Send the correct content-type for MP3 files
//HTTP_S.SetHeader("Transfer-Encoding", "chunked");
HTTP_S.protocol = "HTTP/1.0";
conn.SendNow(HTTP_S.BuildResponse("200", "OK")); //no SetBody = unknown length - this is intentional, we will stream the entire file
progressive_has_sent_header = true;
}
if (Strm.lastType() == DTSC::PAUSEMARK){
conn.close();
}
if (Strm.lastType() == DTSC::INVALID){
#if DEBUG >= 3
fprintf(stderr, "Invalid packet received - closing connection.\n");
#endif
conn.close();
}
if (Strm.lastType() == DTSC::AUDIO){
conn.SendNow(Strm.lastData()); //write the MP3 contents
}
}
}else{
Util::sleep(1);
}
if ( !ss.connected()){
break;
}
}
}
conn.close();
ss.SendNow(conn.getStats("HTTP_Dynamic").c_str());
ss.close();
return 0;
} //Progressive_Connector main function
} //Connector_HTTP namespace
///\brief The standard process-spawning main function.
int main(int argc, char ** argv){
Util::Config conf(argv[0], PACKAGE_VERSION);
JSON::Value capa;
capa["desc"] = "Enables HTTP protocol progressive streaming.";
capa["deps"] = "HTTP";
capa["codecs"][0u][0u].append("MP3");
capa["url_rel"] = "/$.mp3";
capa["url_match"] = "/$.mp3";
capa["socket"] = "http_progressive_mp3";
capa["methods"][0u]["handler"] = "http";
capa["methods"][0u]["type"] = "mp3";
capa["methods"][0u]["priority"] = 8ll;
conf.addBasicConnectorOptions(capa);
conf.parseArgs(argc, argv);
if (conf.getBool("json")){
std::cout << capa.toString() << std::endl;
return -1;
}
return conf.serveForkedSocket(Connector_HTTP::progressiveConnector);
} //main

View file

@@ -1,656 +0,0 @@
///\file conn_http_progressive_mp4.cpp
///\brief Contains the main code for the HTTP Progressive MP4 Connector
#include <iostream>
#include <queue>
#include <sstream>
#include <cstdlib>
#include <cstdio>
#include <cmath>
#include <unistd.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <mist/socket.h>
#include <mist/http_parser.h>
#include <mist/dtsc.h>
#include <mist/mp4.h>
#include <mist/mp4_generic.h>
#include <mist/amf.h>
#include <mist/config.h>
#include <mist/stream.h>
#include <mist/timing.h>
#include <mist/defines.h>
///\brief Holds everything unique to HTTP Connectors.
namespace Connector_HTTP {
struct keyPart{
public:
bool operator < (const keyPart& rhs) const {
if (time < rhs.time){
return true;
}
if (time == rhs.time){
if (trackID < rhs.trackID){
return true;
}
}
return false;
}
long unsigned int trackID;
long unsigned int size;
long long unsigned int time;
long long unsigned int endTime;
long unsigned int index;
};
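//keyPart instances are kept in a std::set ordered by (time, trackID), so repeatedly
//taking the first element and re-inserting that track's next part walks all selected
//tracks' parts in interleaved timestamp order. The header generation and seeking code
//below both rely on this ordering.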
std::string DTSCMeta2MP4Header(DTSC::Meta & metaData, std::set<int> & tracks, long long & size){
std::stringstream header;
//ftyp box
MP4::FTYP ftypBox;
header << std::string(ftypBox.asBox(),ftypBox.boxedSize());
uint64_t mdatSize = 0;
//moov box
MP4::MOOV moovBox;
unsigned int moovOffset = 0;
{
//calculating longest duration
long long int firstms = -1;
long long int lastms = -1;
for (std::set<int>::iterator it = tracks.begin(); it != tracks.end(); it++) {
if (lastms == -1 || lastms < metaData.tracks[*it].lastms){
lastms = metaData.tracks[*it].lastms;
}
if (firstms == -1 || firstms > metaData.tracks[*it].firstms){
firstms = metaData.tracks[*it].firstms;
}
}
MP4::MVHD mvhdBox(lastms - firstms);
moovBox.setContent(mvhdBox, moovOffset++);
}
for (std::set<int>::iterator it = tracks.begin(); it != tracks.end(); it++) {
MP4::TRAK trakBox;
{
{
MP4::TKHD tkhdBox(*it, metaData.tracks[*it].lastms - metaData.tracks[*it].firstms, metaData.tracks[*it].width, metaData.tracks[*it].height);
trakBox.setContent(tkhdBox, 0);
}{
MP4::MDIA mdiaBox;
unsigned int mdiaOffset = 0;
{
MP4::MDHD mdhdBox(metaData.tracks[*it].lastms - metaData.tracks[*it].firstms);
mdiaBox.setContent(mdhdBox, mdiaOffset++);
}//MDHD box
{
MP4::HDLR hdlrBox(metaData.tracks[*it].type, metaData.tracks[*it].getIdentifier());
mdiaBox.setContent(hdlrBox, mdiaOffset++);
}//hdlr box
{
MP4::MINF minfBox;
unsigned int minfOffset = 0;
if (metaData.tracks[*it].type== "video"){
MP4::VMHD vmhdBox;
vmhdBox.setFlags(1);
minfBox.setContent(vmhdBox,minfOffset++);
}else if (metaData.tracks[*it].type == "audio"){
MP4::SMHD smhdBox;
minfBox.setContent(smhdBox,minfOffset++);
}//type box
{
MP4::DINF dinfBox;
MP4::DREF drefBox;
dinfBox.setContent(drefBox,0);
minfBox.setContent(dinfBox,minfOffset++);
}//dinf box
{
MP4::STBL stblBox;
unsigned int offset = 0;
{
MP4::STSD stsdBox;
stsdBox.setVersion(0);
if (metaData.tracks[*it].type == "video"){//boxname = codec
MP4::VisualSampleEntry vse;
if (metaData.tracks[*it].codec == "H264"){
vse.setCodec("avc1");
}
vse.setDataReferenceIndex(1);
vse.setWidth(metaData.tracks[*it].width);
vse.setHeight(metaData.tracks[*it].height);
MP4::AVCC avccBox;
avccBox.setPayload(metaData.tracks[*it].init);
vse.setCLAP(avccBox);
stsdBox.setEntry(vse,0);
}else if(metaData.tracks[*it].type == "audio"){//boxname = codec
MP4::AudioSampleEntry ase;
if (metaData.tracks[*it].codec == "AAC"){
ase.setCodec("mp4a");
ase.setDataReferenceIndex(1);
}
ase.setSampleRate(metaData.tracks[*it].rate);
ase.setChannelCount(metaData.tracks[*it].channels);
ase.setSampleSize(metaData.tracks[*it].size);
//MP4::ESDS esdsBox(metaData.tracks[*it].init, metaData.tracks[*it].bps);
MP4::ESDS esdsBox;
//outputting these values first, so malloc isn't called as often.
esdsBox.setESHeaderStartCodes(metaData.tracks[*it].init);
esdsBox.setSLValue(2);
esdsBox.setESDescriptorTypeLength(32+metaData.tracks[*it].init.size());
esdsBox.setESID(2);
esdsBox.setStreamPriority(0);
esdsBox.setDecoderConfigDescriptorTypeLength(18 + metaData.tracks[*it].init.size());
esdsBox.setByteObjectTypeID(0x40);
esdsBox.setStreamType(5);
esdsBox.setReservedFlag(1);
esdsBox.setBufferSize(1250000);
esdsBox.setMaximumBitRate(10000000);
esdsBox.setAverageBitRate(metaData.tracks[*it].bps * 8);
esdsBox.setConfigDescriptorTypeLength(5);
esdsBox.setSLConfigDescriptorTypeTag(0x6);
esdsBox.setSLConfigExtendedDescriptorTypeTag(0x808080);
esdsBox.setSLDescriptorTypeLength(1);
ase.setCodecBox(esdsBox);
stsdBox.setEntry(ase,0);
}
stblBox.setContent(stsdBox,offset++);
}//stsd box
{
MP4::STTS sttsBox;
sttsBox.setVersion(0);
if (metaData.tracks[*it].parts.size()){
for (unsigned int part = 0; part < metaData.tracks[*it].parts.size(); part++){
MP4::STTSEntry newEntry;
newEntry.sampleCount = 1;
newEntry.sampleDelta = metaData.tracks[*it].parts[part].getDuration();
sttsBox.setSTTSEntry(newEntry, part);
}
}
stblBox.setContent(sttsBox,offset++);
}//stts box
if (metaData.tracks[*it].type == "video"){
//STSS Box here
MP4::STSS stssBox;
stssBox.setVersion(0);
int tmpCount = 1;
int tmpItCount = 0;
for ( std::deque< DTSC::Key>::iterator tmpIt = metaData.tracks[*it].keys.begin(); tmpIt != metaData.tracks[*it].keys.end(); tmpIt ++) {
stssBox.setSampleNumber(tmpCount,tmpItCount);
tmpCount += tmpIt->getParts();
tmpItCount ++;
}
stblBox.setContent(stssBox,offset++);
}//stss box
{
MP4::STSC stscBox;
stscBox.setVersion(0);
MP4::STSCEntry stscEntry;
stscEntry.firstChunk = 1;
stscEntry.samplesPerChunk = 1;
stscEntry.sampleDescriptionIndex = 1;
stscBox.setSTSCEntry(stscEntry, 0);
stblBox.setContent(stscBox,offset++);
}//stsc box
{
uint32_t total = 0;
MP4::STSZ stszBox;
stszBox.setVersion(0);
total = 0;
for (std::deque< DTSC::Part>::iterator partIt = metaData.tracks[*it].parts.begin(); partIt != metaData.tracks[*it].parts.end(); partIt ++) {
stszBox.setEntrySize(partIt->getSize(), total);//in bytes in file
size += partIt->getSize();
total++;
}
stblBox.setContent(stszBox,offset++);
}//stsz box
//add STCO boxes here
{
MP4::STCO stcoBox;
stcoBox.setVersion(1);
//Inserting empty values on purpose here, will be fixed later.
if (metaData.tracks[*it].parts.size() != 0){
stcoBox.setChunkOffset(0, metaData.tracks[*it].parts.size() - 1);//this inserts all empty entries at once
}
stblBox.setContent(stcoBox,offset++);
}//stco box
minfBox.setContent(stblBox,minfOffset++);
}//stbl box
mdiaBox.setContent(minfBox, mdiaOffset++);
}//minf box
trakBox.setContent(mdiaBox, 1);
}
}//trak Box
moovBox.setContent(trakBox, moovOffset++);
}
//initial offset length ftyp, length moov + 8
unsigned long long int byteOffset = ftypBox.boxedSize() + moovBox.boxedSize() + 8;
//update all STCO from the following map;
std::map <int, MP4::STCO> checkStcoBoxes;
//for all tracks
for (unsigned int i = 1; i < moovBox.getContentCount(); i++){
//10 lines to get the STCO box.
MP4::TRAK checkTrakBox;
MP4::Box checkMdiaBox;
MP4::Box checkTkhdBox;
MP4::MINF checkMinfBox;
MP4::STBL checkStblBox;
//MP4::STCO checkStcoBox;
checkTrakBox = ((MP4::TRAK&)moovBox.getContent(i));
for (unsigned int j = 0; j < checkTrakBox.getContentCount(); j++){
if (checkTrakBox.getContent(j).isType("mdia")){
checkMdiaBox = checkTrakBox.getContent(j);
break;
}
if (checkTrakBox.getContent(j).isType("tkhd")){
checkTkhdBox = checkTrakBox.getContent(j);
}
}
for (unsigned int j = 0; j < ((MP4::MDIA&)checkMdiaBox).getContentCount(); j++){
if (((MP4::MDIA&)checkMdiaBox).getContent(j).isType("minf")){
checkMinfBox = ((MP4::MINF&)((MP4::MDIA&)checkMdiaBox).getContent(j));
break;
}
}
for (unsigned int j = 0; j < checkMinfBox.getContentCount(); j++){
if (checkMinfBox.getContent(j).isType("stbl")){
checkStblBox = ((MP4::STBL&)checkMinfBox.getContent(j));
break;
}
}
for (unsigned int j = 0; j < checkStblBox.getContentCount(); j++){
if (checkStblBox.getContent(j).isType("stco")){
checkStcoBoxes.insert( std::pair<int, MP4::STCO>(((MP4::TKHD&)checkTkhdBox).getTrackID(), ((MP4::STCO&)checkStblBox.getContent(j)) ));
break;
}
}
}
//inserting right values in the STCO box header
//total = 0;
long long unsigned int totalByteOffset = 0;
//Current values are actual byte offset without header-sized offset
std::set <keyPart> sortSet;//filling sortset for interleaving parts
for (std::set<int>::iterator subIt = tracks.begin(); subIt != tracks.end(); subIt++) {
keyPart temp;
temp.trackID = *subIt;
temp.time = metaData.tracks[*subIt].firstms;//timestamp of frame
temp.endTime = metaData.tracks[*subIt].firstms + metaData.tracks[*subIt].parts[0].getDuration();
temp.size = metaData.tracks[*subIt].parts[0].getSize();//bytesize of frame (all parts together)
temp.index = 0;
sortSet.insert(temp);
}
while (!sortSet.empty()){
//setting the right STCO size in the STCO box
checkStcoBoxes[sortSet.begin()->trackID].setChunkOffset(totalByteOffset + byteOffset, sortSet.begin()->index);
totalByteOffset += sortSet.begin()->size;
//add keyPart to sortSet
keyPart temp;
temp.index = sortSet.begin()->index + 1;
temp.trackID = sortSet.begin()->trackID;
if(temp.index < metaData.tracks[temp.trackID].parts.size() ){//only insert when there are parts left
temp.time = sortSet.begin()->endTime;//timestamp of frame
temp.endTime = sortSet.begin()->endTime + metaData.tracks[temp.trackID].parts[temp.index].getDuration();
temp.size = metaData.tracks[temp.trackID].parts[temp.index].getSize();//bytesize of frame
sortSet.insert(temp);
}
//remove highest keyPart
sortSet.erase(sortSet.begin());
}
mdatSize = totalByteOffset+8;
header << std::string(moovBox.asBox(),moovBox.boxedSize());
header << (char)((mdatSize>>24) & 0xFF) << (char)((mdatSize>>16) & 0xFF) << (char)((mdatSize>>8) & 0xFF) << (char)(mdatSize & 0xFF) << "mdat";
//end of header
size += header.str().size();
return header.str();
}
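//Rough layout of the header produced above (box nesting as built by this function):
//  ftyp
//  moov
//    mvhd (overall duration)
//    trak (one per selected track)
//      tkhd, mdia (mdhd, hdlr, minf (vmhd/smhd, dinf, stbl (stsd, stts, [stss], stsc, stsz, stco)))
//  mdat box header (32-bit size + "mdat"), with the actual payload streamed afterwards
//The stco chunk offsets are filled in afterwards by interleaving all parts by timestamp,
//so the mdat payload can be sent in that same order.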
/// Calculate a seekPoint, based on byteStart, metadata, tracks and headerSize.
/// The seekPoint will be set to the timestamp of the first packet to send.
void findSeekPoint(long long byteStart, long long & seekPoint, DTSC::Meta & metadata, std::set<int> & tracks, unsigned int headerSize){
seekPoint = 0;
//if we're starting in the header, seekPoint is always zero.
if (byteStart <= headerSize){return;}
//okay, we're past the header. Subtract the headersize from the starting position.
byteStart -= headerSize;
//initialize a list of sorted parts that this file contains
std::set <keyPart> sortSet;
for (std::set<int>::iterator subIt = tracks.begin(); subIt != tracks.end(); subIt++) {
keyPart temp;
temp.trackID = *subIt;
temp.time = metadata.tracks[*subIt].firstms;//timestamp of frame
temp.endTime = metadata.tracks[*subIt].firstms + metadata.tracks[*subIt].parts[0].getDuration();
temp.size = metadata.tracks[*subIt].parts[0].getSize();//bytesize of frame (all parts together)
temp.index = 0;
sortSet.insert(temp);
}
//forward through the file by headers, until we reach the point where we need to be
while (!sortSet.empty()){
//subtract the size of this fragment from byteStart
byteStart -= sortSet.begin()->size;
//if that put us past the point where we wanted to be, return right now
if (byteStart < 0){return;}
//otherwise, set seekPoint to where we are now
seekPoint = sortSet.begin()->time;
//then find the next part
keyPart temp;
temp.index = sortSet.begin()->index + 1;
temp.trackID = sortSet.begin()->trackID;
if(temp.index < metadata.tracks[temp.trackID].parts.size() ){//only insert when there are parts left
temp.time = sortSet.begin()->endTime;//timestamp of frame
temp.endTime = sortSet.begin()->endTime + metadata.tracks[temp.trackID].parts[temp.index].getDuration();
temp.size = metadata.tracks[temp.trackID].parts[temp.index].getSize();//bytesize of frame
sortSet.insert(temp);
}
//remove highest keyPart
sortSet.erase(sortSet.begin());
}
//If we're here, we're in the last fragment.
//That's technically legal, of course.
}
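//Sketch of the loop above with made-up numbers: with parts of 1000, 1200 and 900 bytes
//starting at 0, 40 and 80 ms, a byteStart of 2500 (after the header) fully subtracts the
//first two parts (2200 bytes), cannot subtract the third, and returns with seekPoint at
//40 ms, the timestamp of the last part that fit completely before the requested offset.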
/// Parses a "Range: " header, setting byteStart, byteEnd and seekPoint using data from metadata and tracks to do
/// the calculations.
/// On error, byteEnd is set to zero.
void parseRange(std::string header, long long & byteStart, long long & byteEnd, long long & seekPoint, DTSC::Meta & metadata, std::set<int> & tracks, unsigned int headerSize){
if (header.size() < 6 || header.substr(0, 6) != "bytes="){
byteEnd = 0;
DEBUG_MSG(DLVL_WARN, "Invalid range header: %s", header.c_str());
return;
}
header.erase(0, 6);
if (header.size() && header[0] == '-'){
//negative range = count from end
byteStart = 0;
for (unsigned int i = 1; i < header.size(); ++i){
if (header[i] >= '0' && header[i] <= '9'){
byteStart *= 10;
byteStart += header[i] - '0';
continue;
}
break;
}
if (byteStart > byteEnd){
//entire file if starting before byte zero
byteStart = 0;
DEBUG_MSG(DLVL_DEVEL, "Full negative range: %lli-%lli", byteStart, byteEnd);
findSeekPoint(byteStart, seekPoint, metadata, tracks, headerSize);
return;
}else{
//start byteStart bytes before byteEnd
byteStart = byteEnd - byteStart;
DEBUG_MSG(DLVL_DEVEL, "Partial negative range: %lli-%lli", byteStart, byteEnd);
findSeekPoint(byteStart, seekPoint, metadata, tracks, headerSize);
return;
}
}else{
long long size = byteEnd;
byteEnd = 0;
byteStart = 0;
unsigned int i = 0;
for ( ; i < header.size(); ++i){
if (header[i] >= '0' && header[i] <= '9'){
byteStart *= 10;
byteStart += header[i] - '0';
continue;
}
break;
}
if (header[i] != '-'){
DEBUG_MSG(DLVL_WARN, "Invalid range header: %s", header.c_str());
byteEnd = 0;
return;
}
++i;
if (i < header.size()){
for ( ; i < header.size(); ++i){
if (header[i] >= '0' && header[i] <= '9'){
byteEnd *= 10;
byteEnd += header[i] - '0';
continue;
}
break;
}
if (byteEnd > size-1){byteEnd = size;}
}else{
byteEnd = size;
}
DEBUG_MSG(DLVL_DEVEL, "Range request: %lli-%lli (%s)", byteStart, byteEnd, header.c_str());
findSeekPoint(byteStart, seekPoint, metadata, tracks, headerSize);
return;
}
}//parseRange
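//Examples of how the parser above treats a Range header (byteEnd carries the file size
//on entry; exact boundary handling is as coded above):
//  "bytes=0-499"   -> byteStart 0, byteEnd 499
//  "bytes=1000-"   -> byteStart 1000, byteEnd = end of the file
//  "bytes=-500"    -> roughly the last 500 bytes, counted back from byteEnd
//  anything else   -> byteEnd is set to 0 to signal an invalid range
//findSeekPoint() then maps byteStart to the timestamp of the first packet to request.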
///\brief Main function for the HTTP Progressive Connector
///\param conn A socket describing the connection to the client.
///\return The exit code of the connector.
int progressiveConnector(Socket::Connection & conn){
DTSC::Stream Strm; //Incoming stream buffer.
HTTP::Parser HTTP_R, HTTP_S;//HTTP Receiver and HTTP Sender.
long long byteStart = 0;
long long leftOver = 0;
long long currPos = 0;
bool inited = false;//Whether the stream is initialized
Socket::Connection ss( -1);//The Stream Socket, used to connect to the desired stream.
std::string streamname;//Will contain the name of the stream.
std::set <keyPart> sortSet;//filling sortset for interleaving parts
unsigned int lastStats = 0;//Indicates the last time that we have sent stats to the server socket.
while (conn.connected()){
//Only attempt to parse input when not yet init'ed.
if ( !inited){
if (conn.spool() && HTTP_R.Read(conn)){
DEBUG_MSG(DLVL_DEVEL, "Received request: %s", HTTP_R.getUrl().c_str());
conn.setHost(HTTP_R.GetHeader("X-Origin"));
streamname = HTTP_R.GetHeader("X-Stream");
if (!ss){
ss = Util::Stream::getStream(streamname);
if (ss){
Strm.waitForMeta(ss);
}
if (!ss){
DEBUG_MSG(DLVL_FAIL, "Could not connect to stream %s!", streamname.c_str());
ss.close();
HTTP_S.Clean();
HTTP_R.Clean();
HTTP_S.SetBody("No such stream is available on the system. Please try again.\n");
HTTP_S.SendResponse("404", "Not found", conn);
continue;
}
}
int videoID = -1;
int audioID = -1;
if (HTTP_R.GetVar("audio") != ""){
audioID = JSON::Value(HTTP_R.GetVar("audio")).asInt();
}
if (HTTP_R.GetVar("video") != ""){
videoID = JSON::Value(HTTP_R.GetVar("video")).asInt();
}
for (std::map<int,DTSC::Track>::iterator it = Strm.metadata.tracks.begin(); it != Strm.metadata.tracks.end(); it++){
if (videoID == -1 && it->second.type == "video" && it->second.codec == "H264"){
videoID = it->first;
}
if (audioID == -1 && it->second.type == "audio" && it->second.codec == "AAC"){
audioID = it->first;
}
}
std::set<int> tracks;
if (videoID > 0){tracks.insert(videoID);}
if (audioID > 0){tracks.insert(audioID);}
HTTP_S.Clean(); //make sure no parts of old requests are left in any buffers
HTTP_S.SetHeader("Content-Type", "video/MP4"); //Send the correct content-type for MP4 files
HTTP_S.SetHeader("Accept-Ranges", "bytes, parsec");
long long size = 0;
std::string headerData = DTSCMeta2MP4Header(Strm.metadata, tracks, size);
byteStart = 0;
long long byteEnd = size-1;
long long seekPoint = 0;
if (HTTP_R.GetHeader("Range") != ""){
parseRange(HTTP_R.GetHeader("Range"), byteStart, byteEnd, seekPoint, Strm.metadata, tracks, headerData.size());
if (!byteEnd){
if (HTTP_R.GetHeader("Range")[0] == 'p'){
HTTP_S.SetBody("Starsystem not in communications range");
HTTP_S.SendResponse("416", "Starsystem not in communications range", conn);
HTTP_R.Clean(); //clean for any possible next requests
continue;
}else{
HTTP_S.SetBody("Requested Range Not Satisfiable");
HTTP_S.SendResponse("416", "Requested Range Not Satisfiable", conn);
HTTP_R.Clean(); //clean for any possible next requests
continue;
}
}else{
std::stringstream rangeReply;
rangeReply << "bytes " << byteStart << "-" << byteEnd << "/" << size;
HTTP_S.SetHeader("Content-Length", byteEnd - byteStart + 1);
//do not multiplex requests that are > 1MiB
if (byteEnd - byteStart + 1 > 1024*1024){
HTTP_S.SetHeader("MistMultiplex", "No");
}
HTTP_S.SetHeader("Content-Range", rangeReply.str());
/// \todo Switch to chunked?
HTTP_S.SendResponse("206", "Partial content", conn);
//HTTP_S.StartResponse("206", "Partial content", HTTP_R, conn);
}
}else{
HTTP_S.SetHeader("Content-Length", byteEnd - byteStart + 1);
//do not multiplex requests that aren't ranged
HTTP_S.SetHeader("MistMultiplex", "No");
/// \todo Switch to chunked?
HTTP_S.SendResponse("200", "OK", conn);
//HTTP_S.StartResponse(HTTP_R, conn);
}
leftOver = byteEnd - byteStart + 1;//add one byte, because range "0-0" = 1 byte of data
currPos = 0;
if (byteStart < (long long)headerData.size()){
/// \todo Switch to chunked?
//HTTP_S.Chunkify(headerData.data()+byteStart, std::min((long long)headerData.size(), byteEnd) - byteStart, conn);//send MP4 header
conn.SendNow(headerData.data()+byteStart, std::min((long long)headerData.size(), byteEnd) - byteStart);//send MP4 header
leftOver -= std::min((long long)headerData.size(), byteEnd) - byteStart;
}
currPos = headerData.size();//we're now guaranteed to be past the header point, no matter what
HTTP_R.Clean(); //clean for any possible next requests
{//using scope to have cmd not declared after action
std::stringstream cmd;
cmd << "t";
for (std::set<int>::iterator it = tracks.begin(); it != tracks.end(); it++) {
cmd << " " << *it;
}
cmd << "\ns " << seekPoint << "\np\n";
ss.SendNow(cmd.str());
}
sortSet.clear();
for (std::set<int>::iterator subIt = tracks.begin(); subIt != tracks.end(); subIt++) {
keyPart temp;
temp.trackID = *subIt;
temp.time = Strm.metadata.tracks[*subIt].firstms;//timestamp of frame
temp.endTime = Strm.metadata.tracks[*subIt].firstms + Strm.metadata.tracks[*subIt].parts[0].getDuration();
temp.size = Strm.metadata.tracks[*subIt].parts[0].getSize();//bytesize of frame (all parts together)
temp.index = 0;
sortSet.insert(temp);
}
inited = true;
}
}else{
unsigned int now = Util::epoch();
if (now != lastStats){
lastStats = now;
ss.SendNow(conn.getStats("HTTP_Progressive_MP4").c_str());
}
if (ss.spool()){
while (Strm.parsePacket(ss.Received())){
if (Strm.lastType() == DTSC::PAUSEMARK){
conn.close();
}else if(Strm.lastType() == DTSC::AUDIO || Strm.lastType() == DTSC::VIDEO){
//keep track of where we are - fast-forward until where we are now
while (!sortSet.empty() && ((long long)sortSet.begin()->trackID != Strm.getPacket()["trackid"].asInt() || (long long)sortSet.begin()->time != Strm.getPacket()["time"].asInt())){
keyPart temp;
temp.index = sortSet.begin()->index + 1;
temp.trackID = sortSet.begin()->trackID;
if(temp.index < Strm.metadata.tracks[temp.trackID].parts.size() ){//only insert when there are parts left
temp.time = sortSet.begin()->endTime;//timestamp of frame
temp.endTime = sortSet.begin()->endTime + Strm.metadata.tracks[temp.trackID].parts[temp.index].getDuration();
temp.size = Strm.metadata.tracks[temp.trackID].parts[temp.index].getSize();//bytesize of frame
sortSet.insert(temp);
}
currPos += sortSet.begin()->size;
//remove highest keyPart
sortSet.erase(sortSet.begin());
}
if (currPos >= byteStart){
sortSet.clear();//we don't need you anymore!
if (leftOver < (long long)Strm.lastData().size()){
conn.SendNow(Strm.lastData().data(), leftOver);
}else{
conn.SendNow(Strm.lastData());
}
//HTTP_S.Chunkify(Strm.lastData().data(), Strm.lastData().size(), conn);
leftOver -= Strm.lastData().size();
}else{
if (currPos + (long long)Strm.lastData().size() > byteStart){
conn.SendNow(Strm.lastData().data()+(byteStart-currPos), Strm.lastData().size()-(byteStart-currPos));
leftOver -= Strm.lastData().size()-(byteStart-currPos);
currPos = byteStart;
sortSet.clear();//we don't need you anymore!
}
}
if (leftOver < 1){
ss.SendNow("q\n");//stop playback
Strm.waitForPause(ss);//sync the stream
inited = false;
}
}
if (Strm.lastType() == DTSC::INVALID){
DEBUG_MSG(DLVL_FAIL, "Invalid packet received - closing connection");
conn.close();
}
}
}else{
Util::sleep(10);
}
if ( !ss.connected()){
break;
}
}
}
conn.close();
ss.SendNow(conn.getStats("HTTP_Progressive_MP4").c_str());
ss.close();
return 0;
} //Progressive_Connector main function
} //Connector_HTTP namespace
///\brief The standard process-spawning main function.
int main(int argc, char ** argv){
Util::Config conf(argv[0], PACKAGE_VERSION);
JSON::Value capa;
capa["desc"] = "Enables HTTP protocol progressive streaming.";
capa["deps"] = "HTTP";
capa["url_rel"] = "/$.mp4";
capa["url_match"] = "/$.mp4";
capa["codecs"][0u][0u].append("H264");
capa["codecs"][0u][1u].append("AAC");
capa["methods"][0u]["handler"] = "http";
capa["methods"][0u]["type"] = "html5/video/mp4";
capa["methods"][0u]["priority"] = 8ll;
capa["methods"][0u]["nolive"] = 1;
capa["socket"] = "http_progressive_mp4";
conf.addBasicConnectorOptions(capa);
conf.parseArgs(argc, argv);
if (conf.getBool("json")){
std::cout << capa.toString() << std::endl;
return -1;
}
return conf.serveForkedSocket(Connector_HTTP::progressiveConnector);
} //main

View file

@@ -1,186 +0,0 @@
///\file conn_http_progressive_ogg.cpp
///\brief Contains the main code for the HTTP Progressive OGG Connector
#include <iostream>
#include <queue>
#include <sstream>
#include <cstdlib>
#include <cstdio>
#include <cmath>
#include <unistd.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <mist/socket.h>
#include <mist/http_parser.h>
#include <mist/dtsc.h>
#include <mist/ogg.h>
#include <mist/amf.h>
#include <mist/config.h>
#include <mist/stream.h>
#include <mist/timing.h>
#include "../converters/oggconv.h"
///\brief Holds everything unique to HTTP Connectors.
namespace Connector_HTTP {
///\brief Main function for the HTTP Progressive Connector
///\param conn A socket describing the connection to the client.
///\return The exit code of the connector.
int progressiveConnector(Socket::Connection & conn){
bool progressive_has_sent_header = false;//Indicates whether we have sent a header.
bool ready4data = false; //Set to true when streaming is to begin.
DTSC::Stream Strm; //Incoming stream buffer.
HTTP::Parser HTTP_R, HTTP_S;//HTTP Receiver and HTTP Sender.
bool inited = false;//Whether the stream is initialized
Socket::Connection ss( -1);//The Stream Socket, used to connect to the desired stream.
std::string streamname;//Will contain the name of the stream.
//OGG specific variables
//OGG::headerPages oggMeta;
//OGG::Page curOggPage;
OGG::converter oggConv;
std::map <long long unsigned int, std::vector<JSON::Value> > DTSCBuffer;
//std::map <long long unsigned int, long long unsigned int> prevGran;
std::vector<unsigned int> curSegTable;
std::string sendBuffer;
unsigned int lastStats = 0;//Indicates the last time that we have sent stats to the server socket.
int videoID = -1;
int audioID = -1;
while (conn.connected()){
//Only attempt to parse input when not yet init'ed.
if ( !inited){
if (conn.spool() && HTTP_R.Read(conn)){
#if DEBUG >= 5
std::cout << "Received request: " << HTTP_R.getUrl() << std::endl;
#endif
conn.setHost(HTTP_R.GetHeader("X-Origin"));
streamname = HTTP_R.GetHeader("X-Stream");
ready4data = true;
HTTP_R.Clean(); //clean for any possible next requests
}
}
if (ready4data){
if ( !inited){
//we are ready, connect the socket!
ss = Util::Stream::getStream(streamname);
if ( !ss.connected()){
#if DEBUG >= 1
fprintf(stderr, "Could not connect to server for %s!\n", streamname.c_str());
#endif
ss.close();
HTTP_S.Clean();
HTTP_S.SetBody("No such stream is available on the system. Please try again.\n");
conn.SendNow(HTTP_S.BuildResponse("404", "Not found"));
ready4data = false;
continue;
}
Strm.waitForMeta(ss);
for (std::map<int,DTSC::Track>::iterator it = Strm.metadata.tracks.begin(); it != Strm.metadata.tracks.end(); it++){
if (videoID == -1 && it->second.codec == "theora"){
videoID = it->second.trackID;
}
if (audioID == -1 && it->second.codec == "vorbis"){
audioID = it->second.trackID;
}
}
if (videoID == -1 && audioID == -1){
HTTP_S.Clean(); //make sure no parts of old requests are left in any buffers
HTTP_S.SetBody("This stream contains no OGG compatible codecs");
HTTP_S.SendResponse("406", "Not acceptable",conn);
HTTP_R.Clean();
continue;
}
std::stringstream cmd;
cmd << "t";
if (videoID != -1){
cmd << " " << videoID;
}
if (audioID != -1){
cmd << " " << audioID;
}
cmd << "\np\n";
ss.SendNow(cmd.str().c_str(), cmd.str().size());
inited = true;
}
unsigned int now = Util::epoch();
if (now != lastStats){
lastStats = now;
ss.SendNow(conn.getStats("HTTP_Progressive_Ogg").c_str());
}
if (ss.spool()){
while (Strm.parsePacket(ss.Received())){
if ( !progressive_has_sent_header){
HTTP_S.Clean(); //make sure no parts of old requests are left in any buffers
HTTP_S.SetHeader("Content-Type", "video/ogg"); //Send the correct content-type for FLV files
HTTP_S.protocol = "HTTP/1.0";
conn.SendNow(HTTP_S.BuildResponse("200", "OK")); //no SetBody = unknown length - this is intentional, we will stream the entire file
//Fill in ogg header here
oggConv.readDTSCHeader(Strm.metadata);
conn.SendNow((char*)oggConv.parsedPages.c_str(), oggConv.parsedPages.size());
progressive_has_sent_header = true;
}
//parse DTSC to Ogg here
if (Strm.lastType() == DTSC::AUDIO || Strm.lastType() == DTSC::VIDEO){
std::string tmpString;
oggConv.readDTSCVector(Strm.getPacket(), tmpString);
conn.SendNow(tmpString);
}
if (Strm.lastType() == DTSC::PAUSEMARK){
conn.close();
ss.close();
//last page output
}
if (Strm.lastType() == DTSC::INVALID){
#if DEBUG >= 3
fprintf(stderr, "Invalid packet received - closing connection.\n");
#endif
conn.close();
}
}
}else{
Util::sleep(100);
}
if ( !ss.connected()){
break;
}
}
}
conn.close();
ss.SendNow(conn.getStats("HTTP_Progressive_Ogg").c_str());
ss.close();
return 0;
} //Progressive_Connector main function
} //Connector_HTTP namespace
///\brief The standard process-spawning main function.
int main(int argc, char ** argv){
Util::Config conf(argv[0], PACKAGE_VERSION);
JSON::Value capa;
capa["desc"] = "Enables HTTP protocol progressive streaming.";
capa["deps"] = "HTTP";
capa["url_rel"] = "/$.ogg";
capa["url_match"] = "/$.ogg";
capa["socket"] = "http_progressive_ogg";
capa["codecs"][0u][0u].append("theora");
capa["codecs"][0u][1u].append("vorbis");
capa["methods"][0u]["handler"] = "http";
capa["methods"][0u]["type"] = "html5/video/ogg";
capa["methods"][0u]["priority"] = 8ll;
capa["methods"][0u]["nolive"] = 1;
conf.addBasicConnectorOptions(capa);
conf.parseArgs(argc, argv);
if (conf.getBool("json")){
std::cout << capa.toString() << std::endl;
return -1;
}
return conf.serveForkedSocket(Connector_HTTP::progressiveConnector);
} //main

View file

@@ -1,516 +0,0 @@
///\file conn_http_smooth.cpp
///\brief Contains the main code for the HTTP Smooth Connector
#include <iostream>
#include <iomanip>
#include <queue>
#include <sstream>
#include <cstdlib>
#include <cstdio>
#include <cmath>
#include <unistd.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <getopt.h>
#include <mist/socket.h>
#include <mist/http_parser.h>
#include <mist/json.h>
#include <mist/dtsc.h>
#include <mist/base64.h>
#include <mist/amf.h>
#include <mist/mp4.h>
#include <mist/mp4_ms.h>
#include <mist/mp4_generic.h>
#include <mist/config.h>
#include <mist/stream.h>
#include <mist/timing.h>
long long unsigned int binToInt(std::string & binary){
long long int result = 0;
for ( int i = 0; i < 8; i++){
result <<= 8;
result += binary[i];
}
return result;
}
std::string intToBin(long long unsigned int number){
std::string result;
result.resize(8);
for( int i = 7; i >= 0; i--){
result[i] = number & 0xFF;
number >>= 8;
}
return result;
}
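//These two helpers (de)serialize a 64-bit value as 8 big-endian bytes, e.g. intToBin(258)
//yields the bytes 00 00 00 00 00 00 01 02 and binToInt() reverses it. Note that
//binToInt() adds raw char values; on platforms where char is signed, bytes >= 0x80 would
//sign-extend, so a cast to unsigned char may have been intended.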
std::string toUTF16(std::string original){
std::string result;
result += (char)0xFF;
result += (char)0xFE;
for (std::string::iterator it = original.begin(); it != original.end(); it++){
result += (*it);
result += (char)0x00;
}
return result;
}
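//toUTF16() prepends a UTF-16LE byte order mark (0xFF 0xFE) and widens each input byte
//with a trailing 0x00. That is only a correct conversion for ASCII input, which is all
//the manifest generated below appears to contain.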
///\brief Holds everything unique to HTTP Connectors.
namespace Connector_HTTP {
///\brief Builds an index file for HTTP Smooth streaming.
///\param metadata The current metadata, used to generate the index.
///\return The index file for HTTP Smooth Streaming.
std::string smoothIndex(DTSC::Meta & metadata){
std::stringstream Result;
Result << "<?xml version=\"1.0\" encoding=\"utf-16\"?>\n";
Result << "<SmoothStreamingMedia "
"MajorVersion=\"2\" "
"MinorVersion=\"0\" "
"TimeScale=\"10000000\" ";
std::deque<std::map<int,DTSC::Track>::iterator> audioIters;
std::deque<std::map<int,DTSC::Track>::iterator> videoIters;
long long int maxWidth = 0;
long long int maxHeight = 0;
long long int minWidth = 99999999;
long long int minHeight = 99999999;
for (std::map<int,DTSC::Track>::iterator it = metadata.tracks.begin(); it != metadata.tracks.end(); it++){
if (it->second.codec == "AAC"){
audioIters.push_back(it);
}
if (it->second.type == "video" && it->second.codec == "H264"){
videoIters.push_back(it);
if (it->second.width > maxWidth){maxWidth = it->second.width;}
if (it->second.width < minWidth){minWidth = it->second.width;}
if (it->second.height > maxHeight){maxHeight = it->second.height;}
if (it->second.height < minHeight){minHeight = it->second.height;}
}
}
if (metadata.vod){
Result << "Duration=\"" << (*videoIters.begin())->second.lastms << "0000\"";
}else{
Result << "Duration=\"0\" "
"IsLive=\"TRUE\" "
"LookAheadFragmentCount=\"2\" "
"DVRWindowLength=\"" << metadata.bufferWindow << "0000\" "
"CanSeek=\"TRUE\" "
"CanPause=\"TRUE\" ";
}
Result << ">\n";
//Add audio entries
if (audioIters.size()){
Result << "<StreamIndex "
"Type=\"audio\" "
"QualityLevels=\"" << audioIters.size() << "\" "
"Name=\"audio\" "
"Chunks=\"" << (*audioIters.begin())->second.keys.size() << "\" "
"Url=\"Q({bitrate},{CustomAttributes})/A({start time})\">\n";
int index = 0;
for (std::deque<std::map<int,DTSC::Track>::iterator>::iterator it = audioIters.begin(); it != audioIters.end(); it++){
Result << "<QualityLevel "
"Index=\"" << index << "\" "
"Bitrate=\"" << (*it)->second.bps * 8 << "\" "
"CodecPrivateData=\"" << std::hex;
for (unsigned int i = 0; i < (*it)->second.init.size(); i++){
Result << std::setfill('0') << std::setw(2) << std::right << (int)(*it)->second.init[i];
}
Result << std::dec << "\" "
"SamplingRate=\"" << (*it)->second.rate << "\" "
"Channels=\"2\" "
"BitsPerSample=\"16\" "
"PacketSize=\"4\" "
"AudioTag=\"255\" "
"FourCC=\"AACL\" >\n";
Result << "<CustomAttributes>\n"
"<Attribute Name = \"TrackID\" Value = \"" << (*it)->first << "\" />"
"</CustomAttributes>";
Result << "</QualityLevel>\n";
index++;
}
if ((*audioIters.begin())->second.keys.size()){
for (std::deque<DTSC::Key>::iterator it = (*audioIters.begin())->second.keys.begin(); it != (((*audioIters.begin())->second.keys.end()) - 1); it++){
Result << "<c ";
if (it == (*audioIters.begin())->second.keys.begin()){
Result << "t=\"" << it->getTime() * 10000 << "\" ";
}
Result << "d=\"" << it->getLength() * 10000 << "\" />\n";
}
}
Result << "</StreamIndex>\n";
}
//Add video entries
if (videoIters.size()){
Result << "<StreamIndex "
"Type=\"video\" "
"QualityLevels=\"" << videoIters.size() << "\" "
"Name=\"video\" "
"Chunks=\"" << (*videoIters.begin())->second.keys.size() << "\" "
"Url=\"Q({bitrate},{CustomAttributes})/V({start time})\" "
"MaxWidth=\"" << maxWidth << "\" "
"MaxHeight=\"" << maxHeight << "\" "
"DisplayWidth=\"" << maxWidth << "\" "
"DisplayHeight=\"" << maxHeight << "\">\n";
int index = 0;
for (std::deque<std::map<int,DTSC::Track>::iterator>::iterator it = videoIters.begin(); it != videoIters.end(); it++){
//Add video qualities
Result << "<QualityLevel "
"Index=\"" << index << "\" "
"Bitrate=\"" << (*it)->second.bps * 8 << "\" "
"CodecPrivateData=\"" << std::hex;
MP4::AVCC avccbox;
avccbox.setPayload((*it)->second.init);
std::string tmpString = avccbox.asAnnexB();
for (unsigned int i = 0; i < tmpString.size(); i++){
Result << std::setfill('0') << std::setw(2) << std::right << (int)tmpString[i];
}
Result << std::dec << "\" "
"MaxWidth=\"" << (*it)->second.width << "\" "
"MaxHeight=\"" << (*it)->second.height << "\" "
"FourCC=\"AVC1\" >\n";
Result << "<CustomAttributes>\n"
"<Attribute Name = \"TrackID\" Value = \"" << (*it)->first << "\" />"
"</CustomAttributes>";
Result << "</QualityLevel>\n";
index++;
}
if ((*videoIters.begin())->second.keys.size()){
for (std::deque<DTSC::Key>::iterator it = (*videoIters.begin())->second.keys.begin(); it != (((*videoIters.begin())->second.keys.end()) - 1); it++){
Result << "<c ";
if (it == (*videoIters.begin())->second.keys.begin()){
Result << "t=\"" << it->getTime() * 10000 << "\" ";
}
Result << "d=\"" << it->getLength() * 10000 << "\" />\n";
}
}
Result << "</StreamIndex>\n";
}
Result << "</SmoothStreamingMedia>\n";
#if DEBUG >= 8
std::cerr << "Sending this manifest:" << std::endl << Result << std::endl;
#endif
return toUTF16(Result.str());
} //smoothIndex
///\brief Main function for the HTTP Smooth Connector
///\param conn A socket describing the connection to the client.
///\return The exit code of the connector.
int smoothConnector(Socket::Connection & conn){
std::deque<std::string> dataBuffer;//A buffer for the data that needs to be sent to the client.
DTSC::Stream Strm;//Incoming stream buffer.
HTTP::Parser HTTP_R;//HTTP Receiver
HTTP::Parser HTTP_S;//HTTP Sender.
bool ready4data = false;//Set to true when streaming is to begin.
Socket::Connection ss( -1);//The Stream Socket, used to connect to the desired stream.
std::string streamname;//Will contain the name of the stream.
bool handlingRequest = false;
std::string Quality;//Indicates the requested quality of the movie.
long long int requestedTime = -1;//Indicates the fragment requested.
std::string parseString;//A string used for parsing different aspects of the request.
unsigned int lastStats = 0;//Indicates the last time that we have sent stats to the server socket.
conn.setBlocking(false);//Set the client socket to non-blocking
while (conn.connected()){
if ( !handlingRequest){
if (conn.spool() && HTTP_R.Read(conn)){
#if DEBUG >= 5
std::cout << "Received request: " << HTTP_R.getUrl() << std::endl;
#endif
//Get data set by the proxy.
conn.setHost(HTTP_R.GetHeader("X-Origin"));
streamname = HTTP_R.GetHeader("X-Stream");
if ( !ss){
//initiate Stream Socket
ss = Util::Stream::getStream(streamname);
if ( !ss.connected()){
#if DEBUG >= 1
fprintf(stderr, "Could not connect to server!\n");
#endif
HTTP_S.Clean();
HTTP_S.SetBody("No such stream is available on the system. Please try again.\n");
conn.SendNow(HTTP_S.BuildResponse("404", "Not found"));
ready4data = false;
continue;
}
ss.setBlocking(false);
Strm.waitForMeta(ss);
}
if (HTTP_R.url.find(".xap") != std::string::npos){
#include "xap.h"
HTTP_S.Clean();
HTTP_S.SetHeader("Content-Type", "application/siverlight");
HTTP_S.SetHeader("Cache-Control", "cache");
HTTP_S.SetBody("");
HTTP_S.SetHeader("Content-Length", xap_len);
HTTP_S.SendResponse("200", "OK", conn);
conn.SendNow((const char *)xap_data, xap_len);
}else{
if (HTTP_R.url.find("Manifest") == std::string::npos){
//We have a non-manifest request, parse it.
Quality = HTTP_R.url.substr(HTTP_R.url.find("TrackID=", 8) + 8);
Quality = Quality.substr(0, Quality.find(")"));
parseString = HTTP_R.url.substr(HTTP_R.url.find(")/") + 2);
parseString = parseString.substr(parseString.find("(") + 1);
requestedTime = atoll(parseString.substr(0, parseString.find(")")).c_str());
long long int selectedQuality = atoll(Quality.c_str());
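//How the request is interpreted here (derived from the parsing above; the exact URL
//template is defined where the manifest is generated): the characters after "TrackID="
//up to the next ')' select the track, and the number inside the parentheses of the
//path segment following ")/" is the fragment start time in 100-ns units, so a
//requestedTime of 600000000 corresponds to 60000 ms.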
DTSC::Track & myRef = Strm.metadata.tracks[selectedQuality];
if (Strm.metadata.live){
int seekable = Strm.canSeekms(requestedTime / 10000);
if (seekable == 0){
// iff the fragment in question is available, check if the next is available too
for (std::deque<DTSC::Key>::iterator it = myRef.keys.begin(); it != myRef.keys.end(); it++){
if (it->getTime() >= (requestedTime / 10000)){
if ((it + 1) == myRef.keys.end()){
seekable = 1;
}
break;
}
}
}
if (seekable < 0){
HTTP_S.Clean();
HTTP_S.SetBody("The requested fragment is no longer kept in memory on the server and cannot be served.\n");
conn.SendNow(HTTP_S.BuildResponse("412", "Fragment out of range"));
HTTP_R.Clean(); //clean for any possible next requests
std::cout << "Fragment @ " << requestedTime / 10000 << "ms too old (" << myRef.keys.begin()->getTime() << " - " << myRef.keys.rbegin()->getTime() << " ms)" << std::endl;
continue;
}
if (seekable > 0){
HTTP_S.Clean();
HTTP_S.SetBody("Proxy, re-request this in a second or two.\n");
conn.SendNow(HTTP_S.BuildResponse("208", "Ask again later"));
HTTP_R.Clean(); //clean for any possible next requests
std::cout << "Fragment @ " << requestedTime / 10000 << "ms not available yet (" << myRef.keys.begin()->getTime() << " - " << myRef.keys.rbegin()->getTime() << " ms)" << std::endl;
continue;
}
}
//Seek to the right place and send a play-once for a single fragment.
std::stringstream sstream;
long long mstime = 0;
int partOffset = 0;
int keyDur = 0;
DTSC::Key keyObj;
for (std::deque<DTSC::Key>::iterator it = myRef.keys.begin(); it != myRef.keys.end(); it++){
if (it->getTime() >= (requestedTime / 10000)){
mstime = it->getTime();
keyObj = (*it);
std::deque<DTSC::Key>::iterator nextIt = it;
nextIt++;
if (nextIt != myRef.keys.end()){
keyDur = nextIt->getTime() - it->getTime();
}else{
keyDur = -1;
if (Strm.metadata.live){
HTTP_S.Clean();
HTTP_S.SetBody("Proxy, re-request this in a second or two.\n");
conn.SendNow(HTTP_S.BuildResponse("208", "Ask again later"));
HTTP_R.Clean(); //clean for any possible next requests
std::cout << "Fragment after fragment @ " << (requestedTime / 10000) << " not available yet" << std::endl;
}
}
break;
}
partOffset += it->getParts();
}
if (HTTP_R.url == "/"){continue;}//Don't continue, but continue instead.
if (Strm.metadata.live){
if (mstime == 0 && (requestedTime / 10000) > 1){
HTTP_S.Clean();
HTTP_S.SetBody("The requested fragment is no longer kept in memory on the server and cannot be served.\n");
conn.SendNow(HTTP_S.BuildResponse("412", "Fragment out of range"));
HTTP_R.Clean(); //clean for any possible next requests
std::cout << "Fragment @ " << (requestedTime / 10000) << " too old" << std::endl;
continue;
}
}
sstream << "t " << myRef.trackID << "\n";
sstream << "s " << keyObj.getTime() << "\n";
if (keyDur != -1){
sstream << "p " << keyObj.getTime() + keyDur << "\n";
}else{
sstream << "p\n";
}
ss.SendNow(sstream.str().c_str());
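//The strings written to the stream socket here follow the simple command protocol also
//visible in the other connectors in this commit: "t <trackID> [...]" selects tracks,
//"s <ms>" seeks, "p" plays (optionally "p <stop_ms>" to play up to a point), and "q"
//stops playback.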
//Wrap everything in mp4 boxes
MP4::MFHD mfhd_box;
mfhd_box.setSequenceNumber(((keyObj.getNumber() - 1) * 2) + myRef.trackID);
MP4::TFHD tfhd_box;
tfhd_box.setFlags(MP4::tfhdSampleFlag);
tfhd_box.setTrackID(myRef.trackID);
if (myRef.type == "video"){
tfhd_box.setDefaultSampleFlags(0x00004001);
}else{
tfhd_box.setDefaultSampleFlags(0x00008002);
}
MP4::TRUN trun_box;
trun_box.setDataOffset(42);
unsigned int keySize = 0;
if (myRef.type == "video"){
trun_box.setFlags(MP4::trundataOffset | MP4::trunfirstSampleFlags | MP4::trunsampleDuration | MP4::trunsampleSize | MP4::trunsampleOffsets);
}else{
trun_box.setFlags(MP4::trundataOffset | MP4::trunsampleDuration | MP4::trunsampleSize);
}
trun_box.setFirstSampleFlags(0x00004002);
for (int i = 0; i < keyObj.getParts(); i++){
MP4::trunSampleInformation trunSample;
trunSample.sampleSize = Strm.metadata.tracks[myRef.trackID].parts[i + partOffset].getSize();
keySize += Strm.metadata.tracks[myRef.trackID].parts[i + partOffset].getSize();
trunSample.sampleDuration = Strm.metadata.tracks[myRef.trackID].parts[i + partOffset].getDuration() * 10000;
if (myRef.type == "video"){
trunSample.sampleOffset = Strm.metadata.tracks[myRef.trackID].parts[i + partOffset].getOffset() * 10000;
}
trun_box.setSampleInformation(trunSample, i);
}
MP4::SDTP sdtp_box;
sdtp_box.setVersion(0);
if (myRef.type == "video"){
sdtp_box.setValue(36, 4);
for (int i = 1; i < keyObj.getParts(); i++){
sdtp_box.setValue(20, 4 + i);
}
}else{
sdtp_box.setValue(40, 4);
for (int i = 1; i < keyObj.getParts(); i++){
sdtp_box.setValue(40, 4 + i);
}
}
MP4::TRAF traf_box;
traf_box.setContent(tfhd_box, 0);
traf_box.setContent(trun_box, 1);
traf_box.setContent(sdtp_box, 2);
//If the stream is live, we want to have a fragref box if possible
if (Strm.metadata.live){
MP4::UUID_TrackFragmentReference fragref_box;
fragref_box.setVersion(1);
fragref_box.setFragmentCount(0);
int fragCount = 0;
for (unsigned int i = 0; fragCount < 2 && i < myRef.keys.size() - 1; i++){
if (myRef.keys[i].getTime() > (requestedTime / 10000)){
fragref_box.setTime(fragCount, myRef.keys[i].getTime() * 10000);
fragref_box.setDuration(fragCount, myRef.keys[i].getLength() * 10000);
fragref_box.setFragmentCount(++fragCount);
}
}
traf_box.setContent(fragref_box, 3);
}
MP4::MOOF moof_box;
moof_box.setContent(mfhd_box, 0);
//Setting the correct offsets.
moof_box.setContent(traf_box, 1);
trun_box.setDataOffset(moof_box.boxedSize() + 8);
traf_box.setContent(trun_box, 1);
moof_box.setContent(traf_box, 1);
HTTP_S.Clean();
HTTP_S.SetHeader("Content-Type", "video/mp4");
HTTP_S.StartResponse(HTTP_R, conn);
HTTP_S.Chunkify(moof_box.asBox(), moof_box.boxedSize(), conn);
int size = htonl(keySize + 8);
HTTP_S.Chunkify((char*)&size, 4, conn);
HTTP_S.Chunkify("mdat", 4, conn);
handlingRequest = true;
}else{
//We have a request for a Manifest, generate and send it.
HTTP_S.Clean();
HTTP_S.SetHeader("Content-Type", "text/xml");
HTTP_S.SetHeader("Cache-Control", "no-cache");
std::string manifest = smoothIndex(Strm.metadata);
HTTP_S.SetBody(manifest);
HTTP_S.SendResponse("200", "OK", conn);
}
}
ready4data = true;
//Clean for any possible next requests
HTTP_R.Clean();
}else{
//Wait 250ms before checking for new data.
Util::sleep(250);
}
}else{
if (!ready4data){
//Wait 250ms before checking for new data.
Util::sleep(250);
}
}
if (ready4data){
unsigned int now = Util::epoch();
if (now != lastStats){
//Send new stats.
lastStats = now;
ss.SendNow(conn.getStats("HTTP_Smooth"));
}
if (ss.spool()){
while (Strm.parsePacket(ss.Received())){
if (Strm.lastType() == DTSC::AUDIO || Strm.lastType() == DTSC::VIDEO){
HTTP_S.Chunkify(Strm.lastData(), conn);
}
if (Strm.lastType() == DTSC::PAUSEMARK){
HTTP_S.Chunkify("", 0, conn);
handlingRequest = false;
}
}
}else{
Util::sleep(10);
}
if ( !ss.connected()){
break;
}
}
}
conn.close();
ss.SendNow(conn.getStats("HTTP_Smooth").c_str());
ss.close();
return 0;
}//Smooth_Connector main function
}//Connector_HTTP namespace
///\brief The standard process-spawning main function.
int main(int argc, char ** argv){
Util::Config conf(argv[0], PACKAGE_VERSION);
JSON::Value capa;
capa["desc"] = "Enables HTTP protocol Microsoft-specific smooth streaming through silverlight (also known as HSS).";
capa["deps"] = "HTTP";
capa["url_rel"] = "/smooth/$.ism/Manifest";
capa["url_prefix"] = "/smooth/$.ism/";
capa["socket"] = "http_smooth";
capa["codecs"][0u][0u].append("H264");
capa["codecs"][0u][1u].append("AAC");
capa["methods"][0u]["handler"] = "http";
capa["methods"][0u]["type"] = "html5/application/vnd.ms-ss";
capa["methods"][0u]["priority"] = 9ll;
capa["methods"][0u]["nolive"] = 1;
capa["methods"][1u]["handler"] = "http";
capa["methods"][1u]["type"] = "silverlight";
capa["methods"][1u]["priority"] = 1ll;
capa["methods"][1u]["nolive"] = 1;
conf.addBasicConnectorOptions(capa);
conf.parseArgs(argc, argv);
if (conf.getBool("json")){
std::cout << capa.toString() << std::endl;
return -1;
}
return conf.serveForkedSocket(Connector_HTTP::smoothConnector);
} //main

View file

@@ -1,223 +0,0 @@
///\file conn_http_srt.cpp
///\brief Contains the main code for the HTTP SRT Connector
#include <iostream>
#include <queue>
#include <sstream>
#include <iomanip>
#include <cstdlib>
#include <cstdio>
#include <cmath>
#include <unistd.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <getopt.h>
#include <mist/socket.h>
#include <mist/http_parser.h>
#include <mist/dtsc.h>
#include <mist/flv_tag.h>
#include <mist/amf.h>
#include <mist/config.h>
#include <mist/stream.h>
#include <mist/timing.h>
///\brief Holds everything unique to HTTP Connectors.
namespace Connector_HTTP {
///\brief Main function for the HTTP SRT Connector
///\param conn A socket describing the connection to the client.
///\return The exit code of the connector.
int SRTConnector(Socket::Connection & conn){
DTSC::Stream Strm; //Incoming stream buffer.
HTTP::Parser HTTP_R, HTTP_S;//HTTP Receiver and HTTP Sender.
bool inited = false;//Whether the stream is initialized
Socket::Connection ss( -1);//The Stream Socket, used to connect to the desired stream.
std::string streamname;//Will contain the name of the stream.
unsigned int lastStats = 0;//Indicates the last time that we have sent stats to the server socket.
unsigned int seek_time = 0;//Seek position in ms
int trackID = -1; // the track to be selected
int curIndex = 0; // SRT index
bool subtitleTrack = false; // check whether the requested track is an SRT track
bool isWebVTT = false;
std::stringstream srtdata; // ss output data
while (conn.connected()){
//Only attempt to parse input when not yet init'ed.
if ( !inited){
if (conn.spool() && HTTP_R.Read(conn)){
#if DEBUG >= 5
std::cout << "Received request: " << HTTP_R.getUrl() << std::endl;
#endif
conn.setHost(HTTP_R.GetHeader("X-Origin"));
streamname = HTTP_R.GetHeader("X-Stream");
int start = 0;
if ( !HTTP_R.GetVar("start").empty()){
start = atoi(HTTP_R.GetVar("start").c_str());
}
if ( !HTTP_R.GetVar("starttime").empty()){
start = atoi(HTTP_R.GetVar("starttime").c_str());
}
if ( !HTTP_R.GetVar("apstart").empty()){
start = atoi(HTTP_R.GetVar("apstart").c_str());
}
if ( !HTTP_R.GetVar("ec_seek").empty()){
start = atoi(HTTP_R.GetVar("ec_seek").c_str());
}
if ( !HTTP_R.GetVar("fs").empty()){
start = atoi(HTTP_R.GetVar("fs").c_str());
}
if ( !HTTP_R.GetVar("trackid").empty()){
trackID = atoi(HTTP_R.GetVar("trackid").c_str());
}
if ( !HTTP_R.GetVar("webvtt").empty()){
isWebVTT = true;
}else{
isWebVTT = false;
}
//under 3 hours we assume seconds, otherwise byte position
if (start < 10800){
seek_time = start * 1000; //ms, not s
}else{
seek_time = start * 1000; //byte positions are not actually converted here; the value is treated as seconds as well.
}
//we are ready, connect the socket!
if ( !ss.connected()){
ss = Util::Stream::getStream(streamname);
}
if ( !ss.connected()){
#if DEBUG >= 1
fprintf(stderr, "Could not connect to server for %s!\n", streamname.c_str());
#endif
ss.close();
HTTP_S.Clean();
HTTP_S.SetBody("No such stream is available on the system. Please try again.\n");
conn.SendNow(HTTP_S.BuildResponse("404", "Not found"));
inited = false;
continue;
}
Strm.waitForMeta(ss);
if(trackID == -1){
// no track was given. Fetch the first track that has SRT data
for (std::map<int,DTSC::Track>::iterator it = Strm.metadata.tracks.begin(); it != Strm.metadata.tracks.end(); it++){
if (it->second.codec == "srt"){
trackID = it->second.trackID;
subtitleTrack = true;
break;
}
}
}else{
// track *was* given, but we have to check whether it's an actual srt track
subtitleTrack = Strm.metadata.tracks[trackID].codec == "srt";
}
if(!subtitleTrack){
HTTP_S.Clean();
HTTP_S.SetBody("# This track doesn't contain subtitle data.\n");
conn.SendNow(HTTP_S.BuildResponse("404", "Not found"));
subtitleTrack = false;
HTTP_R.Clean();
continue;
}
std::stringstream cmd;
cmd << "t " << trackID;
int maxTime = Strm.metadata.tracks[trackID].lastms;
cmd << "\ns " << seek_time << "\np " << maxTime << "\n";
ss.SendNow(cmd.str().c_str(), cmd.str().size());
inited = true;
HTTP_R.Clean(); //clean for any possible next requests
srtdata.clear();
curIndex = 1; // set to 1, first srt 'track'
}
}
unsigned int now = Util::epoch();
if (now != lastStats){
lastStats = now;
ss.SendNow(conn.getStats("HTTP_SRT").c_str());
}
if (inited){
if (ss.spool()){
while (Strm.parsePacket(ss.Received())){
if(Strm.lastType() == DTSC::META){
if(!isWebVTT){
srtdata << curIndex++ << std::endl;
}
long long unsigned int time = Strm.getPacket()["time"].asInt();
srtdata << std::setfill('0') << std::setw(2) << (time / 3600000) << ":";
srtdata << std::setfill('0') << std::setw(2) << ((time % 3600000) / 60000) << ":";
srtdata << std::setfill('0') << std::setw(2) << (((time % 3600000) % 60000) / 1000) << ",";
srtdata << std::setfill('0') << std::setw(3) << time % 1000 << " --> ";
time += Strm.getPacket()["duration"].asInt();
srtdata << std::setfill('0') << std::setw(2) << (time / 3600000) << ":";
srtdata << std::setfill('0') << std::setw(2) << ((time % 3600000) / 60000) << ":";
srtdata << std::setfill('0') << std::setw(2) << (((time % 3600000) % 60000) / 1000) << ",";
srtdata << std::setfill('0') << std::setw(3) << time % 1000 << std::endl;
srtdata << Strm.lastData() << std::endl;
}
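//For reference, one cue built by the code above looks roughly like this (made-up text):
//  1
//  00:00:01,000 --> 00:00:03,500
//  Example subtitle line
//When webvtt output is requested the numeric index is skipped here and a "WEBVTT"
//header is prepended to the final response below.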
if( Strm.lastType() == DTSC::PAUSEMARK){
HTTP_S.Clean(); //make sure no parts of old requests are left in any buffers
HTTP_S.SetHeader("Content-Type", "text/plain"); //Send the correct content-type for FLV files
HTTP_S.SetBody( (isWebVTT ? "WEBVTT\n\n" : "") + srtdata.str());
conn.SendNow(HTTP_S.BuildResponse("200", "OK")); //no SetBody = unknown length - this is intentional, we will stream the entire file
inited = false;
srtdata.str("");
srtdata.clear();
}
}
}else{
Util::sleep(200);
}
if ( !ss.connected()){
break;
}
}
}
conn.close();
ss.SendNow(conn.getStats("HTTP_SRT").c_str());
ss.close();
return 0;
} //SRT main function
} //Connector_HTTP namespace
///\brief The standard process-spawning main function.
int main(int argc, char ** argv){
Util::Config conf(argv[0], PACKAGE_VERSION);
JSON::Value capa;
capa["desc"] = "Enables HTTP protocol subtitle streaming.";
capa["deps"] = "HTTP";
capa["url_rel"] = "/$.srt";
capa["url_match"] = "/$.srt";
capa["url_handler"] = "http";
capa["url_type"] = "subtitle";
capa["socket"] = "http_srt";
conf.addBasicConnectorOptions(capa);
conf.parseArgs(argc, argv);
if (conf.getBool("json")){
std::cout << capa.toString() << std::endl;
return -1;
}
return conf.serveForkedSocket(Connector_HTTP::SRTConnector);
} //main

View file

@@ -1,58 +0,0 @@
/// \file conn_raw.cpp
/// Contains the main code for the RAW connector.
#include <iostream>
#include <sstream>
#include <mist/config.h>
#include <mist/socket.h>
#include <mist/stream.h>
#include <mist/timing.h>
///\brief Contains the main code for the RAW connector.
///
///Expects a single commandline argument telling it which stream to connect to,
///then outputs the raw stream to stdout.
int main(int argc, char ** argv){
Util::Config conf(argv[0], PACKAGE_VERSION);
JSON::Value capa;
conf.addBasicConnectorOptions(capa);
conf.addOption("stream_name", JSON::fromString("{\"arg_num\":1, \"help\":\"Name of the stream to write to stdout.\"}"));
conf.parseArgs(argc, argv);
if (conf.getBool("json")){
std::cout << "null" << std::endl;
return -1;
}
//connect to the proper stream
Socket::Connection S = Util::Stream::getStream(conf.getString("stream_name"));
S.setBlocking(false);
if ( !S.connected()){
std::cout << "Could not open stream " << conf.getString("stream_name") << std::endl;
return 1;
}
long long int lastStats = 0;
long long int started = Util::epoch();
while (std::cout.good() && S.connected()){
if (S.spool()){
while (S.Received().size()){
std::cout.write(S.Received().get().c_str(), S.Received().get().size());
S.Received().get().clear();
}
}else{
Util::sleep(500); //sleep 500ms if no data
}
unsigned int now = Util::epoch();
if (now != lastStats){
lastStats = now;
std::stringstream st;
st << "S localhost RAW " << (Util::epoch() - started) << " " << S.dataDown() << " " << S.dataUp() << "\n";
S.SendNow(st.str().c_str());
}
}
std::stringstream st;
st << "S localhost RAW " << (Util::epoch() - started) << " " << S.dataDown() << " " << S.dataUp() << "\n";
S.SendNow(st.str().c_str());
S.close();
return 0;
}

View file

@@ -1,700 +0,0 @@
/// \file conn_rtmp.cpp
/// Contains the main code for the RTMP Connector
#include <iostream>
#include <sstream>
#include <cstdlib>
#include <cstdio>
#include <cmath>
#include <unistd.h>
#include <signal.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <getopt.h>
#include <mist/socket.h>
#include <mist/config.h>
#include <mist/flv_tag.h>
#include <mist/amf.h>
#include <mist/rtmpchunks.h>
#include <mist/stream.h>
#include <mist/timing.h>
///\brief Holds everything unique to the RTMP Connector
namespace Connector_RTMP {
//for connection to server
bool ready4data = false; ///< Indicates whether streaming can start.
bool inited = false; ///< Indicates whether we are ready to connect to the Buffer.
bool noStats = false; ///< Indicates when no stats should be sent anymore. Used in push mode.
bool stopParsing = false; ///< Indicates when to stop all parsing.
bool streamReset = false;
//for reply to play command
int playTransaction = -1;///<The transaction number of the reply.
int playStreamId = -1;///<The stream id of the reply.
int playMessageType = -1;///<The message type of the reply.
//generic state keeping
bool streamInited = false;///<Indicates whether init data for audio/video was sent.
int videoID = -1;
int audioID = -1;
Socket::Connection Socket; ///< A copy of the user socket to allow helper functions to directly send data.
Socket::Connection ss; ///< Socket connected to server.
std::string streamName; ///< Stream that will be opened.
std::string app_name; ///< Name of the application that was opened
///\brief Sends a RTMP command either in AMF or AMF3 mode.
///\param amfReply The data to be sent over RTMP.
///\param messageType The type of message.
///\param streamId The ID of the AMF stream.
void sendCommand(AMF::Object & amfReply, int messageType, int streamId){
#if DEBUG >= 8
std::cerr << amfReply.Print() << std::endl;
#endif
if (messageType == 17){
Socket.SendNow(RTMPStream::SendChunk(3, messageType, streamId, (char)0 + amfReply.Pack()));
}else{
Socket.SendNow(RTMPStream::SendChunk(3, messageType, streamId, amfReply.Pack()));
}
} //sendCommand
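//Note: messageType 20 is an AMF0-encoded command and 17 an AMF3-encoded one; in the
//AMF3 case the payload is prefixed with a single zero byte, which this code treats as
//marking an AMF0-formatted body inside the AMF3 message (see the type 17 handling in
//parseChunk below).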
///\brief Parses a single AMF command message, and sends a direct response through sendCommand().
///\param amfData The received request.
///\param messageType The type of message.
///\param streamId The ID of the AMF stream.
void parseAMFCommand(AMF::Object & amfData, int messageType, int streamId){
#if DEBUG >= 5
fprintf(stderr, "Received command: %s\n", amfData.Print().c_str());
#endif
#if DEBUG >= 8
fprintf(stderr, "AMF0 command: %s\n", amfData.getContentP(0)->StrValue().c_str());
#endif
if (amfData.getContentP(0)->StrValue() == "connect"){
double objencoding = 0;
if (amfData.getContentP(2)->getContentP("objectEncoding")){
objencoding = amfData.getContentP(2)->getContentP("objectEncoding")->NumValue();
}
#if DEBUG >= 6
int tmpint;
if (amfData.getContentP(2)->getContentP("videoCodecs")){
tmpint = (int)amfData.getContentP(2)->getContentP("videoCodecs")->NumValue();
if (tmpint & 0x04){
fprintf(stderr, "Sorensen video support detected\n");
}
if (tmpint & 0x80){
fprintf(stderr, "H264 video support detected\n");
}
}
if (amfData.getContentP(2)->getContentP("audioCodecs")){
tmpint = (int)amfData.getContentP(2)->getContentP("audioCodecs")->NumValue();
if (tmpint & 0x04){
fprintf(stderr, "MP3 audio support detected\n");
}
if (tmpint & 0x400){
fprintf(stderr, "AAC audio support detected\n");
}
}
#endif
app_name = amfData.getContentP(2)->getContentP("tcUrl")->StrValue();
app_name = app_name.substr(app_name.find('/', 7) + 1);
RTMPStream::chunk_snd_max = 4096;
Socket.Send(RTMPStream::SendCTL(1, RTMPStream::chunk_snd_max)); //send chunk size max (msg 1)
Socket.Send(RTMPStream::SendCTL(5, RTMPStream::snd_window_size)); //send window acknowledgement size (msg 5)
Socket.Send(RTMPStream::SendCTL(6, RTMPStream::rec_window_size)); //send rec window acknowledgement size (msg 6)
Socket.Send(RTMPStream::SendUSR(0, 1)); //send UCM StreamBegin (0), stream 1
//send a _result reply
AMF::Object amfReply("container", AMF::AMF0_DDV_CONTAINER);
amfReply.addContent(AMF::Object("", "_result")); //result success
amfReply.addContent(amfData.getContent(1)); //same transaction ID
amfReply.addContent(AMF::Object("")); //server properties
amfReply.getContentP(2)->addContent(AMF::Object("fmsVer", "FMS/3,5,5,2004"));
amfReply.getContentP(2)->addContent(AMF::Object("capabilities", (double)31));
amfReply.getContentP(2)->addContent(AMF::Object("mode", (double)1));
amfReply.addContent(AMF::Object("")); //info
amfReply.getContentP(3)->addContent(AMF::Object("level", "status"));
amfReply.getContentP(3)->addContent(AMF::Object("code", "NetConnection.Connect.Success"));
amfReply.getContentP(3)->addContent(AMF::Object("description", "Connection succeeded."));
amfReply.getContentP(3)->addContent(AMF::Object("clientid", 1337));
amfReply.getContentP(3)->addContent(AMF::Object("objectEncoding", objencoding));
//amfReply.getContentP(3)->addContent(AMF::Object("data", AMF::AMF0_ECMA_ARRAY));
//amfReply.getContentP(3)->getContentP(4)->addContent(AMF::Object("version", "3,5,4,1004"));
sendCommand(amfReply, messageType, streamId);
//send onBWDone packet - no clue what it is, but real server sends it...
//amfReply = AMF::Object("container", AMF::AMF0_DDV_CONTAINER);
//amfReply.addContent(AMF::Object("", "onBWDone"));//result
//amfReply.addContent(amfData.getContent(1));//same transaction ID
//amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL));//null
//sendCommand(amfReply, messageType, streamId);
return;
} //connect
if (amfData.getContentP(0)->StrValue() == "createStream"){
//send a _result reply
AMF::Object amfReply("container", AMF::AMF0_DDV_CONTAINER);
amfReply.addContent(AMF::Object("", "_result")); //result success
amfReply.addContent(amfData.getContent(1)); //same transaction ID
amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
amfReply.addContent(AMF::Object("", (double)1)); //stream ID - we use 1
sendCommand(amfReply, messageType, streamId);
Socket.Send(RTMPStream::SendUSR(0, 1)); //send UCM StreamBegin (0), stream 1
return;
} //createStream
if ((amfData.getContentP(0)->StrValue() == "closeStream") || (amfData.getContentP(0)->StrValue() == "deleteStream")){
if (ss.connected()){
ss.close();
}
return;
}
if ((amfData.getContentP(0)->StrValue() == "FCUnpublish") || (amfData.getContentP(0)->StrValue() == "releaseStream")){
// ignored
return;
}
if ((amfData.getContentP(0)->StrValue() == "FCPublish")){
//send a FCPublic reply
AMF::Object amfReply("container", AMF::AMF0_DDV_CONTAINER);
amfReply.addContent(AMF::Object("", "onFCPublish")); //status reply
amfReply.addContent(AMF::Object("", 0, AMF::AMF0_NUMBER)); //same transaction ID
amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
amfReply.addContent(AMF::Object("")); //info
amfReply.getContentP(3)->addContent(AMF::Object("code", "NetStream.Publish.Start"));
amfReply.getContentP(3)->addContent(AMF::Object("description", "Please followup with publish command..."));
sendCommand(amfReply, messageType, streamId);
return;
} //FCPublish
if (amfData.getContentP(0)->StrValue() == "releaseStream"){
//send a _result reply
AMF::Object amfReply("container", AMF::AMF0_DDV_CONTAINER);
amfReply.addContent(AMF::Object("", "_result")); //result success
amfReply.addContent(amfData.getContent(1)); //same transaction ID
amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
amfReply.addContent(AMF::Object("", AMF::AMF0_UNDEFINED)); //stream ID?
sendCommand(amfReply, messageType, streamId);
return;
}//releaseStream
if ((amfData.getContentP(0)->StrValue() == "getStreamLength") || (amfData.getContentP(0)->StrValue() == "getMovLen")){
//send a _result reply
AMF::Object amfReply("container", AMF::AMF0_DDV_CONTAINER);
amfReply.addContent(AMF::Object("", "_result")); //result success
amfReply.addContent(amfData.getContent(1)); //same transaction ID
amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
amfReply.addContent(AMF::Object("", (double)0)); //zero length
sendCommand(amfReply, messageType, streamId);
return;
} //getStreamLength
if ((amfData.getContentP(0)->StrValue() == "publish")){
if (amfData.getContentP(3)){
streamName = amfData.getContentP(3)->StrValue();
/// \todo implement push for MistPlayer or restrict and change to getLive
ss = Util::Stream::getStream(streamName);
if ( !ss.connected()){
#if DEBUG >= 1
fprintf(stderr, "Could not connect to server!\n");
#endif
Socket.close(); //disconnect user
return;
}
DTSC::Stream Strm;
Strm.waitForMeta(ss);
ss.Send("P ");
ss.Send(Socket.getHost().c_str());
ss.Send(" ");
ss.Send(app_name);
ss.SendNow("\n");
streamReset = true;
noStats = true;
}
//send a _result reply
AMF::Object amfReply("container", AMF::AMF0_DDV_CONTAINER);
amfReply.addContent(AMF::Object("", "_result")); //result success
amfReply.addContent(amfData.getContent(1)); //same transaction ID
amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
amfReply.addContent(AMF::Object("", 1, AMF::AMF0_BOOL)); //publish success?
sendCommand(amfReply, messageType, streamId);
Socket.Send(RTMPStream::SendUSR(0, 1)); //send UCM StreamBegin (0), stream 1
//send a status reply
amfReply = AMF::Object("container", AMF::AMF0_DDV_CONTAINER);
amfReply.addContent(AMF::Object("", "onStatus")); //status reply
amfReply.addContent(AMF::Object("", 0, AMF::AMF0_NUMBER)); //same transaction ID
amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
amfReply.addContent(AMF::Object("")); //info
amfReply.getContentP(3)->addContent(AMF::Object("level", "status"));
amfReply.getContentP(3)->addContent(AMF::Object("code", "NetStream.Publish.Start"));
amfReply.getContentP(3)->addContent(AMF::Object("description", "Stream is now published!"));
amfReply.getContentP(3)->addContent(AMF::Object("clientid", (double)1337));
sendCommand(amfReply, messageType, streamId);
return;
} //publish
if (amfData.getContentP(0)->StrValue() == "checkBandwidth"){
//send a _result reply
AMF::Object amfReply("container", AMF::AMF0_DDV_CONTAINER);
amfReply.addContent(AMF::Object("", "_result")); //result success
amfReply.addContent(amfData.getContent(1)); //same transaction ID
amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
sendCommand(amfReply, messageType, streamId);
return;
} //checkBandwidth
if ((amfData.getContentP(0)->StrValue() == "play") || (amfData.getContentP(0)->StrValue() == "play2")){
//set reply number and stream name, actual reply is sent up in the ss.spool() handler
playTransaction = amfData.getContentP(1)->NumValue();
playMessageType = messageType;
playStreamId = streamId;
streamName = amfData.getContentP(3)->StrValue();
Connector_RTMP::ready4data = true; //start sending video data!
return;
} //play
if ((amfData.getContentP(0)->StrValue() == "seek")){
//set reply number and stream name, actual reply is sent up in the ss.spool() handler
playTransaction = amfData.getContentP(1)->NumValue();
playMessageType = messageType;
playStreamId = streamId;
streamInited = false;
AMF::Object amfReply("container", AMF::AMF0_DDV_CONTAINER);
amfReply.addContent(AMF::Object("", "onStatus")); //status reply
amfReply.addContent(amfData.getContent(1)); //same transaction ID
amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
amfReply.addContent(AMF::Object("")); //info
amfReply.getContentP(3)->addContent(AMF::Object("level", "status"));
amfReply.getContentP(3)->addContent(AMF::Object("code", "NetStream.Seek.Notify"));
amfReply.getContentP(3)->addContent(AMF::Object("description", "Seeking to the specified time"));
amfReply.getContentP(3)->addContent(AMF::Object("details", "DDV"));
amfReply.getContentP(3)->addContent(AMF::Object("clientid", (double)1337));
sendCommand(amfReply, playMessageType, playStreamId);
ss.Send("s ");
ss.Send(JSON::Value((long long int)amfData.getContentP(3)->NumValue()).asString().c_str());
ss.SendNow("\n");
return;
} //seek
if ((amfData.getContentP(0)->StrValue() == "pauseRaw") || (amfData.getContentP(0)->StrValue() == "pause")){
if (amfData.getContentP(3)->NumValue()){
ss.Send("q\n"); //quit playing
//send a status reply
AMF::Object amfReply("container", AMF::AMF0_DDV_CONTAINER);
amfReply.addContent(AMF::Object("", "onStatus")); //status reply
amfReply.addContent(amfData.getContent(1)); //same transaction ID
amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
amfReply.addContent(AMF::Object("")); //info
amfReply.getContentP(3)->addContent(AMF::Object("level", "status"));
amfReply.getContentP(3)->addContent(AMF::Object("code", "NetStream.Pause.Notify"));
amfReply.getContentP(3)->addContent(AMF::Object("description", "Pausing playback"));
amfReply.getContentP(3)->addContent(AMF::Object("details", "DDV"));
amfReply.getContentP(3)->addContent(AMF::Object("clientid", (double)1337));
sendCommand(amfReply, playMessageType, playStreamId);
}else{
ss.SendNow("p\n"); //start playing
//send a status reply
AMF::Object amfReply("container", AMF::AMF0_DDV_CONTAINER);
amfReply.addContent(AMF::Object("", "onStatus")); //status reply
amfReply.addContent(amfData.getContent(1)); //same transaction ID
amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
amfReply.addContent(AMF::Object("")); //info
amfReply.getContentP(3)->addContent(AMF::Object("level", "status"));
amfReply.getContentP(3)->addContent(AMF::Object("code", "NetStream.Unpause.Notify"));
amfReply.getContentP(3)->addContent(AMF::Object("description", "Resuming playback"));
amfReply.getContentP(3)->addContent(AMF::Object("details", "DDV"));
amfReply.getContentP(3)->addContent(AMF::Object("clientid", (double)1337));
sendCommand(amfReply, playMessageType, playStreamId);
}
return;
} //pause
#if DEBUG >= 2
fprintf(stderr, "AMF0 command not processed!\n%s\n", amfData.Print().c_str());
#endif
} //parseAMFCommand
///\brief Gets and parses one RTMP chunk at a time.
///\param inputBuffer A buffer filled with chunk data.
void parseChunk(Socket::Buffer & inputBuffer){
//for DTSC conversion
static DTSC::Meta meta_out;
static std::stringstream prebuffer; // Temporary buffer before sending real data
static bool sending = false;
static unsigned int counter = 0;
//for chunk parsing
static RTMPStream::Chunk next;
static FLV::Tag F;
static AMF::Object amfdata("empty", AMF::AMF0_DDV_CONTAINER);
static AMF::Object amfelem("empty", AMF::AMF0_DDV_CONTAINER);
static AMF::Object3 amf3data("empty", AMF::AMF3_DDV_CONTAINER);
static AMF::Object3 amf3elem("empty", AMF::AMF3_DDV_CONTAINER);
while (next.Parse(inputBuffer)){
//send ACK if we received a whole window
if ((RTMPStream::rec_cnt - RTMPStream::rec_window_at > RTMPStream::rec_window_size)){
RTMPStream::rec_window_at = RTMPStream::rec_cnt;
Socket.Send(RTMPStream::SendCTL(3, RTMPStream::rec_cnt)); //send ack (msg 3)
}
switch (next.msg_type_id){
case 0: //does not exist
#if DEBUG >= 2
fprintf(stderr, "UNKN: Received a zero-type message. Possible data corruption? Aborting!\n");
#endif
while (inputBuffer.size()){
inputBuffer.get().clear();
}
ss.close();
Socket.close();
break; //happens when connection breaks unexpectedly
case 1: //set chunk size
RTMPStream::chunk_rec_max = ntohl(*(int*)next.data.c_str());
#if DEBUG >= 5
fprintf(stderr, "CTRL: Set chunk size: %i\n", RTMPStream::chunk_rec_max);
#endif
break;
case 2: //abort message - we ignore this one
#if DEBUG >= 5
fprintf(stderr, "CTRL: Abort message\n");
#endif
//4 bytes of stream id to drop
break;
case 3: //ack
#if DEBUG >= 8
fprintf(stderr, "CTRL: Acknowledgement\n");
#endif
RTMPStream::snd_window_at = ntohl(*(int*)next.data.c_str());
RTMPStream::snd_window_at = RTMPStream::snd_cnt;
break;
case 4: {
//2 bytes event type, rest = event data
//types:
//0 = stream begin, 4 bytes ID
//1 = stream EOF, 4 bytes ID
//2 = stream dry, 4 bytes ID
//3 = setbufferlen, 4 bytes ID, 4 bytes length
//4 = streamisrecorded, 4 bytes ID
//6 = pingrequest, 4 bytes data
//7 = pingresponse, 4 bytes data
//we don't need to process this
#if DEBUG >= 5
short int ucmtype = ntohs(*(short int*)next.data.c_str());
switch (ucmtype){
case 0:
fprintf(stderr, "CTRL: UCM StreamBegin %i\n", ntohl(*((int*)(next.data.c_str()+2))));
break;
case 1:
fprintf(stderr, "CTRL: UCM StreamEOF %i\n", ntohl(*((int*)(next.data.c_str()+2))));
break;
case 2:
fprintf(stderr, "CTRL: UCM StreamDry %i\n", ntohl(*((int*)(next.data.c_str()+2))));
break;
case 3:
fprintf(stderr, "CTRL: UCM SetBufferLength %i %i\n", ntohl(*((int*)(next.data.c_str()+2))), ntohl(*((int*)(next.data.c_str()+6))));
break;
case 4:
fprintf(stderr, "CTRL: UCM StreamIsRecorded %i\n", ntohl(*((int*)(next.data.c_str()+2))));
break;
case 6:
fprintf(stderr, "CTRL: UCM PingRequest %i\n", ntohl(*((int*)(next.data.c_str()+2))));
break;
case 7:
fprintf(stderr, "CTRL: UCM PingResponse %i\n", ntohl(*((int*)(next.data.c_str()+2))));
break;
default:
fprintf(stderr, "CTRL: UCM Unknown (%hi)\n", ucmtype);
break;
}
#endif
}
break;
case 5: //window size of other end
#if DEBUG >= 5
fprintf(stderr, "CTRL: Window size\n");
#endif
RTMPStream::rec_window_size = ntohl(*(int*)next.data.c_str());
RTMPStream::rec_window_at = RTMPStream::rec_cnt;
Socket.Send(RTMPStream::SendCTL(3, RTMPStream::rec_cnt)); //send ack (msg 3)
break;
case 6:
#if DEBUG >= 5
fprintf(stderr, "CTRL: Set peer bandwidth\n");
#endif
//4 bytes window size, 1 byte limit type (ignored)
RTMPStream::snd_window_size = ntohl(*(int*)next.data.c_str());
Socket.Send(RTMPStream::SendCTL(5, RTMPStream::snd_window_size)); //send window acknowledgement size (msg 5)
break;
case 8: //audio data
case 9: //video data
case 18: //meta data
if (ss.connected()){
if (streamReset){
//reset push data to empty, in case stream properties change
meta_out.reset();
prebuffer.str("");
sending = false;
counter = 0;
streamReset = false;
}
F.ChunkLoader(next);
JSON::Value pack_out = F.toJSON(meta_out);
if ( !pack_out.isNull()){
if ( !sending){
counter++;
if (counter > 8){
sending = true;
meta_out.send(ss);
ss.SendNow(prebuffer.str()); //write buffer
prebuffer.str(""); //clear buffer
pack_out.sendTo(ss);
}else{
prebuffer << pack_out.toNetPacked();
}
}else{
pack_out.sendTo(ss);
}
}
}else{
#if DEBUG >= 5
fprintf(stderr, "Received useless media data\n");
#endif
Socket.close();
}
break;
case 15:
#if DEBUG >= 5
fprintf(stderr, "Received AFM3 data message\n");
#endif
break;
case 16:
#if DEBUG >= 5
fprintf(stderr, "Received AFM3 shared object\n");
#endif
break;
case 17: {
#if DEBUG >= 5
fprintf(stderr, "Received AFM3 command message\n");
#endif
if (next.data[0] != 0){
next.data = next.data.substr(1);
amf3data = AMF::parse3(next.data);
#if DEBUG >= 5
amf3data.Print();
#endif
}else{
#if DEBUG >= 5
fprintf(stderr, "Received AFM3-0 command message\n");
#endif
next.data = next.data.substr(1);
amfdata = AMF::parse(next.data);
parseAMFCommand(amfdata, 17, next.msg_stream_id);
} //parsing AMF0-style
}
break;
case 19:
#if DEBUG >= 5
fprintf(stderr, "Received AFM0 shared object\n");
#endif
break;
case 20: { //AMF0 command message
amfdata = AMF::parse(next.data);
parseAMFCommand(amfdata, 20, next.msg_stream_id);
}
break;
case 22:
#if DEBUG >= 5
fprintf(stderr, "Received aggregate message\n");
#endif
break;
default:
#if DEBUG >= 1
fprintf(stderr, "Unknown chunk received! Probably protocol corruption, stopping parsing of incoming data.\n");
#endif
stopParsing = true;
break;
}
}
} //parseChunk
///\brief Main function for the RTMP Connector
///\param conn A socket describing the connection to the client.
///\return The exit code of the connector.
int rtmpConnector(Socket::Connection & conn){
Socket = conn;
Socket.setBlocking(false);
FLV::Tag tag, init_tag;
DTSC::Stream Strm;
while ( !Socket.Received().available(1537) && Socket.connected()){
Socket.spool();
Util::sleep(5);
}
RTMPStream::handshake_in = Socket.Received().remove(1537);
RTMPStream::rec_cnt += 1537;
if (RTMPStream::doHandshake()){
Socket.SendNow(RTMPStream::handshake_out);
while ( !Socket.Received().available(1536) && Socket.connected()){
Socket.spool();
Util::sleep(5);
}
Socket.Received().remove(1536);
RTMPStream::rec_cnt += 1536;
#if DEBUG >= 5
fprintf(stderr, "Handshake succcess!\n");
#endif
}else{
fprintf(stderr, "RTMP: Handshake fail!\n");
return 0;
}
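//The byte counts above follow the RTMP handshake layout: the first 1537 bytes are C0
//(a single version byte) plus C1 (1536 bytes of client handshake data), and the 1536
//bytes consumed after sending the reply built by doHandshake() are the client's C2.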
unsigned int lastStats = 0;
bool firsttime = true;
while (Socket.connected()){
if (Socket.spool() || firsttime){
parseChunk(Socket.Received());
firsttime = false;
}else{
Util::sleep(1); //sleep 1ms to prevent high CPU usage
}
if (ready4data){
if ( !inited){
//we are ready, connect the socket!
ss = Util::Stream::getStream(streamName);
if ( !ss.connected()){
#if DEBUG >= 1
fprintf(stderr, "Could not connect to server!\n");
#endif
Socket.close(); //disconnect user
break;
}
ss.setBlocking(false);
Strm.waitForMeta(ss);
//find first audio and video tracks
for (std::map<int,DTSC::Track>::iterator it = Strm.metadata.tracks.begin(); it != Strm.metadata.tracks.end(); it++){
if (videoID == -1 && (it->second.codec == "H264" || it->second.codec == "H263" || it->second.codec == "VP6")){
videoID = it->second.trackID;
}
if (audioID == -1 && (it->second.codec == "AAC" || it->second.codec == "MP3")){
audioID = it->second.trackID;
}
}
//select the tracks and play
std::stringstream cmd;
cmd << "t";
if (videoID != -1){
cmd << " " << videoID;
}
if (audioID != -1){
cmd << " " << audioID;
}
cmd << "\np\n";
ss.SendNow(cmd.str().c_str());
inited = true;
}
if (inited && !noStats){
long long int now = Util::epoch();
if (now != lastStats){
lastStats = now;
ss.SendNow(Socket.getStats("RTMP"));
}
}
if (ss.spool()){
while (Strm.parsePacket(ss.Received())){
if (playTransaction != -1){
//send a status reply
AMF::Object amfreply("container", AMF::AMF0_DDV_CONTAINER);
amfreply.addContent(AMF::Object("", "onStatus")); //status reply
amfreply.addContent(AMF::Object("", (double)playTransaction)); //same transaction ID
amfreply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
amfreply.addContent(AMF::Object("")); //info
amfreply.getContentP(3)->addContent(AMF::Object("level", "status"));
amfreply.getContentP(3)->addContent(AMF::Object("code", "NetStream.Play.Reset"));
amfreply.getContentP(3)->addContent(AMF::Object("description", "Playing and resetting..."));
amfreply.getContentP(3)->addContent(AMF::Object("details", "DDV"));
amfreply.getContentP(3)->addContent(AMF::Object("clientid", (double)1337));
sendCommand(amfreply, playMessageType, playStreamId);
//send streamisrecorded if stream, well, is recorded.
if (Strm.metadata.vod){
Socket.Send(RTMPStream::SendUSR(4, 1)); //send UCM StreamIsRecorded (4), stream 1
}
//send streambegin
Socket.Send(RTMPStream::SendUSR(0, 1)); //send UCM StreamBegin (0), stream 1
//and more reply
amfreply = AMF::Object("container", AMF::AMF0_DDV_CONTAINER);
amfreply.addContent(AMF::Object("", "onStatus")); //status reply
amfreply.addContent(AMF::Object("", (double)playTransaction)); //same transaction ID
amfreply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
amfreply.addContent(AMF::Object("")); //info
amfreply.getContentP(3)->addContent(AMF::Object("level", "status"));
amfreply.getContentP(3)->addContent(AMF::Object("code", "NetStream.Play.Start"));
amfreply.getContentP(3)->addContent(AMF::Object("description", "Playing!"));
amfreply.getContentP(3)->addContent(AMF::Object("details", "DDV"));
amfreply.getContentP(3)->addContent(AMF::Object("clientid", (double)1337));
sendCommand(amfreply, playMessageType, playStreamId);
RTMPStream::chunk_snd_max = 102400; //100KiB
Socket.Send(RTMPStream::SendCTL(1, RTMPStream::chunk_snd_max)); //send chunk size max (msg 1)
//send dunno?
Socket.Send(RTMPStream::SendUSR(32, 1)); //send UCM no clue?, stream 1
playTransaction = -1;
}
//send init data if needed
if ( !streamInited){
init_tag.DTSCMetaInit(Strm, Strm.metadata.tracks[videoID], Strm.metadata.tracks[audioID]);
if (init_tag.len){
Socket.SendNow(RTMPStream::SendMedia(init_tag));
}
if (audioID != -1){
init_tag.DTSCAudioInit(Strm.metadata.tracks[audioID]);
if (init_tag.len){
Socket.SendNow(RTMPStream::SendMedia(init_tag));
}
}
if (videoID != -1){
init_tag.DTSCVideoInit(Strm.metadata.tracks[videoID]);
if (init_tag.len){
Socket.SendNow(RTMPStream::SendMedia(init_tag));
}
}
streamInited = true;
}
//send a tag
if (tag.DTSCLoader(Strm)){
if (tag.len){
Socket.SendNow(RTMPStream::SendMedia(tag));
#if DEBUG >= 8
fprintf(stderr, "Sent tag to %i: [%u] %s\n", Socket.getSocket(), tag.tagTime(), tag.tagType().c_str());
#endif
}
}
}
}
}
}
Socket.close();
ss.SendNow(Socket.getStats("RTMP").c_str());
ss.close();
return 0;
} //rtmpConnector
} //Connector_RTMP namespace
///\brief The standard process-spawning main function.
int main(int argc, char ** argv){
Util::Config conf(argv[0], PACKAGE_VERSION);
JSON::Value capa;
capa["desc"] = "Enables the RTMP protocol which is used by Adobe Flash Player.";
capa["deps"] = "";
capa["url_rel"] = "/play/$";
capa["codecs"][0u][0u].append("H264");
capa["codecs"][0u][0u].append("H263");
capa["codecs"][0u][0u].append("VP6");
capa["codecs"][0u][1u].append("AAC");
capa["codecs"][0u][1u].append("MP3");
capa["methods"][0u]["handler"] = "rtmp";
capa["methods"][0u]["type"] = "flash/10";
capa["methods"][0u]["priority"] = 6ll;
conf.addConnectorOptions(1935, capa);
conf.parseArgs(argc, argv);
if (conf.getBool("json")){
std::cout << capa.toString() << std::endl;
return -1;
}
return conf.serveForkedSocket(Connector_RTMP::rtmpConnector);
} //main

View file

@@ -1,215 +0,0 @@
/// \file conn_ts.cpp
/// Contains the main code for the TS Connector
#include <queue>
#include <string>
#include <iostream>
#include <cmath>
#include <ctime>
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <unistd.h>
#include <getopt.h>
#include <sys/time.h>
#include <sys/wait.h>
#include <sys/types.h>
#include <mist/socket.h>
#include <mist/config.h>
#include <mist/stream.h>
#include <mist/ts_packet.h> //TS support
#include <mist/dtsc.h> //DTSC support
#include <mist/mp4.h> //For initdata conversion
#include <mist/mp4_generic.h>
///\brief Holds everything unique to the TS Connector
namespace Connector_TS {
std::string streamName;
std::string trackIDs;
///\brief Main function for the TS Connector
///\param conn A socket describing the connection to the client.
///\return The exit code of the connector.
int tsConnector(Socket::Connection & conn){
std::string ToPack;
TS::Packet PackData;
std::string DTMIData;
int PacketNumber = 0;
long long unsigned int TimeStamp = 0;
unsigned int ThisNaluSize;
char VideoCounter = 0;
char AudioCounter = 0;
bool IsKeyFrame = false;
MP4::AVCC avccbox;
bool haveAvcc = false;
DTSC::Stream Strm;
bool inited = false;
Socket::Connection ss;
while (conn.connected()){
if ( !inited){
ss = Util::Stream::getStream(streamName);
if ( !ss.connected()){
#if DEBUG >= 1
fprintf(stderr, "Could not connect to server!\n");
#endif
conn.close();
break;
}
if(trackIDs == ""){
std::stringstream tmpTracks;
// no track ids given? Find the first video and first audio track (if available) and use those!
int videoID = -1;
int audioID = -1;
Strm.waitForMeta(ss);
for (std::map<int,DTSC::Track>::iterator it = Strm.metadata.tracks.begin(); it != Strm.metadata.tracks.end(); it++){
if (audioID == -1 && it->second.codec == "AAC"){
audioID = it->first;
tmpTracks << " " << it->first;
}
if (videoID == -1 && it->second.codec == "H264"){
videoID = it->first;
tmpTracks << " " << it->first;
}
} // for iterator
trackIDs += tmpTracks.str();
} // if trackIDs == ""
std::string cmd = "t " + trackIDs + "\ns 0\np\n";
ss.SendNow( cmd );
inited = true;
}
if (ss.spool()){
while (Strm.parsePacket(ss.Received())){
std::stringstream TSBuf;
Socket::Buffer ToPack;
//write PAT and PMT TS packets
if (PacketNumber == 0){
PackData.DefaultPAT();
TSBuf.write(PackData.ToString(), 188);
PackData.DefaultPMT();
TSBuf.write(PackData.ToString(), 188);
PacketNumber += 2;
}
int PIDno = 0;
char * ContCounter = 0;
if (Strm.lastType() == DTSC::VIDEO){
if ( !haveAvcc){
avccbox.setPayload(Strm.metadata.tracks[Strm.getPacket()["trackid"].asInt()].init);
haveAvcc = true;
}
IsKeyFrame = Strm.getPacket().isMember("keyframe");
if (IsKeyFrame){
TimeStamp = (Strm.getPacket()["time"].asInt() * 27000);
}
ToPack.append(avccbox.asAnnexB());
while (Strm.lastData().size() > 4){
ThisNaluSize = (Strm.lastData()[0] << 24) + (Strm.lastData()[1] << 16) + (Strm.lastData()[2] << 8) + Strm.lastData()[3];
Strm.lastData().replace(0, 4, "\000\000\000\001", 4);
if (ThisNaluSize + 4 == Strm.lastData().size()){
ToPack.append(Strm.lastData());
break;
}else{
ToPack.append(Strm.lastData().c_str(), ThisNaluSize + 4);
Strm.lastData().erase(0, ThisNaluSize + 4);
}
}
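//Worked example of the conversion in the loop above (hypothetical sizes): a NAL unit
//stored as length-prefixed data {0x00,0x00,0x00,0x19, <25 payload bytes>} leaves this
//loop as Annex B data {0x00,0x00,0x00,0x01, <25 payload bytes>}, ready to be wrapped
//in a PES packet and TS packets below.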
ToPack.prepend(TS::Packet::getPESVideoLeadIn(0ul, Strm.getPacket()["time"].asInt() * 90));
PIDno = 0x100 - 1 + Strm.getPacket()["trackid"].asInt();
ContCounter = &VideoCounter;
}else if (Strm.lastType() == DTSC::AUDIO){
ToPack.append(TS::GetAudioHeader(Strm.lastData().size(), Strm.metadata.tracks[Strm.getPacket()["trackid"].asInt()].init));
ToPack.append(Strm.lastData());
ToPack.prepend(TS::Packet::getPESAudioLeadIn(ToPack.bytes(1073741824ul), Strm.getPacket()["time"].asInt() * 90));
PIDno = 0x100 - 1 + Strm.getPacket()["trackid"].asInt();
ContCounter = &AudioCounter;
IsKeyFrame = false;
}
//initial packet
PackData.Clear();
PackData.PID(PIDno);
PackData.ContinuityCounter(( *ContCounter)++);
PackData.UnitStart(1);
if (IsKeyFrame){
PackData.RandomAccess(1);
PackData.PCR(TimeStamp);
}
unsigned int toSend = PackData.AddStuffing(ToPack.bytes(184));
std::string gonnaSend = ToPack.remove(toSend);
PackData.FillFree(gonnaSend);
TSBuf.write(PackData.ToString(), 188);
PacketNumber++;
//rest of packets
while (ToPack.size()){
PackData.Clear();
PackData.PID(PIDno);
PackData.ContinuityCounter(( *ContCounter)++);
toSend = PackData.AddStuffing(ToPack.bytes(184));
gonnaSend = ToPack.remove(toSend);
PackData.FillFree(gonnaSend);
TSBuf.write(PackData.ToString(), 188);
PacketNumber++;
}
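//Summary of the packetization above (derived from this loop, not quoted from the TS
//spec): each DTSC packet becomes a PES payload split over 188-byte TS packets carrying
//at most 184 payload bytes each after header and stuffing; a PAT and PMT packet are
//(re)written at the start of every converted packet, and video keyframes get the
//RandomAccess flag plus a PCR derived from their timestamp.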
TSBuf.flush();
if (TSBuf.str().size()){
conn.SendNow(TSBuf.str().c_str(), TSBuf.str().size());
TSBuf.str("");
}
TSBuf.str("");
PacketNumber = 0;
}
}else{
Util::sleep(1000);
conn.spool();
}
}
return 0;
}
}
int main(int argc, char ** argv){
Util::Config conf(argv[0], PACKAGE_VERSION);
JSON::Value capa;
capa["desc"] = "Enables the raw MPEG Transport Stream protocol over TCP.";
capa["deps"] = "";
capa["required"]["streamname"]["name"] = "Stream";
capa["required"]["streamname"]["help"] = "What streamname to serve. For multiple streams, add this protocol multiple times using different ports.";
capa["required"]["streamname"]["type"] = "str";
capa["required"]["streamname"]["option"] = "--stream";
capa["optional"]["tracks"]["name"] = "Tracks";
capa["optional"]["tracks"]["help"] = "The track IDs of the stream that this connector will transmit separated by spaces";
capa["optional"]["tracks"]["type"] = "str";
capa["optional"]["tracks"]["option"] = "--tracks";
conf.addOption("streamname",
JSON::fromString("{\"arg\":\"string\",\"short\":\"s\",\"long\":\"stream\",\"help\":\"The name of the stream that this connector will transmit.\"}"));
conf.addOption("tracks",
JSON::fromString("{\"arg\":\"string\",\"value\":[\"\"],\"short\": \"t\",\"long\":\"tracks\",\"help\":\"The track IDs of the stream that this connector will transmit separated by spaces.\"}"));
conf.addConnectorOptions(8888, capa);
bool ret = conf.parseArgs(argc, argv);
if (conf.getBool("json")){
std::cout << capa.toString() << std::endl;
return -1;
}
if (!ret){
std::cerr << "Usage error: missing argument(s)." << std::endl;
conf.printHelp(std::cout);
return 1;
}
Connector_TS::streamName = conf.getString("streamname");
Connector_TS::trackIDs = conf.getString("tracks");
return conf.serveForkedSocket(Connector_TS::tsConnector);
} //main

View file

@@ -20,8 +20,14 @@
#include "controller_connectors.h"
#include "controller_streams.h"
#include "controller_capabilities.h"
#include "controller_statistics.h"
#include "server.html.h"
#include <mist/tinythread.h>
#include <mist/shared_memory.h>
#define UPLINK_INTERVAL 30
#ifndef COMPILED_USERNAME
@@ -31,6 +37,7 @@
///\brief Holds everything unique to the controller.
namespace Controller {
Util::Config conf;
Secure::Auth keychecker; ///< Checks key authorization.
@@ -133,42 +140,23 @@ namespace Controller {
out = in;
}
///\brief Parse received statistics.
///\param stats The statistics to be parsed.
void CheckStats(JSON::Value & stats){
long long int currTime = Util::epoch();
for (JSON::ObjIter jit = stats.ObjBegin(); jit != stats.ObjEnd(); jit++){
if (currTime - lastBuffer[jit->first] > 120){
stats.removeMember(jit->first);
return;
}else{
if (jit->second.isMember("curr") && jit->second["curr"].size() > 0){
for (JSON::ObjIter u_it = jit->second["curr"].ObjBegin(); u_it != jit->second["curr"].ObjEnd(); ++u_it){
if (u_it->second.isMember("now") && u_it->second["now"].asInt() < currTime - 3){
jit->second["log"].append(u_it->second);
jit->second["curr"].removeMember(u_it->first);
if ( !jit->second["curr"].size()){
break;
}
u_it = jit->second["curr"].ObjBegin();
}
}
}
}
}
}
} //Controller namespace
/// A simple check of whether the user wants to proceed to fix (y), ignore (n), or abort on (a) a question.
char yna(std::string user_input){
if(user_input == "y" || user_input == "Y"){
char yna(std::string & user_input){
switch (user_input[0]){
case 'y': case 'Y':
return 'y';
}else if(user_input == "n" || user_input == "N"){
break;
case 'n': case 'N':
return 'n';
}else if(user_input == "a" || user_input == "A"){
break;
case 'a': case 'A':
return 'a';
}else{
return 'x';//when no valid option is found, yna returns x
break;
default:
return 'x';
break;
}
}
@@ -210,37 +198,37 @@ int main(int argc, char ** argv){
if ( !stored_user["default"]){
stored_user["default"] = "root";
}
Util::Config conf = Util::Config(argv[0], PACKAGE_VERSION " / " RELEASE);
conf.addOption("listen_port", stored_port);
conf.addOption("listen_interface", stored_interface);
conf.addOption("username", stored_user);
conf.addOption("daemonize",
Controller::conf = Util::Config(argv[0], PACKAGE_VERSION " / " RELEASE);
Controller::conf.addOption("listen_port", stored_port);
Controller::conf.addOption("listen_interface", stored_interface);
Controller::conf.addOption("username", stored_user);
Controller::conf.addOption("daemonize",
JSON::fromString(
"{\"long\":\"daemon\", \"short\":\"d\", \"default\":0, \"long_off\":\"nodaemon\", \"short_off\":\"n\", \"help\":\"Turns deamon mode on (-d) or off (-n). -d runs quietly in background, -n (default) enables verbose in foreground.\"}"));
conf.addOption("account",
Controller::conf.addOption("account",
JSON::fromString(
"{\"long\":\"account\", \"short\":\"a\", \"arg\":\"string\" \"default\":\"\", \"help\":\"A username:password string to create a new account with.\"}"));
conf.addOption("logfile",
Controller::conf.addOption("logfile",
JSON::fromString(
"{\"long\":\"logfile\", \"short\":\"L\", \"arg\":\"string\" \"default\":\"\",\"help\":\"Redirect all standard output to a log file, provided with an argument\"}"));
conf.addOption("configFile",
Controller::conf.addOption("configFile",
JSON::fromString(
"{\"long\":\"config\", \"short\":\"c\", \"arg\":\"string\" \"default\":\"config.json\", \"help\":\"Specify a config file other than default.\"}"));
conf.addOption("uplink",
Controller::conf.addOption("uplink",
JSON::fromString(
"{\"default\":\"\", \"arg\":\"string\", \"help\":\"MistSteward uplink host and port.\", \"short\":\"U\", \"long\":\"uplink\"}"));
conf.addOption("uplink-name",
Controller::conf.addOption("uplink-name",
JSON::fromString(
"{\"default\":\"" COMPILED_USERNAME "\", \"arg\":\"string\", \"help\":\"MistSteward uplink username.\", \"short\":\"N\", \"long\":\"uplink-name\"}"));
conf.addOption("uplink-pass",
Controller::conf.addOption("uplink-pass",
JSON::fromString(
"{\"default\":\"" COMPILED_PASSWORD "\", \"arg\":\"string\", \"help\":\"MistSteward uplink password.\", \"short\":\"P\", \"long\":\"uplink-pass\"}"));
conf.parseArgs(argc, argv);
if(conf.getString("logfile")!= ""){
Controller::conf.parseArgs(argc, argv);
if(Controller::conf.getString("logfile")!= ""){
//open logfile, dup stdout to logfile
int output = open(conf.getString("logfile").c_str(),O_APPEND|O_CREAT|O_WRONLY,S_IRWXU);
int output = open(Controller::conf.getString("logfile").c_str(),O_APPEND|O_CREAT|O_WRONLY,S_IRWXU);
if(output < 0){
DEBUG_MSG(DLVL_ERROR, "Could not redirect output to %s: %s",conf.getString("logfile").c_str(),strerror(errno));
DEBUG_MSG(DLVL_ERROR, "Could not redirect output to %s: %s",Controller::conf.getString("logfile").c_str(),strerror(errno));
return 7;
}else{
dup2(output,STDOUT_FILENO);
@@ -255,27 +243,25 @@ int main(int argc, char ** argv){
}
}
//Input custom config here
Controller::Storage = JSON::fromFile(conf.getString("configFile"));
Controller::Storage = JSON::fromFile(Controller::conf.getString("configFile"));
//check for port, interface and username in arguments
//if they are not there, take them from config file, if there
if (conf.getOption("listen_port", true).size() <= 1){
if (Controller::conf.getOption("listen_port", true).size() <= 1){
if (Controller::Storage["config"]["controller"]["port"]){
conf.getOption("listen_port") = Controller::Storage["config"]["controller"]["port"];
Controller::conf.getOption("listen_port") = Controller::Storage["config"]["controller"]["port"];
}
}
if (conf.getOption("listen_interface", true).size() <= 1){
if (Controller::conf.getOption("listen_interface", true).size() <= 1){
if (Controller::Storage["config"]["controller"]["interface"]){
conf.getOption("listen_interface") = Controller::Storage["config"]["controller"]["interface"];
Controller::conf.getOption("listen_interface") = Controller::Storage["config"]["controller"]["interface"];
}
}
if (conf.getOption("username", true).size() <= 1){
if (Controller::conf.getOption("username", true).size() <= 1){
if (Controller::Storage["config"]["controller"]["username"]){
conf.getOption("username") = Controller::Storage["config"]["controller"]["username"];
Controller::conf.getOption("username") = Controller::Storage["config"]["controller"]["username"];
}
}
JSON::Value capabilities;
//list available protocols and report about them
std::deque<std::string> execs;
@@ -284,6 +270,10 @@ int main(int argc, char ** argv){
char const * conn_args[] = {0, "-j", 0};
for (std::deque<std::string>::iterator it = execs.begin(); it != execs.end(); it++){
if ((*it).substr(0, 8) == "MistConn"){
//skip if an MistOut already existed - MistOut takes precedence!
if (capabilities["connectors"].isMember((*it).substr(8))){
continue;
}
arg_one = Util::getMyPath() + (*it);
conn_args[0] = arg_one.c_str();
capabilities["connectors"][(*it).substr(8)] = JSON::fromString(Util::Procs::getOutputOf((char**)conn_args));
@@ -291,9 +281,17 @@ int main(int argc, char ** argv){
capabilities["connectors"].removeMember((*it).substr(8));
}
}
if ((*it).substr(0, 7) == "MistOut"){
arg_one = Util::getMyPath() + (*it);
conn_args[0] = arg_one.c_str();
capabilities["connectors"][(*it).substr(7)] = JSON::fromString(Util::Procs::getOutputOf((char**)conn_args));
if (capabilities["connectors"][(*it).substr(7)].size() < 1){
capabilities["connectors"].removeMember((*it).substr(7));
}
}
}
createAccount(conf.getString("account"));
createAccount(Controller::conf.getString("account"));
/// Interactive, user-friendly setup starts at this line
if (isatty(fileno(stdin))){
@@ -340,11 +338,11 @@ int main(int argc, char ** argv){
}
//check for streams
if ( !Controller::Storage.isMember("streams") || Controller::Storage["streams"].size() < 1){
std::cerr << "No streams configured, remember to set up streams through local settings page on port " << conf.getInteger("listen_port") << " or using the API." << std::endl;
std::cerr << "No streams configured, remember to set up streams through local settings page on port " << Controller::conf.getInteger("listen_port") << " or using the API." << std::endl;
}
}
std::string uplink_addr = conf.getString("uplink");
std::string uplink_addr = Controller::conf.getString("uplink");
std::string uplink_host = "";
int uplink_port = 0;
if (uplink_addr.size() > 0){
@@ -359,7 +357,7 @@ int main(int argc, char ** argv){
time_t lastuplink = 0;
time_t processchecker = 0;
Socket::Server API_Socket = Socket::Server(conf.getInteger("listen_port"), conf.getString("listen_interface"), true);
Socket::Server API_Socket = Socket::Server(Controller::conf.getInteger("listen_port"), Controller::conf.getString("listen_interface"), true);
Socket::Server Stats_Socket = Socket::Server(Util::getTmpFolder() + "statistics", true);
Socket::Connection Incoming;
std::vector<Controller::ConnectedUser> users;
@@ -369,19 +367,21 @@ int main(int argc, char ** argv){
std::string jsonp;
Controller::ConnectedUser * uplink = 0;
Controller::Log("CONF", "Controller started");
conf.activate();
Controller::conf.activate();
//Create a converter class and automatically load in all encoders.
Converter::Converter myConverter;
while (API_Socket.connected() && conf.is_active){
tthread::thread statsThread(Controller::SharedMemStats, &Controller::conf);
while (API_Socket.connected() && Controller::conf.is_active){
Util::sleep(10);//sleep for 10 ms - prevents 100% CPU time
if (Util::epoch() - processchecker > 10){
if (Util::epoch() - processchecker > 5){
processchecker = Util::epoch();
Controller::CheckProtocols(Controller::Storage["config"]["protocols"], capabilities);
Controller::CheckAllStreams(Controller::Storage["streams"]);
Controller::CheckStats(Controller::Storage["statistics"]);
myConverter.updateStatus();
}
if (uplink_port && Util::epoch() - lastuplink > UPLINK_INTERVAL){
@@ -414,7 +414,8 @@ int main(int argc, char ** argv){
Response["config"] = Controller::Storage["config"];
Response["streams"] = Controller::Storage["streams"];
Response["log"] = Controller::Storage["log"];
Response["statistics"] = Controller::Storage["statistics"];
/// \todo Put this back in, someway, somehow...
//Response["statistics"] = Controller::Storage["statistics"];
Response["now"] = (unsigned int)lastuplink;
uplink->H.Clean();
uplink->H.SetBody("command=" + HTTP::Parser::urlencode(Response.toString()));
@@ -431,93 +432,6 @@ int main(int argc, char ** argv){
if (Incoming.connected()){
users.push_back((Controller::ConnectedUser)Incoming);
}
Incoming = Stats_Socket.accept(true);
if (Incoming.connected()){
buffers.push_back(Incoming);
}
if (buffers.size() > 0){
for (std::vector<Socket::Connection>::iterator it = buffers.begin(); it != buffers.end(); it++){
if ( !it->connected()){
it->close();
buffers.erase(it);
break;
}
if (it->spool()){
while (it->Received().size()){
it->Received().get().resize(it->Received().get().size() - 1);
Request = JSON::fromString(it->Received().get());
it->Received().get().clear();
if (Request.isMember("buffer")){
std::string thisbuffer = Request["buffer"];
Controller::lastBuffer[thisbuffer] = Util::epoch();
//if metadata is available, store it
if (Request.isMember("meta")){
Controller::Storage["streams"][thisbuffer]["meta"] = Request["meta"];
}
if (Controller::Storage["streams"][thisbuffer].isMember("updated")){
Controller::Storage["streams"][thisbuffer].removeMember("updated");
if (Controller::Storage["streams"][thisbuffer].isMember("cut")){
it->SendNow("c"+Controller::Storage["streams"][thisbuffer]["cut"].asString()+"\n");
}else{
it->SendNow("c0\n");
}
if (Controller::Storage["streams"][thisbuffer].isMember("DVR")){
it->SendNow("d"+Controller::Storage["streams"][thisbuffer]["DVR"].asString()+"\n");
}else{
it->SendNow("d20000\n");
}
if (Controller::Storage["streams"][thisbuffer].isMember("source") && Controller::Storage["streams"][thisbuffer]["source"].asStringRef().substr(0, 7) == "push://"){
it->SendNow("s"+Controller::Storage["streams"][thisbuffer]["source"].asStringRef().substr(7)+"\n");
}else{
it->SendNow("s127.0.01\n");
}
}
if (Request.isMember("totals")){
Controller::Storage["statistics"][thisbuffer]["curr"] = Request["curr"];
std::string nowstr = Request["totals"]["now"].asString();
Controller::Storage["statistics"][thisbuffer]["totals"][nowstr] = Request["totals"];
Controller::Storage["statistics"][thisbuffer]["totals"][nowstr].removeMember("now");
Controller::Storage["statistics"][thisbuffer]["totals"].shrink(600); //limit to 10 minutes of data
for (JSON::ObjIter jit = Request["log"].ObjBegin(); jit != Request["log"].ObjEnd(); jit++){
Controller::Storage["statistics"][thisbuffer]["log"].append(jit->second);
Controller::Storage["statistics"][thisbuffer]["log"].shrink(1000); //limit to 1000 users per buffer
}
}
}
if (Request.isMember("vod")){
std::string thisfile = Request["vod"]["filename"];
for (JSON::ObjIter oit = Controller::Storage["streams"].ObjBegin(); oit != Controller::Storage["streams"].ObjEnd(); ++oit){
if ((oit->second.isMember("source") && oit->second["source"].asString() == thisfile)
|| (oit->second.isMember("channel") && oit->second["channel"]["URL"].asString() == thisfile)){
Controller::lastBuffer[oit->first] = Util::epoch();
if (Request["vod"].isMember("meta")){
Controller::Storage["streams"][oit->first]["meta"] = Request["vod"]["meta"];
}
JSON::Value sockit = (long long int)it->getSocket();
std::string nowstr = Request["vod"]["now"].asString();
Controller::Storage["statistics"][oit->first]["curr"][sockit.asString()] = Request["vod"];
Controller::Storage["statistics"][oit->first]["curr"][sockit.asString()].removeMember("meta");
JSON::Value nowtotal;
for (JSON::ObjIter u_it = Controller::Storage["statistics"][oit->first]["curr"].ObjBegin();
u_it != Controller::Storage["statistics"][oit->first]["curr"].ObjEnd(); ++u_it){
nowtotal["up"] = nowtotal["up"].asInt() + u_it->second["up"].asInt();
nowtotal["down"] = nowtotal["down"].asInt() + u_it->second["down"].asInt();
nowtotal["count"] = nowtotal["count"].asInt() + 1;
}
Controller::Storage["statistics"][oit->first]["totals"][nowstr] = nowtotal;
Controller::Storage["statistics"][oit->first]["totals"].shrink(600);
}
}
}
if (Request.isMember("ctrl_log") && Request["ctrl_log"].size() > 0){
for (JSON::ArrIter it = Request["ctrl_log"].ArrBegin(); it != Request["ctrl_log"].ArrEnd(); it++){
Controller::Log((*it)[0u], (*it)[1u]);
}
}
}
}
}
}
if (users.size() > 0){
for (std::vector<Controller::ConnectedUser>::iterator it = users.begin(); it != users.end(); it++){
if ( !it->C.connected() || it->logins > 3){
@@ -543,12 +457,13 @@ int main(int argc, char ** argv){
Response["config"] = Controller::Storage["config"];
Response["streams"] = Controller::Storage["streams"];
Response["log"] = Controller::Storage["log"];
Response["statistics"] = Controller::Storage["statistics"];
Response["authorize"]["username"] = conf.getString("uplink-name");
/// \todo Put this back in, someway, somehow...
//Response["statistics"] = Controller::Storage["statistics"];
Response["authorize"]["username"] = Controller::conf.getString("uplink-name");
Controller::checkCapable(capabilities);
Response["capabilities"] = capabilities;
Controller::Log("UPLK", "Responding to login challenge: " + Request["authorize"]["challenge"].asString());
Response["authorize"]["password"] = Secure::md5(conf.getString("uplink-pass") + Request["authorize"]["challenge"].asString());
Response["authorize"]["password"] = Secure::md5(Controller::conf.getString("uplink-pass") + Request["authorize"]["challenge"].asString());
it->H.Clean();
it->H.SetBody("command=" + HTTP::Parser::urlencode(Response.toString()));
it->H.BuildRequest();
@@ -568,7 +483,6 @@ int main(int argc, char ** argv){
}
if (Request.isMember("clearstatlogs")){
Controller::Storage["log"].null();
Controller::Storage["statistics"].null();
}
}
}else{
@@ -578,8 +492,9 @@ int main(int argc, char ** argv){
it->H.SetHeader("Content-Type", "text/html");
it->H.SetHeader("X-Info", "To force an API response, request the file /api");
it->H.SetHeader("Server", "mistserver/" PACKAGE_VERSION "/" + Util::Config::libver + "/" RELEASE);
it->H.SetBody(std::string((char*)server_html, (size_t)server_html_len));
it->C.Send(it->H.BuildResponse("200", "OK"));
it->H.SetHeader("Content-Length", server_html_len);
it->H.SendResponse("200", "OK", it->C);
it->C.SendNow(server_html, server_html_len);
it->H.Clean();
}else{
Authorize(Request, Response, ( *it));
@@ -622,10 +537,10 @@ int main(int argc, char ** argv){
}
}
if (Request.isMember("save")){
if( Controller::WriteFile(conf.getString("configFile"), Controller::Storage.toString())){
if( Controller::WriteFile(Controller::conf.getString("configFile"), Controller::Storage.toString())){
Controller::Log("CONF", "Config written to file on request through API");
}else{
Controller::Log("ERROR", "Config " + conf.getString("configFile") + " could not be written");
Controller::Log("ERROR", "Config " + Controller::conf.getString("configFile") + " could not be written");
}
}
//sent current configuration, no matter if it was changed or not
@@ -640,11 +555,15 @@ int main(int argc, char ** argv){
}
//sent any available logs and statistics
Response["log"] = Controller::Storage["log"];
Response["statistics"] = Controller::Storage["statistics"];
//clear log and statistics if requested
if (Request.isMember("clearstatlogs")){
Controller::Storage["log"].null();
Controller::Storage["statistics"].null();
}
if (Request.isMember("clients")){
Controller::fillClients(Request["clients"], Response["clients"]);
}
if (Request.isMember("totals")){
Controller::fillTotals(Request["totals"], Response["totals"]);
}
}
@@ -657,6 +576,11 @@ int main(int argc, char ** argv){
}
it->H.Clean();
it->H.SetHeader("Content-Type", "text/javascript");
it->H.SetHeader("Access-Control-Allow-Origin", "*");
it->H.SetHeader("Access-Control-Allow-Methods", "GET, POST");
it->H.SetHeader("Access-Control-Allow-Headers", "Content-Type, X-Requested-With");
it->H.SetHeader("Access-Control-Allow-Credentials", "true");
if (jsonp == ""){
it->H.SetBody(Response.toString() + "\n\n");
}else{
@@ -671,15 +595,17 @@ int main(int argc, char ** argv){
}
}
}
if (!conf.is_active){
if (!Controller::conf.is_active){
Controller::Log("CONF", "Controller shutting down because of user request (received shutdown signal)");
}
if (!API_Socket.connected()){
Controller::Log("CONF", "Controller shutting down because of socket problem (API port closed)");
}
Controller::conf.is_active = false;
API_Socket.close();
if ( !Controller::WriteFile(conf.getString("configFile"), Controller::Storage.toString())){
std::cerr << "Error writing config " << conf.getString("configFile") << std::endl;
statsThread.join();
if ( !Controller::WriteFile(Controller::conf.getString("configFile"), Controller::Storage.toString())){
std::cerr << "Error writing config " << Controller::conf.getString("configFile") << std::endl;
Controller::Storage.removeMember("log");
for (JSON::ObjIter it = Controller::Storage["streams"].ObjBegin(); it != Controller::Storage["streams"].ObjEnd(); it++){
it->second.removeMember("meta");

View file

@@ -1,6 +1,7 @@
#include <stdio.h> // cout, cerr
#include <string>
#include <cstring> // strcpy
#include <sys/stat.h> //stat
#include <mist/json.h>
#include <mist/config.h>
#include <mist/procs.h>
@@ -55,7 +56,14 @@ namespace Controller {
static inline void buildPipedArguments(JSON::Value & p, char * argarr[], JSON::Value & capabilities){
int argnum = 0;
static std::string tmparg;
tmparg = Util::getMyPath() + std::string("MistOut") + p["connector"].asStringRef();
struct stat buf;
if (::stat(tmparg.c_str(), &buf) != 0){
tmparg = Util::getMyPath() + std::string("MistConn") + p["connector"].asStringRef();
}
if (::stat(tmparg.c_str(), &buf) != 0){
return;
}
argarr[argnum++] = (char*)tmparg.c_str();
argarr[argnum++] = (char*)"-n";
JSON::Value & pipedCapa = capabilities["connectors"][p["connector"].asStringRef()];

View file

@@ -0,0 +1,451 @@
#include <cstdio>
#include <mist/config.h>
#include "controller_statistics.h"
/// The STAT_CUTOFF define sets how many seconds of statistics history is kept.
#define STAT_CUTOFF 600
// These are used to store "clients" field requests in a bitfield for speedup.
#define STAT_CLI_HOST 1
#define STAT_CLI_STREAM 2
#define STAT_CLI_PROTO 4
#define STAT_CLI_CONNTIME 8
#define STAT_CLI_POSITION 16
#define STAT_CLI_DOWN 32
#define STAT_CLI_UP 64
#define STAT_CLI_BPS_DOWN 128
#define STAT_CLI_BPS_UP 256
#define STAT_CLI_ALL 0xFFFF
// These are used to store "totals" field requests in a bitfield for speedup.
#define STAT_TOT_CLIENTS 1
#define STAT_TOT_BPS_DOWN 2
#define STAT_TOT_BPS_UP 4
#define STAT_TOT_ALL 0xFF
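//For example (as parsed below), a "totals" request with "fields": ["clients", "upbps"]
//is stored as fields == (STAT_TOT_CLIENTS | STAT_TOT_BPS_UP) == 5.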
std::multimap<unsigned long long int, Controller::statStorage> Controller::oldConns;///<Old connections, sorted on disconnect timestamp
std::map<unsigned long, Controller::statStorage> Controller::curConns;///<Connection storage, sorted on page location.
/// This function runs as a thread and roughly once per second retrieves
/// statistics from all connected clients, as well as wipes
/// old statistics of clients that disconnected more than 10 minutes ago.
void Controller::SharedMemStats(void * config){
DEBUG_MSG(DLVL_HIGH, "Starting stats thread");
IPC::sharedServer statServer("statistics", 88, true);
while(((Util::Config*)config)->is_active){
//parse current users
statServer.parseEach(parseStatistics);
//wipe old statistics
while (oldConns.size() && oldConns.begin()->first < (unsigned long long)(Util::epoch() - STAT_CUTOFF)){
oldConns.erase(oldConns.begin());
}
Util::sleep(1000);
}
DEBUG_MSG(DLVL_HIGH, "Stopping stats thread");
}
/// This function is called by parseStatistics.
/// It updates the internally saved statistics data.
void Controller::statStorage::update(IPC::statExchange & data) {
if (streamName == ""){
host = data.host();
streamName = data.streamName();
connector = data.connector();
}
statLog tmp;
tmp.time = data.time();
tmp.lastSecond = data.lastSecond();
tmp.down = data.down();
tmp.up = data.up();
log[data.now()] = tmp;
//wipe data older than approx. STAT_CUTOFF seconds
if (log.size() > STAT_CUTOFF){
log.erase(log.begin());
}
}
/// This function is called by the shared memory page that holds statistics.
/// It updates the internally saved statistics data, archiving if necessary.
void Controller::parseStatistics(char * data, size_t len, unsigned int id){
IPC::statExchange tmpEx(data);
curConns[id].update(tmpEx);
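//The byte just before the entry data looks like a state counter; these values
//appear to mark a client that is disconnecting, so its statistics are moved to
//the oldConns archive and removed from the active set.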
char counter = (*(data - 1));
if (counter == 126 || counter == 127 || counter == 254 || counter == 255){
oldConns.insert(std::pair<unsigned long long int, statStorage>(Util::epoch(), curConns[id]));
curConns.erase(id);
}
}
/// Returns true if this stream has at least one connected client.
bool Controller::hasViewers(std::string streamName){
if (curConns.size()){
for (std::map<unsigned long, statStorage>::iterator it = curConns.begin(); it != curConns.end(); it++){
if (it->second.streamName == streamName){
return true;
}
}
}
return false;
}
/// This takes a "clients" request, and fills in the response data.
///
/// \api
/// `"client"` requests take the form of:
/// ~~~~~~~~~~~~~~~{.js}
/// {
/// //array of streamnames to accumulate. Empty means all.
/// "streams": ["streama", "streamb", "streamc"],
/// //array of protocols to accumulate. Empty means all.
/// "protocols": ["HLS", "HSS"],
/// //list of requested data fields. Empty means all.
/// "fields": ["host", "stream", "protocol", "conntime", "position", "down", "up", "downbps", "upbps"],
/// //unix timestamp of measuring moment. Negative means X seconds ago. Empty means now.
/// "time": 1234567
/// }
/// ~~~~~~~~~~~~~~~
/// and are responded to as:
/// ~~~~~~~~~~~~~~~{.js}
/// {
/// //unix timestamp of data. Always present, always absolute.
/// "time": 1234567,
/// //array of actually represented data fields.
/// "fields": [...]
/// //for all clients, the data in the order they appear in the "fields" field.
/// "data": [[x, y, z], [x, y, z], [x, y, z]]
/// }
/// ~~~~~~~~~~~~~~~
void Controller::fillClients(JSON::Value & req, JSON::Value & rep){
//first, figure out the timestamp wanted
long long int reqTime = 0;
if (req.isMember("time")){
reqTime = req["time"].asInt();
}
//to make sure no nasty timing business takes place, we store the case "now" as a bool.
bool now = (reqTime == 0);
//add the current time, if negative or zero.
if (reqTime <= 0){
reqTime += Util::epoch();
}
//at this point, reqTime is the absolute timestamp.
rep["time"] = reqTime; //fill the absolute timestamp
unsigned int fields = 0;
//next, figure out the fields wanted
if (req.isMember("fields") && req["fields"].size()){
for (JSON::ArrIter it = req["fields"].ArrBegin(); it != req["fields"].ArrEnd(); it++){
if ((*it).asStringRef() == "host"){fields |= STAT_CLI_HOST;}
if ((*it).asStringRef() == "stream"){fields |= STAT_CLI_STREAM;}
if ((*it).asStringRef() == "protocol"){fields |= STAT_CLI_PROTO;}
if ((*it).asStringRef() == "conntime"){fields |= STAT_CLI_CONNTIME;}
if ((*it).asStringRef() == "position"){fields |= STAT_CLI_POSITION;}
if ((*it).asStringRef() == "down"){fields |= STAT_CLI_DOWN;}
if ((*it).asStringRef() == "up"){fields |= STAT_CLI_UP;}
if ((*it).asStringRef() == "downbps"){fields |= STAT_CLI_BPS_DOWN;}
if ((*it).asStringRef() == "upbps"){fields |= STAT_CLI_BPS_UP;}
}
}
//select all, if none selected
if (!fields){fields = STAT_CLI_ALL;}
//figure out what streams are wanted
std::set<std::string> streams;
if (req.isMember("streams") && req["streams"].size()){
for (JSON::ArrIter it = req["streams"].ArrBegin(); it != req["streams"].ArrEnd(); it++){
streams.insert((*it).asStringRef());
}
}
//figure out what protocols are wanted
std::set<std::string> protos;
if (req.isMember("protocols") && req["protocols"].size()){
for (JSON::ArrIter it = req["protocols"].ArrBegin(); it != req["protocols"].ArrEnd(); it++){
protos.insert((*it).asStringRef());
}
}
//output the selected fields
rep["fields"].null();
if (fields & STAT_CLI_HOST){rep["fields"].append("host");}
if (fields & STAT_CLI_STREAM){rep["fields"].append("stream");}
if (fields & STAT_CLI_PROTO){rep["fields"].append("protocol");}
if (fields & STAT_CLI_CONNTIME){rep["fields"].append("conntime");}
if (fields & STAT_CLI_POSITION){rep["fields"].append("position");}
if (fields & STAT_CLI_DOWN){rep["fields"].append("down");}
if (fields & STAT_CLI_UP){rep["fields"].append("up");}
if (fields & STAT_CLI_BPS_DOWN){rep["fields"].append("downbps");}
if (fields & STAT_CLI_BPS_UP){rep["fields"].append("upbps");}
//output the data itself
rep["data"].null();
//start with current connections
if (curConns.size()){
for (std::map<unsigned long, statStorage>::iterator it = curConns.begin(); it != curConns.end(); it++){
unsigned long long time = reqTime;
if (now){time = it->second.log.rbegin()->first;}
//data present and wanted? insert it!
if ((it->second.log.rbegin()->first >= time && it->second.log.begin()->first <= time) && (!streams.size() || streams.count(it->second.streamName)) && (!protos.size() || protos.count(it->second.connector))){
JSON::Value d;
std::map<unsigned long long, statLog>::iterator statRef = it->second.log.lower_bound(time);
std::map<unsigned long long, statLog>::iterator prevRef = --(it->second.log.lower_bound(time));
if (fields & STAT_CLI_HOST){d.append(it->second.host);}
if (fields & STAT_CLI_STREAM){d.append(it->second.streamName);}
if (fields & STAT_CLI_PROTO){d.append(it->second.connector);}
if (fields & STAT_CLI_CONNTIME){d.append((long long)statRef->second.time);}
if (fields & STAT_CLI_POSITION){d.append((long long)statRef->second.lastSecond);}
if (fields & STAT_CLI_DOWN){d.append(statRef->second.down);}
if (fields & STAT_CLI_UP){d.append(statRef->second.up);}
if (fields & STAT_CLI_BPS_DOWN){
if (statRef != it->second.log.begin()){
unsigned int diff = statRef->first - prevRef->first;
d.append((statRef->second.down - prevRef->second.down) / diff);
}else{
d.append(statRef->second.down);
}
}
if (fields & STAT_CLI_BPS_UP){
if (statRef != it->second.log.begin()){
unsigned int diff = statRef->first - prevRef->first;
d.append((statRef->second.up - prevRef->second.up) / diff);
}else{
d.append(statRef->second.up);
}
}
rep["data"].append(d);
}
}
}
//if we're only interested in current, don't even bother looking at history
if (now){
return;
}
//look at history
if (oldConns.size()){
for (std::map<unsigned long long int, statStorage>::iterator it = oldConns.begin(); it != oldConns.end(); it++){
//data present and wanted? insert it!
if ((it->second.log.rbegin()->first >= (unsigned long long)reqTime && it->second.log.begin()->first <= (unsigned long long)reqTime) && (!streams.size() || streams.count(it->second.streamName)) && (!protos.size() || protos.count(it->second.connector))){
JSON::Value d;
std::map<unsigned long long, statLog>::iterator statRef = it->second.log.lower_bound(reqTime);
std::map<unsigned long long, statLog>::iterator prevRef = --(it->second.log.lower_bound(reqTime));
if (fields & STAT_CLI_HOST){d.append(it->second.host);}
if (fields & STAT_CLI_STREAM){d.append(it->second.streamName);}
if (fields & STAT_CLI_PROTO){d.append(it->second.connector);}
if (fields & STAT_CLI_CONNTIME){d.append((long long)statRef->second.time);}
if (fields & STAT_CLI_POSITION){d.append((long long)statRef->second.lastSecond);}
if (fields & STAT_CLI_DOWN){d.append(statRef->second.down);}
if (fields & STAT_CLI_UP){d.append(statRef->second.up);}
if (fields & STAT_CLI_BPS_DOWN){
if (statRef != it->second.log.begin()){
unsigned int diff = statRef->first - prevRef->first;
d.append((statRef->second.down - prevRef->second.down) / diff);
}else{
d.append(statRef->second.down);
}
}
if (fields & STAT_CLI_BPS_UP){
if (statRef != it->second.log.begin()){
unsigned int diff = statRef->first - prevRef->first;
d.append((statRef->second.up - prevRef->second.up) / diff);
}else{
d.append(statRef->second.up);
}
}
rep["data"].append(d);
}
}
}
//all done! return is by reference, so no need to return anything here.
}
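//Usage sketch (hypothetical caller, not part of this commit): the controller's
//API handler can pass a parsed request straight into this function, for example:
//  JSON::Value req, rep;
//  req["streams"].append("example_stream"); //example stream name
//  req["fields"].append("host");
//  req["fields"].append("downbps");
//  Controller::fillClients(req, rep);
//rep then holds "time", "fields" and "data" as documented above.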
class totalsData {
public:
totalsData(){
clients = 0;
downbps = 0;
upbps = 0;
}
void add(unsigned int down, unsigned int up){
clients++;
downbps += down;
upbps += up;
}
long long clients;
long long downbps;
long long upbps;
};
/// This takes a "totals" request, and fills in the response data.
///
/// \api
/// `"totals"` requests take the form of:
/// ~~~~~~~~~~~~~~~{.js}
/// {
/// //array of streamnames to accumulate. Empty means all.
/// "streams": ["streama", "streamb", "streamc"],
/// //array of protocols to accumulate. Empty means all.
/// "protocols": ["HLS", "HSS"],
/// //list of requested data fields. Empty means all.
/// "fields": ["clients", "downbps", "upbps"],
/// //unix timestamp of data start. Negative means X seconds ago. Empty means earliest available.
/// "start": 1234567
/// //unix timestamp of data end. Negative means X seconds ago. Empty means latest available (usually 'now').
/// "end": 1234567
/// }
/// ~~~~~~~~~~~~~~~
/// and are responded to as:
/// ~~~~~~~~~~~~~~~{.js}
/// {
/// //unix timestamp of start of data. Always present, always absolute.
/// "start": 1234567,
/// //unix timestamp of end of data. Always present, always absolute.
/// "end": 1234567,
/// //array of actually represented data fields.
/// "fields": [...]
/// // Time between datapoints. Here: 10 points spaced 5 seconds apart, followed by 10 points spaced 1 second apart.
/// "interval": [[10, 5], [10, 1]],
/// //the data for the times as mentioned in the "interval" field, in the order they appear in the "fields" field.
/// "data": [[x, y, z], [x, y, z], [x, y, z]]
/// }
/// ~~~~~~~~~~~~~~~
void Controller::fillTotals(JSON::Value & req, JSON::Value & rep){
//first, figure out the timestamps wanted
long long int reqStart = 0;
long long int reqEnd = 0;
if (req.isMember("start")){
reqStart = req["start"].asInt();
}
if (req.isMember("end")){
reqEnd = req["end"].asInt();
}
//add the current time, if negative or zero.
if (reqStart < 0){
reqStart += Util::epoch();
}
if (reqStart == 0){
reqStart = Util::epoch() - STAT_CUTOFF;
}
if (reqEnd <= 0){
reqEnd += Util::epoch();
}
//at this point, reqStart and reqEnd are absolute timestamps.
unsigned int fields = 0;
//next, figure out the fields wanted
if (req.isMember("fields") && req["fields"].size()){
for (JSON::ArrIter it = req["fields"].ArrBegin(); it != req["fields"].ArrEnd(); it++){
if ((*it).asStringRef() == "clients"){fields |= STAT_TOT_CLIENTS;}
if ((*it).asStringRef() == "downbps"){fields |= STAT_TOT_BPS_DOWN;}
if ((*it).asStringRef() == "upbps"){fields |= STAT_TOT_BPS_UP;}
}
}
//select all, if none selected
if (!fields){fields = STAT_TOT_ALL;}
//figure out what streams are wanted
std::set<std::string> streams;
if (req.isMember("streams") && req["streams"].size()){
for (JSON::ArrIter it = req["streams"].ArrBegin(); it != req["streams"].ArrEnd(); it++){
streams.insert((*it).asStringRef());
}
}
//figure out what protocols are wanted
std::set<std::string> protos;
if (req.isMember("protocols") && req["protocols"].size()){
for (JSON::ArrIter it = req["protocols"].ArrBegin(); it != req["protocols"].ArrEnd(); it++){
protos.insert((*it).asStringRef());
}
}
//output the selected fields
rep["fields"].null();
if (fields & STAT_TOT_CLIENTS){rep["fields"].append("clients");}
if (fields & STAT_TOT_BPS_DOWN){rep["fields"].append("downbps");}
if (fields & STAT_TOT_BPS_UP){rep["fields"].append("upbps");}
//start data collection
std::map<long long unsigned int, totalsData> totalsCount;
//start with current connections
if (curConns.size()){
for (std::map<unsigned long, statStorage>::iterator it = curConns.begin(); it != curConns.end(); it++){
//data present and wanted? insert it!
if (it->second.log.size() > 1 && (it->second.log.rbegin()->first >= (unsigned long long)reqStart || it->second.log.begin()->first <= (unsigned long long)reqEnd) && (!streams.size() || streams.count(it->second.streamName)) && (!protos.size() || protos.count(it->second.connector))){
//keep track of the previous and current entry, starting at the second entry so there's always a previous value to compute the down/up delta from.
std::map<unsigned long long, statLog>::iterator pi = it->second.log.begin();
for (std::map<unsigned long long, statLog>::iterator li = ++(it->second.log.begin()); li != it->second.log.end(); li++){
if (li->first < (unsigned long long)reqStart || pi->first > (unsigned long long)reqEnd){
continue;
}
unsigned int diff = li->first - pi->first;
unsigned int ddown = (li->second.down - pi->second.down) / diff;
unsigned int dup = (li->second.up - pi->second.up) / diff;
for (long long unsigned int t = pi->first; t < li->first; t++){
if (t >= (unsigned long long)reqStart && t <= (unsigned long long)reqEnd){
totalsCount[t].add(ddown, dup);
}
}
pi = li;//set previous iterator to log iterator
}
}
}
}
//look at history
if (oldConns.size()){
for (std::map<unsigned long long int, statStorage>::iterator it = oldConns.begin(); it != oldConns.end(); it++){
//data present and wanted? insert it!
if (it->second.log.size() > 1 && (it->second.log.rbegin()->first >= (unsigned long long)reqStart || it->second.log.begin()->first <= (unsigned long long)reqEnd) && (!streams.size() || streams.count(it->second.streamName)) && (!protos.size() || protos.count(it->second.connector))){
//keep track of the previous and current entry, starting at the second entry so there's always a previous value to compute the down/up delta from.
std::map<unsigned long long, statLog>::iterator pi = it->second.log.begin();
for (std::map<unsigned long long, statLog>::iterator li = ++(it->second.log.begin()); li != it->second.log.end(); li++){
if (li->first < (unsigned long long)reqStart || pi->first > (unsigned long long)reqEnd){
continue;
}
unsigned int diff = li->first - pi->first;
unsigned int ddown = (li->second.down - pi->second.down) / diff;
unsigned int dup = (li->second.up - pi->second.up) / diff;
for (long long unsigned int t = pi->first; t < li->first; t++){
if (t >= (unsigned long long)reqStart && t <= (unsigned long long)reqEnd){
totalsCount[t].add(ddown, dup);
}
}
pi = li;//set previous iterator to log iterator
}
}
}
}
//output the data itself
if (!totalsCount.size()){
//Oh noes! No data. We'll just reply with a bunch of nulls.
rep["start"].null();
rep["end"].null();
rep["data"].null();
rep["interval"].null();
return;
}
//yay! We have data!
rep["start"] = (long long)totalsCount.begin()->first;
rep["end"] = (long long)totalsCount.rbegin()->first;
rep["data"].null();
rep["interval"].null();
long long prevT = 0;
JSON::Value i;
for (std::map<long long unsigned int, totalsData>::iterator it = totalsCount.begin(); it != totalsCount.end(); it++){
JSON::Value d;
if (fields & STAT_TOT_CLIENTS){d.append(it->second.clients);}
if (fields & STAT_TOT_BPS_DOWN){d.append(it->second.downbps);}
if (fields & STAT_TOT_BPS_UP){d.append(it->second.upbps);}
rep["data"].append(d);
if (prevT){
if (i.size() < 2){
i.append(1ll);
i.append((long long)(it->first - prevT));
}else{
if (i[1u].asInt() != (long long)(it->first - prevT)){
rep["interval"].append(i);
i[0u] = 1ll;
i[1u] = (long long)(it->first - prevT);
}else{
i[0u] = i[0u].asInt() + 1;
}
}
}
prevT = it->first;
}
if (i.size() > 1){
rep["interval"].append(i);
i.null();
}
//all done! return is by reference, so no need to return anything here.
}

View file

@@ -0,0 +1,35 @@
#include <mist/shared_memory.h>
#include <mist/timing.h>
#include <mist/defines.h>
#include <mist/json.h>
#include <string>
#include <map>
namespace Controller {
struct statLog {
long time;
long lastSecond;
long long down;
long long up;
};
class statStorage {
public:
void update(IPC::statExchange & data);
std::string host;
std::string streamName;
std::string connector;
std::map<unsigned long long, statLog> log;
};
extern std::multimap<unsigned long long int, statStorage> oldConns;
extern std::map<unsigned long, statStorage> curConns;
void parseStatistics(char * data, size_t len, unsigned int id);
void fillClients(JSON::Value & req, JSON::Value & rep);
void fillTotals(JSON::Value & req, JSON::Value & rep);
void SharedMemStats(void * config);
bool hasViewers(std::string streamName);
}

View file

@@ -3,16 +3,17 @@
#include <mist/timing.h>
#include <mist/stream.h>
#include <mist/dtsc.h>
#include <mist/defines.h>
#include <mist/shared_memory.h>
#include "controller_streams.h"
#include "controller_storage.h"
#include "controller_statistics.h"
#include <sys/stat.h>
#include <map>
///\brief Holds everything unique to the controller.
namespace Controller {
std::map<std::string, int> lastBuffer; ///< Last moment of contact with all buffers.
///\brief Checks whether two streams are equal.
///\param one The first stream for the comparison.
///\param two The second stream for the comparison.
@@ -43,22 +44,28 @@ namespace Controller {
if (data.isMember("source")){
URL = data["source"].asString();
}
std::string buffcmd;
if (URL == ""){
Log("STRM", "Error for stream " + name + "! Source parameter missing.");
data["error"] = "Stream offline: Missing source parameter!";
return;
}
buffcmd = "MistBuffer";
if (data.isMember("DVR") && data["DVR"].asInt() > 0){
data["DVR"] = data["DVR"].asInt();
buffcmd += " -t " + data["DVR"].asString();
}
buffcmd += " -s " + name;
if (URL.substr(0, 4) == "push"){
std::string pusher = URL.substr(7);
Util::Procs::Start(name, Util::getMyPath() + buffcmd + " " + pusher);
Log("BUFF", "(re)starting stream buffer " + name + " for push data from " + pusher);
if (hasViewers(name)){
data["meta"].null();
IPC::sharedPage streamIndex(name,0,false,false);
if (!streamIndex.mapped){
return;
}
unsigned int i = 0;
JSON::fromDTMI((const unsigned char*)streamIndex.mapped + 8, streamIndex.len - 8, i, data["meta"]);
if (data["meta"].isMember("tracks") && data["meta"]["tracks"].size()){
for(JSON::ObjIter trackIt = data["meta"]["tracks"].ObjBegin(); trackIt != data["meta"]["tracks"].ObjEnd(); trackIt++){
trackIt->second.removeMember("fragments");
trackIt->second.removeMember("keys");
trackIt->second.removeMember("parts");
}
}
}
}else{
if (URL.substr(0, 1) == "/"){
data.removeMember("error");
@@ -74,6 +81,12 @@ namespace Controller {
getMeta = true;
data["l_meta"] = (long long)fileinfo.st_mtime;
}
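//A .dtsh file next to the source holds a serialized DTSC header (the inputs in this
//commit write one next to their output); refresh the metadata whenever it changes.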
if (stat((URL+".dtsh").c_str(), &fileinfo) == 0 && !S_ISDIR(fileinfo.st_mode)){
if ( !data.isMember("h_meta") || fileinfo.st_mtime != data["h_meta"].asInt()){
getMeta = true;
data["h_meta"] = (long long)fileinfo.st_mtime;
}
}
if ( !getMeta && data.isMember("meta") && data["meta"].isMember("tracks")){
for (JSON::ObjIter trIt = data["meta"]["tracks"].ObjBegin(); trIt != data["meta"]["tracks"].ObjEnd(); trIt++){
if (trIt->second["codec"] == "H264"){
@@ -107,6 +120,9 @@ namespace Controller {
getMeta = true;
}
if (getMeta){
if ((URL.substr(URL.size() - 5) != ".dtsc") && (stat((URL+".dtsh").c_str(), &fileinfo) != 0)){
Util::Stream::getStream(name);
}
char * tmp_cmd[3] = {0, 0, 0};
std::string mistinfo = Util::getMyPath() + "MistInfo";
tmp_cmd[0] = (char*)mistinfo.c_str();
@@ -127,7 +143,7 @@ namespace Controller {
Util::Procs::getOutputOf(tmp_cmd);
data.removeMember("meta");
}
if (Util::epoch() - lastBuffer[name] > 5){
if (!hasViewers(name)){
if ( !data.isMember("error")){
data["error"] = "Available";
}
@@ -136,9 +152,11 @@ namespace Controller {
data["online"] = 1;
}
return; //MistPlayer handles VoD
}else{
/// \todo Implement ffmpeg pulling again?
//Util::Procs::Start(name, "ffmpeg -re -async 2 -i " + URL + " -f flv -", Util::getMyPath() + "MistFLV2DTSC", Util::getMyPath() + buffcmd);
//Log("BUFF", "(re)starting stream buffer " + name + " for ffmpeg data: ffmpeg -re -async 2 -i " + URL + " -f flv -");
}
Util::Procs::Start(name, "ffmpeg -re -async 2 -i " + URL + " -f flv -", Util::getMyPath() + "MistFLV2DTSC", Util::getMyPath() + buffcmd);
Log("BUFF", "(re)starting stream buffer " + name + " for ffmpeg data: ffmpeg -re -async 2 -i " + URL + " -f flv -");
}
}
@@ -153,7 +171,7 @@ namespace Controller {
if (!jit->second.isMember("name")){
jit->second["name"] = jit->first;
}
if (currTime - lastBuffer[jit->first] > 5){
if (!hasViewers(jit->first)){
if (jit->second.isMember("source") && jit->second["source"].asString().substr(0, 1) == "/" && jit->second.isMember("error")
&& jit->second["error"].asString().substr(0,15) != "Stream offline:"){
jit->second["online"] = 2;
@@ -230,10 +248,7 @@ namespace Controller {
}
}
///\brief Parse a given stream configuration.
///\param in The requested configuration.
///\param out The new configuration after parsing.
void CheckStreams(JSON::Value & in, JSON::Value & out){
void AddStreams(JSON::Value & in, JSON::Value & out){
//check for new streams and updates
for (JSON::ObjIter jit = in.ObjBegin(); jit != in.ObjEnd(); jit++){
if (out.isMember(jit->first)){
@@ -263,6 +278,14 @@ namespace Controller {
startStream(jit->first, out[jit->first]);
}
}
}
///\brief Parse a given stream configuration.
///\param in The requested configuration.
///\param out The new configuration after parsing.
void CheckStreams(JSON::Value & in, JSON::Value & out){
//check for new streams and updates
AddStreams(in, out);
//check for deleted streams
std::set<std::string> toDelete;

View file

@@ -1,12 +1,11 @@
#include <mist/json.h>
namespace Controller {
extern std::map<std::string, int> lastBuffer; ///< Last moment of contact with all buffers.
bool streamsEqual(JSON::Value & one, JSON::Value & two);
void startStream(std::string name, JSON::Value & data);
void CheckAllStreams(JSON::Value & data);
void CheckStreams(JSON::Value & in, JSON::Value & out);
void AddStreams(JSON::Value & in, JSON::Value & out);
struct liveCheck {
long long int lastms;

View file

@@ -19,19 +19,21 @@ namespace Converters {
int curIndex = 1;
F.parseNext();
while ( !F.getJSON().isNull()){
std::string tmp;
while (F.getPacket()){
std::cout << curIndex++ << std::endl;
long long unsigned int time = F.getJSON()["time"].asInt();
long long unsigned int time = F.getPacket().getTime();
std::cout << std::setfill('0') << std::setw(2) << (time / 3600000) << ":";
std::cout << std::setfill('0') << std::setw(2) << ((time % 3600000) / 60000) << ":";
std::cout << std::setfill('0') << std::setw(2) << (((time % 3600000) % 60000) / 1000) << ",";
std::cout << std::setfill('0') << std::setw(3) << time % 1000 << " --> ";
time += F.getJSON()["duration"].asInt();
time += F.getPacket().getInt("duration");
std::cout << std::setfill('0') << std::setw(2) << (time / 3600000) << ":";
std::cout << std::setfill('0') << std::setw(2) << ((time % 3600000) / 60000) << ":";
std::cout << std::setfill('0') << std::setw(2) << (((time % 3600000) % 60000) / 1000) << ",";
std::cout << std::setfill('0') << std::setw(3) << time % 1000 << std::endl;
std::cout << F.getJSON()["data"].asString() << std::endl;
F.getPacket().getString("data", tmp);
std::cout << tmp << std::endl;
F.parseNext();
}
return 0;

View file

@@ -15,7 +15,7 @@ namespace Converters {
DTSC::File F(conf.getString("filename"));
F.seek_bpos(0);
F.parseNext();
JSON::Value oriheader = F.getJSON();
JSON::Value oriheader = F.getPacket().toJSON();
DTSC::Meta meta(F.getMeta());
if (meta.isFixed() && !conf.getBool("force")){
@@ -26,9 +26,11 @@ namespace Converters {
meta.reset();
int bPos = F.getBytePos();
F.parseNext();
while ( !F.getJSON().isNull()){
F.getJSON()["bpos"] = bPos;
meta.update(F.getJSON());
JSON::Value newPack;
while ( F.getPacket()){
newPack = F.getPacket().toJSON();
newPack["bpos"] = bPos;
meta.update(newPack);
bPos = F.getBytePos();
F.parseNext();
}

View file

@@ -128,10 +128,11 @@ namespace Converters {
inFiles[sortIt->second.fileName].selectTracks(trackSelector);
inFiles[sortIt->second.fileName].seek_time(sortIt->second.keyTime);
inFiles[sortIt->second.fileName].seekNext();
while (inFiles[sortIt->second.fileName].getJSON() && inFiles[sortIt->second.fileName].getBytePos() <= sortIt->second.endBPos && !inFiles[sortIt->second.fileName].reachedEOF()){
if (inFiles[sortIt->second.fileName].getJSON()["trackid"].asInt() == sortIt->second.trackID){
inFiles[sortIt->second.fileName].getJSON()["trackid"] = trackMapping[sortIt->second.fileName][sortIt->second.trackID];
outFile.writePacket(inFiles[sortIt->second.fileName].getJSON());
while (inFiles[sortIt->second.fileName].getPacket() && inFiles[sortIt->second.fileName].getBytePos() <= sortIt->second.endBPos && !inFiles[sortIt->second.fileName].reachedEOF()){
if (inFiles[sortIt->second.fileName].getPacket().getTrackId() == sortIt->second.trackID){
JSON::Value tmp = inFiles[sortIt->second.fileName].getPacket().toJSON();
tmp["trackid"] = trackMapping[sortIt->second.fileName][sortIt->second.trackID];
outFile.writePacket(tmp);
}
inFiles[sortIt->second.fileName].seekNext();
}

View file

@@ -1,4 +1,4 @@
#include"oggconv.h"
#include "oggconv.h"
#include <stdlib.h>
#include <mist/bitstream.h>
@@ -11,32 +11,61 @@ namespace OGG{
srand (Util::getMS());//randomising with milliseconds from boot
std::vector<unsigned int> curSegTable;
//trackInf.clear();
//Creating ID headers for theora and vorbis
/// \todo This is utter rubbish right now.
/// \todo We shouldn't assume all possible tracks are selected.
/// \todo We shouldn't be buffering, but sending.
/// \todo Especially not in a std::string. (Why, god, why?!)
//Creating headers
for ( std::map<int,DTSC::Track>::iterator it = meta.tracks.begin(); it != meta.tracks.end(); it ++) {
trackInf[it->second.trackID].codec = it->second.codec;
trackInf[it->second.trackID].OGGSerial = rand() % 0xFFFFFFFE +1; //initialising on a random not 0 number
trackInf[it->second.trackID].seqNum = 0;
if (it->second.codec == "theora"){
curOggPage.clear();
curOggPage.setVersion();
curOggPage.setHeaderType(2);//headertype 2 = Begin of Stream
curOggPage.setGranulePosition(0);
trackInf[it->second.trackID].OGGSerial = rand() % 0xFFFFFFFE +1; //initialising on a random not 0 number
curOggPage.setBitstreamSerialNumber(trackInf[it->second.trackID].OGGSerial);
trackInf[it->second.trackID].seqNum = 0;
curOggPage.setPageSequenceNumber(trackInf[it->second.trackID].seqNum++);
curSegTable.clear();
curSegTable.push_back(it->second.idHeader.size());
curOggPage.setSegmentTable(curSegTable);
curOggPage.setPayload((char*)it->second.idHeader.c_str(), it->second.idHeader.size());
curOggPage.setCRCChecksum(curOggPage.calcChecksum());
//pages.push_back(curOggPage);
parsedPages += std::string(curOggPage.getPage(), curOggPage.getPageSize());
trackInf[it->second.trackID].codec = it->second.codec;
if (it->second.codec == "theora"){
trackInf[it->second.trackID].lastKeyFrame = 1;
trackInf[it->second.trackID].sinceKeyFrame = 0;
theora::header tempHead;
std::string tempString = it->second.idHeader;
tempHead.read((char*)tempString.c_str(),42);
trackInf[it->second.trackID].significantValue = tempHead.getKFGShift();
curOggPage.clear();
curOggPage.setVersion();
curOggPage.setHeaderType(0);//headertype 0 = normal
curOggPage.setGranulePosition(0);
curOggPage.setBitstreamSerialNumber(trackInf[it->second.trackID].OGGSerial);
curOggPage.setPageSequenceNumber(trackInf[it->second.trackID].seqNum++);
curSegTable.clear();
curSegTable.push_back(it->second.commentHeader.size());
curSegTable.push_back(it->second.init.size());
curOggPage.setSegmentTable(curSegTable);
std::string fullHeader = it->second.commentHeader + it->second.init;
curOggPage.setPayload((char*)fullHeader.c_str(),fullHeader.size());
curOggPage.setCRCChecksum(curOggPage.calcChecksum());
parsedPages += std::string(curOggPage.getPage(), curOggPage.getPageSize());
}else if (it->second.codec == "vorbis"){
curOggPage.clear();
curOggPage.setVersion();
curOggPage.setHeaderType(2);//headertype 2 = Begin of Stream
curOggPage.setGranulePosition(0);
curOggPage.setBitstreamSerialNumber(trackInf[it->second.trackID].OGGSerial);
curOggPage.setPageSequenceNumber(trackInf[it->second.trackID].seqNum++);
curSegTable.clear();
curSegTable.push_back(it->second.idHeader.size());
curOggPage.setSegmentTable(curSegTable);
curOggPage.setPayload((char*)it->second.idHeader.c_str(), it->second.idHeader.size());
curOggPage.setCRCChecksum(curOggPage.calcChecksum());
parsedPages += std::string(curOggPage.getPage(), curOggPage.getPageSize());
trackInf[it->second.trackID].lastKeyFrame = 0;
trackInf[it->second.trackID].sinceKeyFrame = 0;
trackInf[it->second.trackID].prevBlockFlag = -1;
@@ -57,12 +86,6 @@ namespace OGG{
tempHead.read((char*)tempString.c_str(),tempString.size());
trackInf[it->second.trackID].vorbisModes = tempHead.readModeDeque(audioChannels);
trackInf[it->second.trackID].hadFirst = false;
}
}
//Creating remaining headers for theora and vorbis
//for tracks in header
//create standard page with comment (empty) en setup header(init)
for ( std::map<int,DTSC::Track>::iterator it = meta.tracks.begin(); it != meta.tracks.end(); it ++) {
curOggPage.clear();
curOggPage.setVersion();
curOggPage.setHeaderType(0);//headertype 0 = normal
@@ -77,6 +100,38 @@ namespace OGG{
curOggPage.setPayload((char*)fullHeader.c_str(),fullHeader.size());
curOggPage.setCRCChecksum(curOggPage.calcChecksum());
parsedPages += std::string(curOggPage.getPage(), curOggPage.getPageSize());
}else if (it->second.codec == "opus"){
//OpusHead page
curOggPage.clear();
curOggPage.setVersion();
curOggPage.setHeaderType(2);//headertype 2 = Begin of Stream
curOggPage.setGranulePosition(0);
curOggPage.setBitstreamSerialNumber(trackInf[it->second.trackID].OGGSerial);
curOggPage.setPageSequenceNumber(trackInf[it->second.trackID].seqNum++);
curSegTable.clear();
curSegTable.push_back(19);
curOggPage.setSegmentTable(curSegTable);
//version = 1, channels = 2, preskip=0x138, origRate=48k, gain=0, channelmap=0
//we can safely hard-code these as everything is already overridden elsewhere anyway
// (except preskip - but this seems to be 0x138 for all files, and doesn't hurt much if it's wrong anyway)
curOggPage.setPayload((char*)"OpusHead\001\002\070\001\200\273\000\000\000\000\000", 19);
curOggPage.setCRCChecksum(curOggPage.calcChecksum());
parsedPages += std::string(curOggPage.getPage(), curOggPage.getPageSize());
//end of OpusHead, now moving on to OpusTags
curOggPage.clear();
curOggPage.setVersion();
curOggPage.setHeaderType(2);//headertype 2 = Begin of Stream
curOggPage.setGranulePosition(0);
curOggPage.setBitstreamSerialNumber(trackInf[it->second.trackID].OGGSerial);
curOggPage.setPageSequenceNumber(trackInf[it->second.trackID].seqNum++);
curSegTable.clear();
curSegTable.push_back(26);
curOggPage.setSegmentTable(curSegTable);
//we send an encoder value of "MistServer" and no further tags
curOggPage.setPayload((char*)"OpusTags\012\000\000\000MistServer\000\000\000\000", 26);
curOggPage.setCRCChecksum(curOggPage.calcChecksum());
parsedPages += std::string(curOggPage.getPage(), curOggPage.getPageSize());
}
}
}
@@ -174,6 +229,8 @@ namespace OGG{
//add to granule position
trackInf[DTSCID].lastKeyFrame += curPCMSamples;
lastGran = trackInf[DTSCID].lastKeyFrame;
} else if (trackInf[DTSCID].codec == "opus"){
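//Opus granule positions count 48 kHz samples; convert the millisecond timestamp
//and round it to whole 120-sample (2.5 ms) frames.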
lastGran = (int)((DTSCPart["time"].asInt() * 48.0) / 120.0 + 0.5) * 120;
}
//}
//last parts of page put out

338
src/input/input.cpp Normal file
View file

@@ -0,0 +1,338 @@
#include <semaphore.h>
#include <fcntl.h>
#include <sys/stat.h>
#include <mist/defines.h>
#include "input.h"
#include <sstream>
#include <fstream>
#include <iterator>
namespace Mist {
Input * Input::singleton = NULL;
void Input::userCallback(char * data, size_t len, unsigned int id){
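//Each 6-byte entry appears to hold a big-endian 32-bit track id followed by a
//big-endian 16-bit number of the last key the client played; respond by making
//sure the next key is buffered.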
long tid = ((long)(data[0]) << 24) | ((long)(data[1]) << 16) | ((long)(data[2]) << 8) | ((long)(data[3]));
long keyNum = ((long)(data[4]) << 8) | ((long)(data[5]));
bufferFrame(tid, keyNum + 1);//Try to buffer the next frame
}
void Input::doNothing(char * data, size_t len, unsigned int id){
DEBUG_MSG(DLVL_DONTEVEN, "Doing 'nothing'");
for (int i = 0; i < 5; i++){
int tmp = ((long)(data[i*6]) << 24) | ((long)(data[i*6 + 1]) << 16) | ((long)(data[i*6 + 2]) << 8) | data[i*6 + 3];
if (tmp){
singleton->userCallback(data + (i*6), 6, id);//call the userCallback for this input
}
}
}
Input::Input(Util::Config * cfg) {
config = cfg;
JSON::Value option;
option["long"] = "json";
option["short"] = "j";
option["help"] = "Output MistIn info in JSON format, then exit.";
option["value"].append(0ll);
config->addOption("json", option);
option.null();
option["arg_num"] = 1ll;
option["arg"] = "string";
option["help"] = "Name of the input file or - for stdin";
option["value"].append("-");
config->addOption("input", option);
option.null();
option["arg_num"] = 2ll;
option["arg"] = "string";
option["help"] = "Name of the output file or - for stdout";
option["value"].append("-");
config->addOption("output", option);
option.null();
option["arg"] = "string";
option["short"] = "s";
option["long"] = "stream";
option["help"] = "The name of the stream that this connector will transmit.";
config->addOption("streamname", option);
option.null();
option["short"] = "p";
option["long"] = "player";
option["help"] = "Makes this connector into a player";
config->addOption("player", option);
packTime = 0;
lastActive = Util::epoch();
playing = 0;
playUntil = 0;
singleton = this;
isBuffer = false;
}
int Input::run() {
if (config->getBool("json")) {
std::cerr << capa.toString() << std::endl;
return 0;
}
if (!setup()) {
std::cerr << config->getString("cmd") << " setup failed." << std::endl;
return 0;
}
if (!readHeader()) {
std::cerr << "Reading header for " << config->getString("input") << " failed." << std::endl;
return 0;
}
parseHeader();
if (!config->getBool("player")){
//check filename for no -
if (config->getString("output") != "-"){
//output to dtsc
DTSC::Meta newMeta = myMeta;
newMeta.reset();
JSON::Value tempVal;
std::ofstream file(config->getString("output").c_str());
long long int bpos = 0;
seek(0);
getNext();
while (lastPack){
tempVal = lastPack.toJSON();
tempVal["bpos"] = bpos;
newMeta.update(tempVal);
file << std::string(lastPack.getData(), lastPack.getDataLen());
bpos += lastPack.getDataLen();
getNext();
}
//close file
file.close();
//create header
file.open((config->getString("output")+".dtsh").c_str());
file << newMeta.toJSON().toNetPacked();
file.close();
}else{
DEBUG_MSG(DLVL_FAIL,"No filename specified, exiting");
}
}else{
//begin player functionality
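//Live buffers reserve a fixed 8 MiB (8388608 bytes) metadata page, presumably because
//live metadata keeps changing; other inputs allocate exactly the serialized size.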
metaPage.init(config->getString("streamname"), (isBuffer ? 8388608 : myMeta.getSendLen()), true);
myMeta.writeTo(metaPage.mapped);
userPage.init(config->getString("streamname") + "_users", 30, true);
if (!isBuffer){
for (std::map<int,DTSC::Track>::iterator it = myMeta.tracks.begin(); it != myMeta.tracks.end(); it++){
bufferFrame(it->first, 0);
}
}
sem_t * waiting = sem_open(std::string("/wait_" + config->getString("streamname")).c_str(), O_CREAT | O_RDWR, ACCESSPERMS, 0);
if (waiting == SEM_FAILED){
DEBUG_MSG(DLVL_FAIL, "Failed to open semaphore - cancelling");
return -1;
}
sem_post(waiting);
sem_close(waiting);
DEBUG_MSG(DLVL_HIGH,"Pre-While");
long long int activityCounter = Util::getMS();
while ((Util::getMS() - activityCounter) < 10000){//10 second inactivity timeout
DEBUG_MSG(DLVL_HIGH, "Timer running");
Util::sleep(1000);
removeUnused();
userPage.parseEach(doNothing);
if (userPage.amount){
activityCounter = Util::getMS();
DEBUG_MSG(DLVL_HIGH, "Connected users: %d", userPage.amount);
}
}
DEBUG_MSG(DLVL_DEVEL,"Closing clean");
//end player functionality
}
return 0;
}
void Input::removeUnused(){
for (std::map<unsigned int, std::map<unsigned int, unsigned int> >::iterator it = pageCounter.begin(); it != pageCounter.end(); it++){
for (std::map<unsigned int, unsigned int>::iterator it2 = it->second.begin(); it2 != it->second.end(); it2++){
it2->second--;
}
bool change = true;
while (change){
change = false;
for (std::map<unsigned int, unsigned int>::iterator it2 = it->second.begin(); it2 != it->second.end(); it2++){
if (!it2->second){
DEBUG_MSG(DLVL_DEVEL, "Erasing page %u:%u", it->first, it2->first);
pagesByTrack[it->first].erase(it2->first);
pageCounter[it->first].erase(it2->first);
change = true;
break;
}
}
}
}
}
void Input::parseHeader(){
DEBUG_MSG(DLVL_DEVEL,"Parsing the header");
//Select all tracks for parsing header
selectedTracks.clear();
std::stringstream trackSpec;
for (std::map<int, DTSC::Track>::iterator it = myMeta.tracks.begin(); it != myMeta.tracks.end(); it++) {
DEBUG_MSG(DLVL_VERYHIGH, "Track %d encountered", it->first);
//selectedTracks.insert(it->first);
if (trackSpec.str() != ""){
trackSpec << " ";
}
trackSpec << it->first;
DEBUG_MSG(DLVL_VERYHIGH, "Trackspec now %s", trackSpec.str().c_str());
for (std::deque<DTSC::Key>::iterator it2 = it->second.keys.begin(); it2 != it->second.keys.end(); it2++){
keyTimes[it->first].insert(it2->getTime());
}
}
trackSelect(trackSpec.str());
std::map<int, DTSCPageData> curData;
std::map<int, booking> bookKeeping;
seek(0);
getNext();
while(lastPack){//loop through all
int tid = lastPack.getTrackId();
if (!bookKeeping.count(tid)){
bookKeeping[tid].first = 0;
bookKeeping[tid].curPart = 0;
bookKeeping[tid].curKey = 0;
curData[tid].lastKeyTime = 0xFFFFFFFF;
curData[tid].keyNum = 1;
curData[tid].partNum = 0;
curData[tid].dataSize = 0;
curData[tid].curOffset = 0;
curData[tid].firstTime = myMeta.tracks[tid].keys[0].getTime();
char tmpId[20];
sprintf(tmpId, "%d", tid);
indexPages[tid].init(config->getString("streamname") + tmpId, 8192, true);//Pages of 8kb in size, room for 512 parts.
}
if (myMeta.tracks[tid].keys[bookKeeping[tid].curKey].getParts() == curData[tid].partNum){
if (curData[tid].dataSize > 8388608) {
pagesByTrack[tid][bookKeeping[tid].first] = curData[tid];
bookKeeping[tid].first += curData[tid].keyNum;
curData[tid].keyNum = 0;
curData[tid].dataSize = 0;
curData[tid].firstTime = myMeta.tracks[tid].keys[bookKeeping[tid].curKey].getTime();
}
bookKeeping[tid].curKey++;
curData[tid].keyNum++;
curData[tid].partNum = 0;
}
curData[tid].dataSize += lastPack.getDataLen();
curData[tid].partNum ++;
bookKeeping[tid].curPart ++;
getNext(false);
}
for (std::map<int, DTSC::Track>::iterator it = myMeta.tracks.begin(); it != myMeta.tracks.end(); it++) {
if (curData.count(it->first) && !pagesByTrack[it->first].count(bookKeeping[it->first].first)){
pagesByTrack[it->first][bookKeeping[it->first].first] = curData[it->first];
}
if (!pagesByTrack.count(it->first)){
DEBUG_MSG(DLVL_WARN, "No pages for track %d found", it->first);
}else{
DEBUG_MSG(DLVL_HIGH, "Track %d (%s) split into %lu pages", it->first, myMeta.tracks[it->first].codec.c_str(), pagesByTrack[it->first].size());
for (std::map<int, DTSCPageData>::iterator it2 = pagesByTrack[it->first].begin(); it2 != pagesByTrack[it->first].end(); it2++){
}
}
}
}
bool Input::bufferFrame(int track, int keyNum){
DEBUG_MSG(DLVL_DONTEVEN, "Attempting to buffer %d:%d", track, keyNum);
if (!pagesByTrack.count(track)){
return false;
}
std::map<int, DTSCPageData> ::iterator it = pagesByTrack[track].upper_bound(keyNum);
if (it == pagesByTrack[track].begin()){
return false;
}
it --;
int pageNum = it->first;
pageCounter[track][pageNum] = 15;///Keep page in memory for 15 seconds after last use
if (!dataPages[track].count(pageNum)){
char pageId[100];
int pageIdLen = sprintf(pageId, "%s%d_%d", config->getString("streamname").c_str(), track, pageNum);
std::string tmpString(pageId, pageIdLen);
dataPages[track][pageNum].init(tmpString, it->second.dataSize, true);
DEBUG_MSG(DLVL_HIGH, "Buffering page %d through %d / %lu", pageNum, pageNum + it->second.keyNum, myMeta.tracks[track].keys.size());
std::stringstream trackSpec;
trackSpec << track;
trackSelect(trackSpec.str());
}else{
return true;
}
seek(myMeta.tracks[track].keys[pageNum].getTime());
long long unsigned int stopTime = myMeta.tracks[track].lastms + 1;
if ((int)myMeta.tracks[track].keys.size() > pageNum + it->second.keyNum){
stopTime = myMeta.tracks[track].keys[pageNum + it->second.keyNum].getTime();
}
DEBUG_MSG(DLVL_HIGH, "Playing from %ld to %llu", myMeta.tracks[track].keys[pageNum].getTime(), stopTime);
getNext();
while (lastPack && lastPack.getTime() < stopTime){
if (it->second.curOffset + lastPack.getDataLen() > pagesByTrack[track][pageNum].dataSize){
DEBUG_MSG(DLVL_WARN, "Trying to write %u bytes past the end of page %u/%u", lastPack.getDataLen(), track, pageNum);
return true;
}else{
memcpy(dataPages[track][pageNum].mapped + it->second.curOffset, lastPack.getData(), lastPack.getDataLen());
it->second.curOffset += lastPack.getDataLen();
}
getNext();
}
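//Register the page in the track's index: each 8-byte slot appears to hold the page
//number in its high 32 bits and the key count in its low 32 bits (network byte
//order); the first empty slot is used.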
for (int i = 0; i < indexPages[track].len / 8; i++){
if (((long long int*)indexPages[track].mapped)[i] == 0){
((long long int*)indexPages[track].mapped)[i] = (((long long int)htonl(pageNum)) << 32) | htonl(it->second.keyNum);
break;
}
}
return true;
}
bool Input::atKeyFrame(){
static std::map<int, int> lastSeen;
//not in keyTimes? We're not at a keyframe.
unsigned int c = keyTimes[lastPack.getTrackId()].count(lastPack.getTime());
if (!c){
return false;
}
//skip double times
if (lastSeen.count(lastPack.getTrackId()) && lastSeen[lastPack.getTrackId()] == lastPack.getTime()){
return false;
}
//set last seen, and return true
lastSeen[lastPack.getTrackId()] = lastPack.getTime();
return true;
}
void Input::play(int until) {
playing = -1;
playUntil = until;
initialTime = 0;
benchMark = Util::getMS();
}
void Input::playOnce() {
if (playing <= 0) {
playing = 1;
}
++playing;
benchMark = Util::getMS();
}
void Input::quitPlay() {
playing = 0;
}
}

84
src/input/input.h Normal file
View file

@@ -0,0 +1,84 @@
#include <set>
#include <map>
#include <cstdlib>
#include <mist/config.h>
#include <mist/json.h>
#include <mist/timing.h>
#include <mist/dtsc.h>
#include <mist/shared_memory.h>
namespace Mist {
struct DTSCPageData {
DTSCPageData() : keyNum(0), partNum(0), dataSize(0), curOffset(0), firstTime(0){}
int keyNum;///<The number of keyframes in this page.
int partNum;///<The number of parts in this page.
unsigned long long int dataSize;///<The full size this page should be.
unsigned long long int curOffset;///<The current write offset in the page.
unsigned long long int firstTime;///<The first timestamp of the page.
unsigned long lastKeyTime;///<The last key time encountered on this track.
};
struct booking {
int first;
int curKey;
int curPart;
};
class Input {
public:
Input(Util::Config * cfg);
int run();
virtual ~Input() {};
protected:
static void doNothing(char * data, size_t len, unsigned int id);
virtual bool setup() = 0;
virtual bool readHeader() = 0;
virtual bool atKeyFrame();
virtual void getNext(bool smart = true) {};
virtual void seek(int seekTime){};
void play(int until = 0);
void playOnce();
void quitPlay();
virtual void removeUnused();
virtual void trackSelect(std::string trackSpec){};
virtual void userCallback(char * data, size_t len, unsigned int id);
void parseHeader();
bool bufferFrame(int track, int keyNum);
unsigned int packTime;///Media-timestamp of the last packet.
int lastActive;///Timestamp of the last time we received or sent something.
int initialTime;
int playing;
unsigned int playUntil;
unsigned int benchMark;
std::set<int> selectedTracks;
bool isBuffer;
Util::Config * config;
JSON::Value capa;
Socket::Connection StatsSocket;
DTSC::Meta myMeta;
DTSC::Packet lastPack;
std::map<int,std::set<int> > keyTimes;
IPC::sharedPage metaPage;
//Create server for user pages
IPC::sharedServer userPage;
//TrackIndex pages
std::map<int, IPC::sharedPage> indexPages;
std::map<int, std::map<int, IPC::sharedPage> > dataPages;
//Page Overview
std::map<int, std::map<int, DTSCPageData> > pagesByTrack;
std::map<unsigned int, std::map<unsigned int, unsigned int> > pageCounter;
static Input * singleton;
};
}
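// Illustrative sketch (not part of this commit): a minimal do-nothing input built on
// the Input base class above. All names below are hypothetical; a real input, such as
// the buffered input in the next file, overrides these to provide actual media data.
#include "input.h"

namespace Mist {
  class inputExample : public Input {
    public:
      inputExample(Util::Config * cfg) : Input(cfg){
        capa["desc"] = "Example input that produces no packets";
      }
    protected:
      bool setup(){return true;}                //validate the "input" argument here
      bool readHeader(){return true;}           //fill myMeta with track info here
      void getNext(bool smart = true){}         //a real input would fill lastPack here
      void seek(int seekTime){}                 //jump to the keyframe nearest to seekTime (in ms)
      void trackSelect(std::string trackSpec){} //restrict parsing to the given tracks
  };
}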

274
src/input/input_buffer.cpp Normal file
View file

@@ -0,0 +1,274 @@
#include <iostream>
#include <cstring>
#include <cerrno>
#include <cstdlib>
#include <cstdio>
#include <string>
#include <mist/stream.h>
#include <mist/defines.h>
#include "input_buffer.h"
namespace Mist {
inputBuffer::inputBuffer(Util::Config * cfg) : Input(cfg) {
JSON::Value option;
option["arg"] = "integer";
option["long"] = "buffer";
option["short"] = "b";
option["help"] = "Buffertime for this stream.";
option["value"].append(30000LL);
config->addOption("bufferTime", option);
capa["desc"] = "Enables buffered live input";
capa["codecs"][0u][0u].append("*");
capa["codecs"][0u][1u].append("*");
capa["codecs"][0u][2u].append("*");
capa["codecs"][0u][3u].append("*");
capa["codecs"][0u][4u].append("*");
capa["codecs"][0u][5u].append("*");
capa["codecs"][0u][6u].append("*");
capa["codecs"][0u][7u].append("*");
capa["codecs"][0u][8u].append("*");
capa["codecs"][0u][9u].append("*");
DEBUG_MSG(DLVL_DEVEL, "Started MistInBuffer");
isBuffer = true;
singleton = this;
bufferTime = 0;
cutTime = 0;
}
void inputBuffer::updateMeta(){
long long unsigned int firstms = 0xFFFFFFFFFFFFFFFF;
long long unsigned int lastms = 0;
for (std::map<int,DTSC::Track>::iterator it = myMeta.tracks.begin(); it != myMeta.tracks.end(); it++){
if (it->second.firstms < firstms){
firstms = it->second.firstms;
}
if (it->second.lastms > lastms){
lastms = it->second.lastms;
}
}
myMeta.bufferWindow = lastms - firstms;
myMeta.writeTo(metaPage.mapped);
}
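///Removes the oldest key of the given track from the metadata: drops its parts and the
///key itself, recalculates firstms, and pops the first fragment if it is no longer
///fully buffered. When the first buffered page only held removed keys, its index entry
///is zeroed and the page is released.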
bool inputBuffer::removeKey(unsigned int tid){
if (myMeta.tracks[tid].keys.size() < 2 || myMeta.tracks[tid].fragments.size() < 2){
return false;
}
DEBUG_MSG(DLVL_HIGH, "Erasing key %d:%d", tid, myMeta.tracks[tid].keys[0].getNumber());
//remove all parts of this key
for (int i = 0; i < myMeta.tracks[tid].keys[0].getParts(); i++){
myMeta.tracks[tid].parts.pop_front();
}
//remove the key itself
myMeta.tracks[tid].keys.pop_front();
//re-calculate firstms
myMeta.tracks[tid].firstms = myMeta.tracks[tid].keys[0].getTime();
//delete the fragment if it's no longer fully buffered
if (myMeta.tracks[tid].fragments[0].getNumber() < myMeta.tracks[tid].keys[0].getNumber()){
myMeta.tracks[tid].fragments.pop_front();
myMeta.tracks[tid].missedFrags ++;
}
//if there is more than one page buffered for this track...
if (inputLoc[tid].size() > 1){
//Check if the first key starts on the second page or higher
if (myMeta.tracks[tid].keys[0].getNumber() >= (++(inputLoc[tid].begin()))->first){
//Find page in indexpage and null it
for (int i = 0; i < 8192; i += 8){
int thisKeyNum = ((((long long int *)(indexPages[tid].mapped + i))[0]) >> 32) & 0xFFFFFFFF;
if (thisKeyNum == htonl(pagesByTrack[tid].begin()->first) && ((((long long int *)(indexPages[tid].mapped + i))[0]) != 0)){
(((long long int *)(indexPages[tid].mapped + i))[0]) = 0;
}
}
DEBUG_MSG(DLVL_DEVEL, "Erasing track %d, keys %lu-%lu from buffer", tid, inputLoc[tid].begin()->first, inputLoc[tid].begin()->first + inputLoc[tid].begin()->second.keyNum - 1);
inputLoc[tid].erase(inputLoc[tid].begin());
dataPages[tid].erase(dataPages[tid].begin());
}else{
DEBUG_MSG(DLVL_HIGH, "%d still on first page (%lu - %lu)", myMeta.tracks[tid].keys[0].getNumber(), inputLoc[tid].begin()->first, inputLoc[tid].begin()->first + inputLoc[tid].begin()->second.keyNum - 1);
}
}
return true;
}
void inputBuffer::removeUnused(){
//find the earliest video keyframe stored
unsigned int firstVideo = 1;
for(std::map<int,DTSC::Track>::iterator it = myMeta.tracks.begin(); it != myMeta.tracks.end(); it++){
if (it->second.type == "video"){
if (it->second.firstms < firstVideo || firstVideo == 1){
firstVideo = it->second.firstms;
}
}
}
for(std::map<int,DTSC::Track>::iterator it = myMeta.tracks.begin(); it != myMeta.tracks.end(); it++){
//non-video tracks need to have a second keyframe that is <= firstVideo
if (it->second.type != "video"){
if (it->second.keys.size() < 2 || it->second.keys[1].getTime() > firstVideo){
continue;
}
}
//Buffer cutting
while(it->second.keys.size() > 1 && it->second.keys[0].getTime() < cutTime){
if (!removeKey(it->first)){break;}
}
//Buffer size management
while(it->second.keys.size() > 1 && (it->second.lastms - it->second.keys[1].getTime()) > bufferTime){
if (!removeKey(it->first)){break;}
}
}
updateMeta();
}
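///Handles an entry on the shared user page. The first four bytes hold a track ID: with
///the high bit set this is a "new track request", so a fresh track number and metadata
///page are handed out. For known IDs the pushed metadata is merged (identifier
///collisions are rejected by answering 0xFFFFFFFF), and the track's index and data
///pages are kept current, rolling over to a new data page once the current one grows
///past 8MB.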
void inputBuffer::userCallback(char * data, size_t len, unsigned int id) {
unsigned long tmp = ((long)(data[0]) << 24) | ((long)(data[1]) << 16) | ((long)(data[2]) << 8) | ((long)(data[3]));
if (tmp & 0x80000000) {
//Track is set to "New track request", assign new track id and create shared memory page
unsigned long tNum = (givenTracks.size() ? (*givenTracks.rbegin()) : 0) + 1;
///\todo Neatify this
data[0] = (tNum >> 24) & 0xFF;
data[1] = (tNum >> 16) & 0xFF;
data[2] = (tNum >> 8) & 0xFF;
data[3] = (tNum) & 0xFF;
givenTracks.insert(tNum);
char tmpChr[100];
long tmpLen = sprintf(tmpChr, "liveStream_%s%lu", config->getString("streamname").c_str(), tNum);
metaPages[tNum].init(std::string(tmpChr, tmpLen), 8388608, true);
} else {
unsigned long tNum = ((long)(data[0]) << 24) | ((long)(data[1]) << 16) | ((long)(data[2]) << 8) | ((long)(data[3]));
if (!myMeta.tracks.count(tNum)) {
DEBUG_MSG(DLVL_DEVEL, "Tracknum not in meta: %lu, from user %u", tNum, id);
if (metaPages[tNum].mapped) {
if (metaPages[tNum].mapped[0] == 'D' && metaPages[tNum].mapped[1] == 'T') {
unsigned int len = ntohl(((int *)metaPages[tNum].mapped)[1]);
unsigned int i = 0;
JSON::Value tmpMeta;
JSON::fromDTMI((const unsigned char *)metaPages[tNum].mapped + 8, len, i, tmpMeta);
DTSC::Meta tmpTrack(tmpMeta);
int oldTNum = tmpTrack.tracks.begin()->first;
bool collision = false;
for (std::map<int, DTSC::Track>::iterator it = myMeta.tracks.begin(); it != myMeta.tracks.end(); it++) {
if (it->first == tNum) {
continue;
}
if (it->second.getIdentifier() == tmpTrack.tracks[oldTNum].getIdentifier()) {
collision = true;
break;
}
}
if (collision) {
/// \todo Erasing page for now, should do more here
DEBUG_MSG(DLVL_DEVEL, "Collision detected! Erasing page for now, should do more here");
metaPages.erase(tNum);
data[0] = 0xFF;
data[1] = 0xFF;
data[2] = 0xFF;
data[3] = 0xFF;
} else {
if (!myMeta.tracks.count(tNum)) {
myMeta.tracks[tNum] = tmpTrack.tracks[oldTNum];
data[4] = 0x00;
data[5] = 0x00;
updateMeta();
char firstPage[100];
sprintf(firstPage, "%s%lu", config->getString("streamname").c_str(), tNum);
indexPages[tNum].init(firstPage, 8192, true);
((long long int *)indexPages[tNum].mapped)[0] = htonl(1000);
///\todo Fix for non-first-key-pushing
sprintf(firstPage, "%s%lu_0", config->getString("streamname").c_str(), tNum);
///\todo Make size dynamic / other solution. 25mb is too much.
dataPages[tNum][0].init(firstPage, 26214400, true);
}
}
}
}
} else {
//First check if the previous page has been finished:
if (!inputLoc[tNum].count(dataPages[tNum].rbegin()->first) || !inputLoc[tNum][dataPages[tNum].rbegin()->first].curOffset){
if (dataPages[tNum].size() > 1){
int prevPage = (++dataPages[tNum].rbegin())->first;
//update previous page.
updateMetaFromPage(tNum, prevPage);
}
}
//update current page
int curPage = dataPages[tNum].rbegin()->first;
updateMetaFromPage(tNum, curPage);
if (inputLoc[tNum][curPage].curOffset > 8388608) {
//create new page if the current one is > 8MB
int nxtPage = curPage + inputLoc[tNum][curPage].keyNum;
char nextPageName[100];
sprintf(nextPageName, "%s%lu_%d", config->getString("streamname").c_str(), tNum, nxtPage);
dataPages[tNum][nxtPage].init(nextPageName, 20971520, true);
bool createdNew = false;
for (int i = 0; i < 8192; i += 8){
int thisKeyNum = ((((long long int *)(indexPages[tNum].mapped + i))[0]) >> 32) & 0xFFFFFFFF;
if (thisKeyNum == htonl(curPage)){
if((ntohl((((long long int*)(indexPages[tNum].mapped + i))[0]) & 0xFFFFFFFF) == 1000)){
((long long int *)(indexPages[tNum].mapped + i))[0] &= 0xFFFFFFFF00000000;
((long long int *)(indexPages[tNum].mapped + i))[0] |= htonl(inputLoc[tNum][curPage].keyNum);
}
}
if (!createdNew && (((long long int*)(indexPages[tNum].mapped + i))[0]) == 0){
createdNew = true;
((long long int *)(indexPages[tNum].mapped + i))[0] = (((long long int)htonl(nxtPage)) << 32) | htonl(1000);
}
}
}
}
}
}
void inputBuffer::updateMetaFromPage(int tNum, int pageNum){
DTSC::Packet tmpPack;
tmpPack.reInit(dataPages[tNum][pageNum].mapped + inputLoc[tNum][pageNum].curOffset, 0);
while (tmpPack) {
myMeta.update(tmpPack);
if (inputLoc[tNum][pageNum].firstTime == 0){
inputLoc[tNum][pageNum].firstTime = tmpPack.getTime();
}
//Overloaded use of .firstTime to indicate the last key time on non-video tracks.
if (myMeta.tracks[tNum].type == "video"){
inputLoc[tNum][pageNum].keyNum += tmpPack.getFlag("keyframe");
}else{
if ((tmpPack.getTime() > 5000) && ((tmpPack.getTime() - 5000) > inputLoc[tNum][pageNum].firstTime)){
inputLoc[tNum][pageNum].keyNum ++;
}
}
inputLoc[tNum][pageNum].curOffset += tmpPack.getDataLen();
tmpPack.reInit(dataPages[tNum][pageNum].mapped + inputLoc[tNum][pageNum].curOffset, 0);
}
updateMeta();
}
bool inputBuffer::setup() {
if (!bufferTime){
bufferTime = config->getInteger("bufferTime");
}
JSON::Value servConf = JSON::fromFile(Util::getTmpFolder() + "streamlist");
if (servConf.isMember("streams") && servConf["streams"].isMember(config->getString("streamname"))){
JSON::Value & streamConfig = servConf["streams"][config->getString("streamname")];
if (streamConfig.isMember("DVR") && streamConfig["DVR"].asInt()){
if (bufferTime != streamConfig["DVR"].asInt()){
DEBUG_MSG(DLVL_DEVEL, "Setting bufferTime from %u to new value of %lli", bufferTime, streamConfig["DVR"].asInt());
bufferTime = streamConfig["DVR"].asInt();
}
}
}
return true;
}
bool inputBuffer::readHeader() {
return true;
}
void inputBuffer::getNext(bool smart) {}
void inputBuffer::seek(int seekTime) {}
void inputBuffer::trackSelect(std::string trackSpec) {}
}

33
src/input/input_buffer.h Normal file

@ -0,0 +1,33 @@
#include "input.h"
#include <mist/dtsc.h>
#include <mist/shared_memory.h>
namespace Mist {
class inputBuffer : public Input {
public:
inputBuffer(Util::Config * cfg);
private:
unsigned int bufferTime;
unsigned int cutTime;
protected:
//Private Functions
bool setup();
void updateMeta();
bool readHeader();
void getNext(bool smart = true);
void updateMetaFromPage(int tNum, int pageNum);
void seek(int seekTime);
void trackSelect(std::string trackSpec);
bool removeKey(unsigned int tid);
void removeUnused();
void userCallback(char * data, size_t len, unsigned int id);
std::set<unsigned long> givenTracks;
std::map<unsigned long, IPC::sharedPage> metaPages;
std::map<unsigned long, std::map<unsigned long, DTSCPageData> > inputLoc;
inputBuffer * singleton;
};
}
typedef Mist::inputBuffer mistIn;

90
src/input/input_dtsc.cpp Normal file

@ -0,0 +1,90 @@
#include <iostream>
#include <cstring>
#include <cerrno>
#include <cstdlib>
#include <cstdio>
#include <string>
#include <mist/stream.h>
#include <mist/defines.h>
#include "input_dtsc.h"
namespace Mist {
inputDTSC::inputDTSC(Util::Config * cfg) : Input(cfg) {
capa["decs"] = "Enables DTSC Input";
capa["codecs"][0u][0u].append("H264");
capa["codecs"][0u][0u].append("H263");
capa["codecs"][0u][0u].append("VP6");
capa["codecs"][0u][0u].append("theora");
capa["codecs"][0u][1u].append("AAC");
capa["codecs"][0u][1u].append("MP3");
capa["codecs"][0u][1u].append("vorbis");
}
bool inputDTSC::setup() {
if (config->getString("input") == "-") {
std::cerr << "Input from stream not yet supported" << std::endl;
return false;
}
if (config->getString("output") != "-") {
std::cerr << "Output to non-stdout not yet supported" << std::endl;
}
//open File
inFile = DTSC::File(config->getString("input"));
if (!inFile) {
return false;
}
return true;
}
bool inputDTSC::readHeader() {
if (!inFile) {
return false;
}
DTSC::File tmp(config->getString("input") + ".dtsh");
if (tmp) {
myMeta = tmp.getMeta();
DEBUG_MSG(DLVL_DEVEL,"Meta read in with %lu tracks", myMeta.tracks.size());
return true;
}
if (inFile.getMeta().moreheader < 0 || inFile.getMeta().tracks.size() == 0) {
DEBUG_MSG(DLVL_FAIL,"Missing external header file");
return false;
}
myMeta = DTSC::Meta(inFile.getMeta());
DEBUG_MSG(DLVL_DEVEL,"Meta read in with %lu tracks", myMeta.tracks.size());
return true;
}
void inputDTSC::getNext(bool smart) {
if (smart){
inFile.seekNext();
}else{
inFile.parseNext();
}
lastPack = inFile.getPacket();
}
void inputDTSC::seek(int seekTime) {
inFile.seek_time(seekTime);
initialTime = 0;
playUntil = 0;
}
void inputDTSC::trackSelect(std::string trackSpec) {
selectedTracks.clear();
long long unsigned int index;
while (trackSpec != "") {
index = trackSpec.find(' ');
selectedTracks.insert(atoi(trackSpec.substr(0, index).c_str()));
if (index != std::string::npos) {
trackSpec.erase(0, index + 1);
} else {
trackSpec = "";
}
}
inFile.selectTracks(selectedTracks);
}
}

22
src/input/input_dtsc.h Normal file

@ -0,0 +1,22 @@
#include "input.h"
#include <mist/dtsc.h>
namespace Mist {
class inputDTSC : public Input {
public:
inputDTSC(Util::Config * cfg);
protected:
//Private Functions
bool setup();
bool readHeader();
void getNext(bool smart = true);
void seek(int seekTime);
void trackSelect(std::string trackSpec);
DTSC::File inFile;
};
}
typedef Mist::inputDTSC mistIn;

130
src/input/input_flv.cpp Normal file

@ -0,0 +1,130 @@
#include <iostream>
#include <fstream>
#include <cstring>
#include <cerrno>
#include <cstdlib>
#include <cstdio>
#include <string>
#include <mist/stream.h>
#include <mist/flv_tag.h>
#include <mist/defines.h>
#include "input_flv.h"
namespace Mist {
inputFLV::inputFLV(Util::Config * cfg) : Input(cfg) {
capa["decs"] = "Enables FLV Input";
capa["codecs"][0u][0u].append("H264");
capa["codecs"][0u][0u].append("H263");
capa["codecs"][0u][0u].append("VP6");
capa["codecs"][0u][1u].append("AAC");
capa["codecs"][0u][1u].append("MP3");
}
bool inputFLV::setup() {
if (config->getString("input") == "-") {
std::cerr << "Input from stream not yet supported" << std::endl;
return false;
}
if (config->getString("output") != "-") {
std::cerr << "Output to non-stdout not yet supported" << std::endl;
}
//open File
inFile = fopen(config->getString("input").c_str(), "r");
if (!inFile) {
return false;
}
return true;
}
bool inputFLV::readHeader() {
JSON::Value lastPack;
if (!inFile) {
return false;
}
//See whether a separate header file exists.
DTSC::File tmp(config->getString("input") + ".dtsh");
if (tmp){
myMeta = tmp.getMeta();
return true;
}
//Create header file from FLV data
fseek(inFile, 13, SEEK_SET);
FLV::Tag tmpTag;
long long int lastBytePos = 13;
while (!feof(inFile) && !FLV::Parse_Error){
if (tmpTag.FileLoader(inFile)){
lastPack.null();
lastPack = tmpTag.toJSON(myMeta);
lastPack["bpos"] = lastBytePos;
myMeta.update(lastPack);
lastBytePos = ftell(inFile);
}
}
if (FLV::Parse_Error){
std::cerr << FLV::Error_Str << std::endl;
return false;
}
std::ofstream oFile(std::string(config->getString("input") + ".dtsh").c_str());
oFile << myMeta.toJSON().toNetPacked();
oFile.close();
return true;
}
void inputFLV::getNext(bool smart) {
static JSON::Value thisPack;
thisPack.null();
long long int lastBytePos = ftell(inFile);
FLV::Tag tmpTag;
while (!feof(inFile) && !FLV::Parse_Error){
if (tmpTag.FileLoader(inFile)){
thisPack = tmpTag.toJSON(myMeta);
thisPack["bpos"] = lastBytePos;
if ( !selectedTracks.count(thisPack["trackid"].asInt())){
getNext();
}
break;
}
}
if (FLV::Parse_Error){
std::cerr << FLV::Error_Str << std::endl;
thisPack.null();
lastPack.null();
return;
}
std::string tmpStr = thisPack.toNetPacked();
lastPack.reInit(tmpStr.data(), tmpStr.size());
}
void inputFLV::seek(int seekTime) {
//We will seek to the corresponding keyframe of the video track if selected, otherwise audio keyframe.
//Flv files are never multi-track, so track 1 is video, track 2 is audio.
int trackSeek = (selectedTracks.count(1) ? 1 : 2);
size_t seekPos = myMeta.tracks[trackSeek].keys[0].getBpos();
for (int i = 0; i < myMeta.tracks[trackSeek].keys.size(); i++){
if (myMeta.tracks[trackSeek].keys[i].getTime() > seekTime){
DEBUG_MSG(DLVL_WARN, "Seeking to keyframe %d on track %d, timestamp %ld, bytepos %lu", i, trackSeek, myMeta.tracks[trackSeek].keys[i].getTime(), seekPos);
break;
}
seekPos = myMeta.tracks[trackSeek].keys[i].getBpos();
}
fseek(inFile, seekPos, SEEK_SET);
}
void inputFLV::trackSelect(std::string trackSpec) {
selectedTracks.clear();
long long int index;
while (trackSpec != "") {
index = trackSpec.find(' ');
selectedTracks.insert(atoi(trackSpec.substr(0, index).c_str()));
DEBUG_MSG(DLVL_WARN, "Added track %d, index = %lld, (index == npos) = %d", atoi(trackSpec.substr(0, index).c_str()), index, index == std::string::npos);
if (index != std::string::npos) {
trackSpec.erase(0, index + 1);
} else {
trackSpec = "";
}
}
}
}

21
src/input/input_flv.h Normal file

@ -0,0 +1,21 @@
#include "input.h"
#include <mist/dtsc.h>
namespace Mist {
class inputFLV : public Input {
public:
inputFLV(Util::Config * cfg);
protected:
//Private Functions
bool setup();
bool readHeader();
void getNext(bool smart = true);
void seek(int seekTime);
void trackSelect(std::string trackSpec);
FILE * inFile;
};
}
typedef Mist::inputFLV mistIn;

274
src/input/input_ogg.cpp Normal file

@ -0,0 +1,274 @@
#include <iostream>
#include <fstream>
#include <cstring>
#include <cerrno>
#include <cstdlib>
#include <cstdio>
#include <string>
#include <mist/stream.h>
#include <mist/ogg.h>
#include <mist/defines.h>
#include <mist/bitstream.h>
#include "input_ogg.h"
namespace Mist {
inputOGG::inputOGG(Util::Config * cfg) : Input(cfg) {
capa["decs"] = "Enables OGG Input";
capa["codecs"][0u][0u].append("theora");
capa["codecs"][0u][1u].append("vorbis");
}
bool inputOGG::setup() {
if (config->getString("input") == "-") {
std::cerr << "Input from stream not yet supported" << std::endl;
return false;
}
if (config->getString("output") != "-") {
std::cerr << "Output to non-stdout not yet supported" << std::endl;
}
//open File
inFile = fopen(config->getString("input").c_str(), "r");
if (!inFile) {
return false;
}
return true;
}
void inputOGG::parseBeginOfStream(OGG::Page & bosPage) {
long long int tid = snum2tid.size() + 1;
snum2tid[bosPage.getBitstreamSerialNumber()] = tid;
if (!memcmp(bosPage.getFullPayload() + 1, "theora", 6)) {
oggTracks[tid].codec = THEORA;
theora::header tmpHead(bosPage.getFullPayload(), bosPage.getPayloadSize());
oggTracks[tid].msPerFrame = (double)(tmpHead.getFRD() * 1000) / tmpHead.getFRN();
}
if (!memcmp(bosPage.getFullPayload() + 1, "vorbis", 6)) {
oggTracks[tid].codec = VORBIS;
vorbis::header tmpHead(bosPage.getFullPayload(), bosPage.getPayloadSize());
oggTracks[tid].msPerFrame = (double)1000 / ntohl(tmpHead.getAudioSampleRate());
}
}
bool inputOGG::readHeader() {
JSON::Value lastPack;
if (!inFile) {
return false;
}
//See whether a separate header file exists.
DTSC::File tmp(config->getString("input") + ".dtsh");
if (tmp) {
myMeta = tmp.getMeta();
return true;
}
//Create header file from OGG data
fseek(inFile, 0, SEEK_SET);
OGG::Page tmpPage;
long long int lastBytePos = 0;
while (tmpPage.read(inFile)) {
DEBUG_MSG(DLVL_WARN,"Read a page");
if (tmpPage.getHeaderType() & OGG::BeginOfStream){
parseBeginOfStream(tmpPage);
DEBUG_MSG(DLVL_WARN,"Read BOS page for stream %lu, now track %lld", tmpPage.getBitstreamSerialNumber(), snum2tid[tmpPage.getBitstreamSerialNumber()]);
}
int offset = 0;
long long int tid = snum2tid[tmpPage.getBitstreamSerialNumber()];
for (std::deque<unsigned int>::iterator it = tmpPage.getSegmentTableDeque().begin(); it != tmpPage.getSegmentTableDeque().end(); it++) {
if (oggTracks[tid].parsedHeaders) {
DEBUG_MSG(DLVL_WARN,"Parsing a page segment on track %lld", tid);
if ((it == (tmpPage.getSegmentTableDeque().end() - 1)) && (int)(tmpPage.getPageSegments()) == 255 && (int)(tmpPage.getSegmentTable()[254]) == 255) {
oggTracks[tid].contBuffer.append(tmpPage.getFullPayload() + offset, (*it));
} else {
lastPack["trackid"] = tid;
lastPack["time"] = (long long)oggTracks[tid].lastTime;
if (oggTracks[tid].contBuffer.size()) {
lastPack["data"] = oggTracks[tid].contBuffer + std::string(tmpPage.getFullPayload() + offset, (*it));
oggTracks[tid].contBuffer.clear();
} else {
lastPack["data"] = std::string(tmpPage.getFullPayload() + offset, (*it));
}
if (oggTracks[tid].codec == VORBIS) {
unsigned int blockSize = 0;
Utils::bitstreamLSBF packet;
packet.append(lastPack["data"].asString());
if (!packet.get(1)) {
blockSize = oggTracks[tid].blockSize[oggTracks[tid].vModes[packet.get(vorbis::ilog(oggTracks[tid].vModes.size() - 1))].blockFlag];
} else {
DEBUG_MSG(DLVL_WARN, "Packet type != 0");
}
oggTracks[tid].lastTime += oggTracks[tid].msPerFrame * (blockSize / oggTracks[tid].channels);
}
if (oggTracks[tid].codec == THEORA) {
oggTracks[tid].lastTime += oggTracks[tid].msPerFrame;
if (it == (tmpPage.getSegmentTableDeque().end() - 1)) {
if (oggTracks[tid].idHeader.parseGranuleUpper(oggTracks[tid].lastGran) != oggTracks[tid].idHeader.parseGranuleUpper(tmpPage.getGranulePosition())) {
lastPack["keyframe"] = 1ll;
oggTracks[tid].lastGran = tmpPage.getGranulePosition();
} else {
lastPack["interframe"] = 1ll;
}
}
}
lastPack["bpos"] = 0ll;
DEBUG_MSG(DLVL_WARN,"Parsed a packet of track %lld, new timestamp %f", tid, oggTracks[tid].lastTime);
myMeta.update(lastPack);
}
} else {
//Parsing headers
switch (oggTracks[tid].codec) {
case THEORA: {
theora::header tmpHead(tmpPage.getFullPayload() + offset, (*it));
DEBUG_MSG(DLVL_WARN,"Theora header, type %d", tmpHead.getHeaderType());
switch (tmpHead.getHeaderType()) {
case 0: {
oggTracks[tid].idHeader = tmpHead;
myMeta.tracks[tid].height = tmpHead.getPICH();
myMeta.tracks[tid].width = tmpHead.getPICW();
myMeta.tracks[tid].idHeader = std::string(tmpPage.getFullPayload() + offset, (*it));
break;
}
case 1: {
myMeta.tracks[tid].commentHeader = std::string(tmpPage.getFullPayload() + offset, (*it));
break;
}
case 2: {
myMeta.tracks[tid].codec = "theora";
myMeta.tracks[tid].trackID = tid;
myMeta.tracks[tid].type = "video";
myMeta.tracks[tid].init = std::string(tmpPage.getFullPayload() + offset, (*it));
oggTracks[tid].parsedHeaders = true;
oggTracks[tid].lastGran = 0;
break;
}
}
break;
}
case VORBIS: {
vorbis::header tmpHead(tmpPage.getFullPayload() + offset, (*it));
DEBUG_MSG(DLVL_WARN,"Vorbis header, type %d", tmpHead.getHeaderType());
switch (tmpHead.getHeaderType()) {
case 1: {
myMeta.tracks[tid].channels = tmpHead.getAudioChannels();
myMeta.tracks[tid].idHeader = std::string(tmpPage.getFullPayload() + offset, (*it));
oggTracks[tid].channels = tmpHead.getAudioChannels();
oggTracks[tid].blockSize[0] = 1 << tmpHead.getBlockSize0();
oggTracks[tid].blockSize[1] = 1 << tmpHead.getBlockSize1();
break;
}
case 3: {
myMeta.tracks[tid].commentHeader = std::string(tmpPage.getFullPayload() + offset, (*it));
break;
}
case 5: {
myMeta.tracks[tid].codec = "vorbis";
myMeta.tracks[tid].trackID = tid;
myMeta.tracks[tid].type = "audio";
DEBUG_MSG(DLVL_WARN,"Set default values");
myMeta.tracks[tid].init = std::string(tmpPage.getFullPayload() + offset, (*it));
DEBUG_MSG(DLVL_WARN,"Set init values");
oggTracks[tid].vModes = tmpHead.readModeDeque(oggTracks[tid].channels);
DEBUG_MSG(DLVL_WARN,"Set vmodevalues");
oggTracks[tid].parsedHeaders = true;
break;
}
}
break;
}
}
offset += (*it);
}
}
lastBytePos = ftell(inFile);
DEBUG_MSG(DLVL_WARN,"End of Loop, @ filepos %lld", lastBytePos);
}
DEBUG_MSG(DLVL_WARN,"Exited while loop");
std::ofstream oFile(std::string(config->getString("input") + ".dtsh").c_str());
oFile << myMeta.toJSON().toNetPacked();
oFile.close();
return true;
}
bool inputOGG::seekNextPage(int tid){
fseek(inFile, oggTracks[tid].lastPageOffset, SEEK_SET);
bool res = true;
do {
res = oggTracks[tid].myPage.read(inFile);
} while(res && snum2tid[oggTracks[tid].myPage.getBitstreamSerialNumber()] != tid);
oggTracks[tid].lastPageOffset = ftell(inFile);
oggTracks[tid].nxtSegment = 0;
return res;
}
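//Note: this getNext appears unfinished in this commit. The intent seems to be to merge
//segments from all selected tracks through sortedSegments (ordered by timestamp, then
//track ID), extending or replacing the earliest queued segment as pages are read, but
//no packet is handed to lastPack yet.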
void inputOGG::getNext(bool smart) {
if (!sortedSegments.size()){
for (std::set<int>::iterator it = selectedTracks.begin(); it != selectedTracks.end(); it++){
seekNextPage((*it));
}
}
if (sortedSegments.size()){
int tid = (*(sortedSegments.begin())).tid;
bool addedPacket = false;
while (!addedPacket){
segPart tmpPart;
if (oggTracks[tid].myPage.getSegment(oggTracks[tid].nxtSegment, tmpPart.segData, tmpPart.len)){
if (oggTracks[tid].nxtSegment == 0 && (oggTracks[tid].myPage.getHeaderType() & OGG::Continued)){
segment tmpSeg = *(sortedSegments.begin());
tmpSeg.parts.push_back(tmpPart);
sortedSegments.erase(sortedSegments.begin());
sortedSegments.insert(tmpSeg);
}else{
segment tmpSeg;
tmpSeg.parts.push_back(tmpPart);
tmpSeg.tid = tid;
tmpSeg.time = oggTracks[tid].lastTime;
if (oggTracks[tid].codec == VORBIS) {
std::string data;
data.append(tmpPart.segData, tmpPart.len);
unsigned int blockSize = 0;
Utils::bitstreamLSBF packet;
packet.append(data);
if (!packet.get(1)) {
blockSize = oggTracks[tid].blockSize[oggTracks[tid].vModes[packet.get(vorbis::ilog(oggTracks[tid].vModes.size() - 1))].blockFlag];
}
oggTracks[tid].lastTime += oggTracks[tid].msPerFrame * (blockSize / oggTracks[tid].channels);
}
if (oggTracks[tid].codec == THEORA) {
oggTracks[tid].lastTime += oggTracks[tid].msPerFrame;
}
sortedSegments.insert(tmpSeg);
addedPacket = true;
}
oggTracks[tid].nxtSegment ++;
}else{
if (!seekNextPage(tid)){
break;
}
}
}
std::string data;
}
}
void inputOGG::seek(int seekTime) {
DEBUG_MSG(DLVL_WARN,"Seeking is not yet supported for ogg files");
//Do nothing, seeking is not yet implemented for ogg
}
void inputOGG::trackSelect(std::string trackSpec) {
selectedTracks.clear();
long long int index;
while (trackSpec != "") {
index = trackSpec.find(' ');
selectedTracks.insert(atoi(trackSpec.substr(0, index).c_str()));
DEBUG_MSG(DLVL_WARN, "Added track %d, index = %lld, (index == npos) = %d", atoi(trackSpec.substr(0, index).c_str()), index, index == std::string::npos);
if (index != std::string::npos) {
trackSpec.erase(0, index + 1);
} else {
trackSpec = "";
}
}
}
}

65
src/input/input_ogg.h Normal file

@ -0,0 +1,65 @@
#include "input.h"
#include <mist/dtsc.h>
#include <mist/ogg.h>
namespace Mist {
enum codecType {THEORA, VORBIS};
struct segPart{
char * segData;
unsigned int len;
};
struct segment{
bool operator < (const segment & rhs) const {
return time < rhs.time || (time == rhs.time && tid < rhs.tid);
}
std::vector<segPart> parts;
unsigned int time;
unsigned int tid;
};
class oggTrack{
public:
oggTrack() : lastTime(0), parsedHeaders(false), lastPageOffset(0), nxtSegment(0) { }
codecType codec;
std::string contBuffer;//buffer for continuing pages
double lastTime;
long long unsigned int lastGran;
bool parsedHeaders;
double msPerFrame;
long long unsigned int lastPageOffset;
OGG::Page myPage;
unsigned int nxtSegment;
//Codec specific elements
//theora
theora::header idHeader;
//vorbis
std::deque<vorbis::mode> vModes;
char channels;
long long unsigned int blockSize[2];
};
class inputOGG : public Input {
public:
inputOGG(Util::Config * cfg);
protected:
//Private Functions
bool setup();
bool readHeader();
bool seekNextPage(int tid);
void getNext(bool smart = true);
void seek(int seekTime);
void trackSelect(std::string trackSpec);
void parseBeginOfStream(OGG::Page & bosPage);
FILE * inFile;
std::map<long long int, long long int> snum2tid;
std::map<long long int, oggTrack> oggTracks;
std::set<segment> sortedSegments;
};
}
typedef Mist::inputOGG mistIn;

62
src/input/mist_in.cpp Normal file

@ -0,0 +1,62 @@
#include <errno.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <sys/wait.h>
#include <unistd.h>
#include <semaphore.h>
#include INPUTTYPE
#include <mist/config.h>
#include <mist/defines.h>
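/// Generic input wrapper. It takes a per-stream lock ("/lock_<streamname>") so only
/// one input runs per stream, forks the actual input process, and restarts it whenever
/// it exits uncleanly. Between runs the "/wait_<streamname>" semaphore is drained back
/// to zero, presumably so a restarted input starts from a clean state.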
int main(int argc, char * argv[]) {
Util::Config conf(argv[0], PACKAGE_VERSION);
mistIn conv(&conf);
if (conf.parseArgs(argc, argv)) {
sem_t * playerLock = sem_open(std::string("/lock_" + conf.getString("streamname")).c_str(), O_CREAT | O_RDWR, ACCESSPERMS, 1);
if (sem_trywait(playerLock) == -1){
DEBUG_MSG(DLVL_DEVEL, "A player for stream %s is already running", conf.getString("streamname").c_str());
return 1;
}
conf.activate();
while (conf.is_active){
int pid = fork();
if (pid == 0){
sem_close(playerLock);
return conv.run();
}
if (pid == -1){
DEBUG_MSG(DLVL_FAIL, "Unable to spawn player process");
sem_post(playerLock);
return 2;
}
//wait for the process to exit
int status;
while (waitpid(pid, &status, 0) != pid && errno == EINTR) continue;
//clean up the semaphore by waiting for it, if it's non-zero
sem_t * waiting = sem_open(std::string("/wait_" + conf.getString("streamname")).c_str(), O_CREAT | O_RDWR, ACCESSPERMS, 0);
if (waiting == SEM_FAILED){
DEBUG_MSG(DLVL_FAIL, "Failed to open semaphore - cancelling");
return -1;
}
int sem_val = 0;
sem_getvalue(waiting, &sem_val);
while (sem_val){
while (sem_wait(waiting) == -1 && errno == EINTR) continue;
sem_getvalue(waiting, &sem_val);
}
sem_close(waiting);
//if the exit was clean, don't restart it
if (WIFEXITED(status) && (WEXITSTATUS(status) == 0)){
DEBUG_MSG(DLVL_DEVEL, "Finished player succesfully");
break;
}
}
sem_post(playerLock);
sem_close(playerLock);
}
return 0;
}

21
src/output/mist_out.cpp Normal file

@ -0,0 +1,21 @@
#include OUTPUTTYPE
#include <mist/config.h>
#include <mist/socket.h>
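///Handler passed to serveForkedSocket: wraps the accepted connection in the
///protocol-specific output class and runs it to completion.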
int spawnForked(Socket::Connection & S){
mistOut tmp(S);
return tmp.run();
}
int main(int argc, char * argv[]) {
Util::Config conf(argv[0], PACKAGE_VERSION);
mistOut::init(&conf);
if (conf.parseArgs(argc, argv)) {
if (conf.getBool("json")) {
std::cout << mistOut::capa.toString() << std::endl;
return -1;
}
conf.serveForkedSocket(spawnForked);
}
return 0;
}

493
src/output/output.cpp Normal file

@ -0,0 +1,493 @@
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <sys/wait.h>
#include <unistd.h>
#include <semaphore.h>
#include <iterator> //std::distance
#include <mist/stream.h>
#include <mist/defines.h>
#include <mist/http_parser.h>
#include <mist/timing.h>
#include "output.h"
namespace Mist {
Util::Config * Output::config = NULL;
JSON::Value Output::capa = JSON::Value();
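//Helpers for reading raw DTSC packets straight off a mapped page: the packet length
//is the second 32-bit word (network byte order) and the timestamp is the 64-bit
//big-endian value at byte offset 12.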
int getDTSCLen(char * mapped, long long int offset){
return ntohl(((int*)(mapped + offset))[1]);
}
long long int getDTSCTime(char * mapped, long long int offset){
//read the timestamp as unsigned bytes to avoid sign extension on values >= 0x80
unsigned char * timePoint = (unsigned char *)(mapped + offset + 12);
return ((long long int)timePoint[0] << 56) | ((long long int)timePoint[1] << 48) | ((long long int)timePoint[2] << 40) | ((long long int)timePoint[3] << 32) | ((long long int)timePoint[4] << 24) | ((long long int)timePoint[5] << 16) | ((long long int)timePoint[6] << 8) | timePoint[7];
}
Output::Output(Socket::Connection & conn) : myConn(conn) {
firstTime = 0;
parseData = false;
wantRequest = true;
isInitialized = false;
isBlocking = false;
lastStats = 0;
maxSkipAhead = 7500;
minSkipAhead = 5000;
realTime = 1000;
if (myConn){
setBlocking(true);
}else{
DEBUG_MSG(DLVL_WARN, "Warning: MistOut created with closed socket!");
}
sentHeader = false;
}
void Output::setBlocking(bool blocking){
isBlocking = blocking;
myConn.setBlocking(isBlocking);
}
Output::~Output(){
statsPage.finish();
playerConn.finish();
}
void Output::updateMeta(){
unsigned int i = 0;
//read metadata from page to myMeta variable
JSON::Value jsonMeta;
JSON::fromDTMI((const unsigned char*)streamIndex.mapped + 8, streamIndex.len - 8, i, jsonMeta);
myMeta = DTSC::Meta(jsonMeta);
}
/// Called when stream initialization has failed.
/// The standard implementation will set isInitialized to false and close the client connection,
/// thus causing the process to exit cleanly.
void Output::onFail(){
isInitialized = false;
myConn.close();
}
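/// Opens the shared metadata page for the stream, waiting on the "/wait_<stream>"
/// semaphore (after asking Util::Stream::getStream to start the input) if it is not
/// available yet. Once connected, it also opens the statistics and user pages and
/// selects the best matching combination of tracks for this output's codec support.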
void Output::initialize(){
if (isInitialized){
return;
}
if (streamIndex.mapped){
return;
}
isInitialized = true;
streamIndex.init(streamName,0,false,false);
if (!streamIndex.mapped){
sem_t * waiting = sem_open(std::string("/wait_" + streamName).c_str(), O_CREAT | O_RDWR, ACCESSPERMS, 0);
Util::Stream::getStream(streamName);
if (waiting == SEM_FAILED){
DEBUG_MSG(DLVL_FAIL, "Failed to open semaphore - cancelling");
onFail();
return;
}
#ifdef __APPLE__
unsigned int timeout = 0;
while (++timeout < 300 && sem_trywait(waiting) == -1 && (errno == EINTR || errno == EAGAIN) ){
Util::sleep(100);
}
#else
struct timespec ts;
ts.tv_sec = Util::epoch() + 30;
ts.tv_nsec = 0;
while (sem_timedwait(waiting, &ts) == -1 && errno == EINTR) continue;
#endif
sem_post(waiting);
sem_close(waiting);
streamIndex.init(streamName,0);
}
if (!streamIndex.mapped){
DEBUG_MSG(DLVL_FAIL, "Could not connect to server for %s\n", streamName.c_str());
onFail();
return;
}
statsPage = IPC::sharedClient("statistics", 88, true);
playerConn = IPC::sharedClient(streamName + "_users", 30, true);
updateMeta();
//check which tracks don't actually exist
std::set<long unsigned int> toRemove;
for (std::set<long unsigned int>::iterator it = selectedTracks.begin(); it != selectedTracks.end(); it++){
if (!myMeta.tracks.count(*it)){
toRemove.insert(*it);
}
}
//remove those from selectedtracks
for (std::set<long unsigned int>::iterator it = toRemove.begin(); it != toRemove.end(); it++){
selectedTracks.erase(*it);
}
//loop through all codec combinations, count max simultaneous active
unsigned int bestSoFar = 0;
unsigned int bestSoFarCount = 0;
unsigned int index = 0;
for (JSON::ArrIter it = capa["codecs"].ArrBegin(); it != capa["codecs"].ArrEnd(); it++){
unsigned int genCounter = 0;
unsigned int selCounter = 0;
if ((*it).size() > 0){
for (JSON::ArrIter itb = (*it).ArrBegin(); itb != (*it).ArrEnd(); itb++){
if ((*itb).size() > 0){
bool found = false;
for (JSON::ArrIter itc = (*itb).ArrBegin(); itc != (*itb).ArrEnd() && !found; itc++){
for (std::set<long unsigned int>::iterator itd = selectedTracks.begin(); itd != selectedTracks.end(); itd++){
if (myMeta.tracks[*itd].codec == (*itc).asStringRef()){
selCounter++;
found = true;
break;
}
}
if (!found){
for (std::map<int,DTSC::Track>::iterator trit = myMeta.tracks.begin(); trit != myMeta.tracks.end(); trit++){
if (trit->second.codec == (*itc).asStringRef()){
genCounter++;
found = true;
break;
}
}
}
}
}
}
if (selCounter == selectedTracks.size()){
if (selCounter + genCounter > bestSoFarCount){
bestSoFarCount = selCounter + genCounter;
bestSoFar = index;
DEBUG_MSG(DLVL_HIGH, "Match (%u/%u): %s", selCounter, selCounter+genCounter, (*it).toString().c_str());
}
}else{
DEBUG_MSG(DLVL_VERYHIGH, "Not a match for currently selected tracks: %s", (*it).toString().c_str());
}
}
index++;
}
DEBUG_MSG(DLVL_MEDIUM, "Trying to fill: %s", capa["codecs"][bestSoFar].toString().c_str());
//try to fill as many codecs simultaneously as possible
if (capa["codecs"][bestSoFar].size() > 0){
for (JSON::ArrIter itb = capa["codecs"][bestSoFar].ArrBegin(); itb != capa["codecs"][bestSoFar].ArrEnd(); itb++){
if ((*itb).size() > 0){
bool found = false;
for (JSON::ArrIter itc = (*itb).ArrBegin(); itc != (*itb).ArrEnd() && !found; itc++){
for (std::set<long unsigned int>::iterator itd = selectedTracks.begin(); itd != selectedTracks.end(); itd++){
if (myMeta.tracks[*itd].codec == (*itc).asStringRef()){
found = true;
break;
}
}
if (!found){
for (std::map<int,DTSC::Track>::iterator trit = myMeta.tracks.begin(); trit != myMeta.tracks.end(); trit++){
if (trit->second.codec == (*itc).asStringRef()){
selectedTracks.insert(trit->first);
found = true;
break;
}
}
}
}
}
}
}
#if DEBUG >= DLVL_MEDIUM
//print the selected tracks
std::stringstream selected;
for (std::set<long unsigned int>::iterator it = selectedTracks.begin(); it != selectedTracks.end(); it++){
if (it != selectedTracks.begin()){
selected << ", ";
}
selected << (*it);
}
DEBUG_MSG(DLVL_MEDIUM, "Selected tracks: %s", selected.str().c_str());
#endif
unsigned int firstms = 0x0;
for (std::set<long unsigned int>::iterator it = selectedTracks.begin(); it != selectedTracks.end(); it++){
lastKeyTime[*it] = 0xFFFFFFFF;
if (myMeta.tracks[*it].firstms > firstms){
firstms = myMeta.tracks[*it].firstms;
}
}
if (myMeta.live){
if (firstms < 5000){
firstms = 0;
}
seek(firstms);
}else{
seek(0);
}
}
/// Clears the buffer, sets parseData to false, and generally makes not very much happen at all.
void Output::stop(){
buffer.clear();
parseData = false;
}
unsigned int Output::getKeyForTime(long unsigned int trackId, long long timeStamp){
unsigned int keyNo = 0;
for (std::deque<DTSC::Key>::iterator it = myMeta.tracks[trackId].keys.begin(); it != myMeta.tracks[trackId].keys.end(); it++){
if (it->getTime() <= timeStamp){
keyNo = it->getNumber();
}else{
break;
}
}
return keyNo;
}
void Output::loadPageForKey(long unsigned int trackId, long long int keyNum){
if (keyNum >= myMeta.tracks[trackId].keys.rbegin()->getNumber()){
//curPages.erase(trackId);
return;
}
DEBUG_MSG(DLVL_MEDIUM, "Loading track %lu, containing key %lld", trackId, keyNum);
int pageNum = -1;
int keyAmount = -1;
unsigned int timeout = 0;
if (!indexPages.count(trackId)){
char id[100];
sprintf(id, "%s%lu", streamName.c_str(), trackId);
indexPages[trackId].init(id, 8192);
}
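//Scan the index page for an entry covering the requested key: the upper 32 bits of
//each 8-byte entry hold the first key on a data page, the lower 32 bits how many keys
//that page contains, both in network byte order. Keep polling (and advertising the
//wanted key via nxtKeyNum/stats) until such an entry shows up, or give up after
//roughly ten seconds.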
while (pageNum == -1 || keyAmount == -1){
for (int i = 0; i < indexPages[trackId].len / 8; i++){
long tmpKey = ntohl(((((long long int*)indexPages[trackId].mapped)[i]) >> 32) & 0xFFFFFFFF);
long amountKey = ntohl((((long long int*)indexPages[trackId].mapped)[i]) & 0xFFFFFFFF);
if (tmpKey <= keyNum && (tmpKey + amountKey) > keyNum){
pageNum = tmpKey;
keyAmount = amountKey;
break;
}
}
if (pageNum == -1 || keyAmount == -1){
if (!timeout){
DEBUG_MSG(DLVL_DEVEL, "Requesting/waiting for page that has key %lu:%lld...", trackId, keyNum);
}
if (timeout++ > 100){
DEBUG_MSG(DLVL_FAIL, "Timeout while waiting for requested page. Aborting.");
curPages.erase(trackId);
return;
}
nxtKeyNum[trackId] = keyNum-1;
stats();
Util::sleep(100);
}
}
nxtKeyNum[trackId] = pageNum;
if (currKeyOpen.count(trackId) && currKeyOpen[trackId] == pageNum){
return;
}
char id[100];
sprintf(id, "%s%lu_%d", streamName.c_str(), trackId, pageNum);
curPages[trackId].init(std::string(id),0);
if (!(curPages[trackId].mapped)){
DEBUG_MSG(DLVL_FAIL, "(%d) Initializing page %s failed", getpid(), curPages[trackId].name.c_str());
return;
}
currKeyOpen[trackId] = pageNum;
}
/// Prepares all tracks from selectedTracks for seeking to the specified ms position.
/// \todo Make this actually seek, instead of always loading position zero.
void Output::seek(long long pos){
firstTime = Util::getMS() - pos;
if (!isInitialized){
initialize();
}
buffer.clear();
currentPacket.null();
updateMeta();
for (std::set<long unsigned int>::iterator it = selectedTracks.begin(); it != selectedTracks.end(); it++){
seek(*it, pos);
}
}
bool Output::seek(int tid, long long pos, bool getNextKey){
loadPageForKey(tid, getKeyForTime(tid, pos) + (getNextKey?1:0));
if (!curPages.count(tid) || !curPages[tid].mapped){
DEBUG_MSG(DLVL_DEVEL, "Aborting seek to %llims in track %d, not available.", pos, tid);
return false;
}
sortedPageInfo tmp;
tmp.tid = tid;
tmp.offset = 0;
DTSC::Packet tmpPack;
tmpPack.reInit(curPages[tid].mapped + tmp.offset, 0, true);
tmp.time = tmpPack.getTime();
while ((long long)tmp.time < pos && tmpPack){
tmp.offset += tmpPack.getDataLen();
tmpPack.reInit(curPages[tid].mapped + tmp.offset, 0, true);
tmp.time = tmpPack.getTime();
}
if (tmpPack){
buffer.insert(tmp);
return true;
}else{
//don't print anything for empty packets - not sign of corruption, just unfinished stream.
if (curPages[tid].mapped[tmp.offset] != 0){
DEBUG_MSG(DLVL_FAIL, "Noes! Couldn't find packet on track %d because of some kind of corruption error or somesuch.", tid);
}else{
DEBUG_MSG(DLVL_FAIL, "Track %d no data (key %u) - waiting...", tid, getKeyForTime(tid, pos) + (getNextKey?1:0));
}
return false;
}
}
int Output::run() {
bool firstData = true;//only the first time, we call OnRequest if there's data buffered already.
DEBUG_MSG(DLVL_MEDIUM, "MistOut client handler started");
while (myConn.connected() && (wantRequest || parseData)){
stats();
if (wantRequest){
if ((firstData && myConn.Received().size()) || myConn.spool()){
firstData = false;
DEBUG_MSG(DLVL_VERYHIGH, "(%d) OnRequest", getpid());
onRequest();
}else{
if (!isBlocking && !parseData){
Util::sleep(500);
}
}
}
if (parseData){
if (!isInitialized){
initialize();
}
if ( !sentHeader){
DEBUG_MSG(DLVL_VERYHIGH, "(%d) SendHeader", getpid());
sendHeader();
}
prepareNext();
if (currentPacket){
sendNext();
}else{
if (!onFinish()){
break;
}
}
}
}
DEBUG_MSG(DLVL_MEDIUM, "MistOut client handler shutting down: %s, %s, %s", myConn.connected() ? "conn_active" : "conn_closed", wantRequest ? "want_request" : "no_want_request", parseData ? "parsing_data" : "not_parsing_data");
myConn.close();
return 0;
}
void Output::prepareNext(){
static unsigned int emptyCount = 0;
if (!buffer.size()){
currentPacket.null();
DEBUG_MSG(DLVL_DEVEL, "Buffer completely played out");
return;
}
sortedPageInfo nxt = *(buffer.begin());
buffer.erase(buffer.begin());
DEBUG_MSG(DLVL_VERYHIGH, "Loading track %u (next=%lu), part @ %u/%lld", nxt.tid, nxtKeyNum[nxt.tid], nxt.offset, curPages[nxt.tid].len);
if (nxt.offset >= curPages[nxt.tid].len){
loadPageForKey(nxt.tid, ++nxtKeyNum[nxt.tid]);
nxt.offset = 0;
}
if (!curPages.count(nxt.tid) || !curPages[nxt.tid].mapped){
//mapping failure? Drop this track and go to next.
//not an error - usually means end of stream.
DEBUG_MSG(DLVL_DEVEL, "Track %u no page - dropping track.", nxt.tid);
prepareNext();
return;
}
if (!memcmp(curPages[nxt.tid].mapped + nxt.offset, "\000\000\000\000", 4)){
if (!currentPacket.getTime()){
DEBUG_MSG(DLVL_DEVEL, "Timeless empty packet on track %u - dropping track.", nxt.tid);
prepareNext();
return;
}
Util::sleep(500);
updateMeta();
if (myMeta && ++emptyCount < 20){
if (!seek(nxt.tid, currentPacket.getTime(), true)){
buffer.insert(nxt);
}
}else{
DEBUG_MSG(DLVL_DEVEL, "Empty packet on track %u - could not reload, dropping track.", nxt.tid);
}
prepareNext();
return;
}
currentPacket.reInit(curPages[nxt.tid].mapped + nxt.offset, 0, true);
if (currentPacket){
nxtKeyNum[nxt.tid] = getKeyForTime(nxt.tid, currentPacket.getTime());
emptyCount = 0;
}
nxt.offset += currentPacket.getDataLen();
if (realTime && !myMeta.live){
while (nxt.time > (Util::getMS() - firstTime + maxSkipAhead)*1000/realTime) {
Util::sleep(nxt.time - (Util::getMS() - firstTime + minSkipAhead)*1000/realTime);
}
}
if (curPages[nxt.tid]){
if (nxt.offset < curPages[nxt.tid].len){
nxt.time = getDTSCTime(curPages[nxt.tid].mapped, nxt.offset);
}
buffer.insert(nxt);
}
playerConn.keepAlive();
}
void Output::stats(){
if (!statsPage.getData()){
return;
}
unsigned long long int now = Util::epoch();
if (now != lastStats){
lastStats = now;
IPC::statExchange tmpEx(statsPage.getData());
tmpEx.now(now);
tmpEx.host(myConn.getHost());
tmpEx.streamName(streamName);
tmpEx.connector(capa["name"].asString());
tmpEx.up(myConn.dataUp());
tmpEx.down(myConn.dataDown());
tmpEx.time(now - myConn.connTime());
statsPage.keepAlive();
}
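//Report the selected tracks back over the user page: up to five 6-byte entries, each
//a 32-bit track ID followed by the 16-bit number of the next key we want, which
//presumably tells the buffer which pages must remain available.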
int tNum = 0;
for (std::set<unsigned long>::iterator it = selectedTracks.begin(); it != selectedTracks.end() && tNum < 5; it++){
char thisData[6];
thisData[0] = ((*it >> 24) & 0xFF);
thisData[1] = ((*it >> 16) & 0xFF);
thisData[2] = ((*it >> 8) & 0xFF);
thisData[3] = ((*it) & 0xFF);
thisData[4] = ((nxtKeyNum[*it] >> 8) & 0xFF);
thisData[5] = ((nxtKeyNum[*it]) & 0xFF);
memcpy(playerConn.getData() + (6 * tNum), thisData, 6);
tNum ++;
playerConn.keepAlive();
}
if (tNum >= 5){
DEBUG_MSG(DLVL_WARN, "Too many tracks selected, using only first 5");
}
}
void Output::onRequest(){
//simply clear the buffer, we don't support any kind of input by default
myConn.Received().clear();
wantRequest = false;
}
void Output::sendHeader(){
//just set the sentHeader bool to true, by default
sentHeader = true;
}
}

98
src/output/output.h Normal file

@ -0,0 +1,98 @@
#include <set>
#include <cstdlib>
#include <map>
#include <mist/config.h>
#include <mist/json.h>
#include <mist/flv_tag.h>
#include <mist/timing.h>
#include <mist/dtsc.h>
#include <mist/socket.h>
#include <mist/shared_memory.h>
namespace Mist {
/// This struct keeps packet information sorted in playback order, so the
/// Mist::Output class knows when to buffer which packet.
struct sortedPageInfo{
bool operator < (const sortedPageInfo & rhs) const {
if (time < rhs.time){
return true;
}
return (time == rhs.time && tid < rhs.tid);
}
int tid;
long long unsigned int time;
unsigned int offset;
};
/// The output class is intended to be inherited by MistOut process classes.
/// It contains all generic code and logic, while the child classes implement
/// anything specific to particular protocols or containers.
/// It contains several virtual functions, that may be overridden to "hook" into
/// the streaming process at those particular points, simplifying child class
/// logic and implementation details.
class Output {
public:
//constructor and destructor
Output(Socket::Connection & conn);
virtual ~Output();
//static members for initialization and capabilities
static void init(Util::Config * cfg) {}
static JSON::Value capa;
//non-virtual generic functions
int run();
void stats();
void seek(long long pos);
bool seek(int tid, long long pos, bool getNextKey = false);
void stop();
void setBlocking(bool blocking);
void updateMeta();
//virtuals. The optional virtuals have default implementations that do as little as possible.
virtual void sendNext() {}//REQUIRED! Others are optional.
virtual void prepareNext();
virtual void onRequest();
virtual bool onFinish(){return false;}
virtual void initialize();
virtual void sendHeader();
virtual void onFail();
private://these *should* not be messed with in child classes.
std::map<unsigned long, unsigned int> currKeyOpen;
void loadPageForKey(long unsigned int trackId, long long int keyNum);
bool isBlocking;///< If true, indicates that myConn is blocking.
unsigned int lastStats;///<Time of last sending of stats.
IPC::sharedClient statsPage;///< Shared memory used for statistics reporting.
long long unsigned int firstTime;///< Time of first packet after last seek. Used for real-time sending.
std::map<unsigned long, unsigned long> nxtKeyNum;///< Contains the number of the next key, for page seeking purposes.
std::set<sortedPageInfo> buffer;///< A sorted list of next-to-be-loaded packets.
std::map<unsigned long, unsigned long> lastKeyTime;///< Stores the time of the last keyframe, for preventing duplicates
protected://these are to be messed with by child classes
unsigned int getKeyForTime(long unsigned int trackId, long long timeStamp);
IPC::sharedPage streamIndex;///< Shared memory used for metadata
std::map<int,IPC::sharedPage> indexPages;///< Maintains index pages of each track, holding information about available pages with DTSC packets.
std::map<int,IPC::sharedPage> curPages;///< Holds the currently used pages with DTSC packets for each track.
/// \todo Privitize keyTimes
IPC::sharedClient playerConn;///< Shared memory used for connection to MistIn process.
std::map<int,std::set<int> > keyTimes;///< Per-track list of keyframe times, for keyframe detection.
//static member for initialization
static Util::Config * config;///< Static, global configuration for the MistOut process
//stream delaying variables
unsigned int maxSkipAhead;///< Maximum ms that we will go ahead of the intended timestamps.
unsigned int minSkipAhead;///< Minimum ms that we will go ahead of the intended timestamps.
unsigned int realTime;///< Playback speed times 1000 (1000 == 1.0X). Zero is infinite.
//Read/write status variables
Socket::Connection & myConn;///< Connection to the client.
std::string streamName;///< Name of the stream that will be opened by initialize()
std::set<unsigned long> selectedTracks; ///< Tracks that are selected for playback
bool wantRequest;///< If true, waits for a request.
bool parseData;///< If true, triggers initalization if not already done, sending of header, sending of packets.
bool isInitialized;///< If false, triggers initialization if parseData is true.
bool sentHeader;///< If false, triggers sendHeader if parseData is true.
//Read-only stream data variables
DTSC::Packet currentPacket;///< The packet that is ready for sending now.
DTSC::Meta myMeta;///< Up to date stream metadata
};
}

266
src/output/output_hds.cpp Normal file

@ -0,0 +1,266 @@
#include "output_hds.h"
#include <mist/defines.h>
#include <mist/http_parser.h>
#include <mist/stream.h>
#include <unistd.h>
#include <mist/amf.h>
#include <mist/mp4_adobe.h>
namespace Mist {
void OutHDS::getTracks(){
/// \todo Why do we have only one audio track option?
videoTracks.clear();
audioTrack = 0;
for (std::map<int,DTSC::Track>::iterator it = myMeta.tracks.begin(); it != myMeta.tracks.end(); it++){
if (it->second.codec == "H264" || it->second.codec == "H263" || it->second.codec == "VP6"){
videoTracks.insert(it->first);
}
if (it->second.codec == "AAC" || it->second.codec == "MP3"){
audioTrack = it->first;
}
}
}
///\brief Builds a bootstrap for use in HTTP Dynamic streaming.
///\param tid The track this bootstrap is generated for.
///\return The generated bootstrap.
std::string OutHDS::dynamicBootstrap(int tid){
updateMeta();
std::string empty;
MP4::ASRT asrt;
asrt.setUpdate(false);
asrt.setVersion(1);
//asrt.setQualityEntry(empty, 0);
if (myMeta.live){
asrt.setSegmentRun(1, 4294967295ul, 0);
}else{
asrt.setSegmentRun(1, myMeta.tracks[tid].keys.size(), 0);
}
MP4::AFRT afrt;
afrt.setUpdate(false);
afrt.setVersion(1);
afrt.setTimeScale(1000);
//afrt.setQualityEntry(empty, 0);
MP4::afrt_runtable afrtrun;
int i = 0;
for (std::deque<DTSC::Key>::iterator it = myMeta.tracks[tid].keys.begin(); it != myMeta.tracks[tid].keys.end(); it++){
if (it->getLength()){
afrtrun.firstFragment = it->getNumber();
afrtrun.firstTimestamp = it->getTime();
afrtrun.duration = it->getLength();
afrt.setFragmentRun(afrtrun, i);
i++;
}
}
MP4::ABST abst;
abst.setVersion(1);
abst.setBootstrapinfoVersion(1);
abst.setProfile(0);
abst.setUpdate(false);
abst.setTimeScale(1000);
abst.setLive(myMeta.live);
abst.setCurrentMediaTime(myMeta.tracks[tid].lastms);
abst.setSmpteTimeCodeOffset(0);
abst.setMovieIdentifier(streamName);
abst.setSegmentRunTable(asrt, 0);
abst.setFragmentRunTable(afrt, 0);
DEBUG_MSG(DLVL_VERYHIGH, "Sending bootstrap: %s", abst.toPrettyString(0).c_str());
return std::string((char*)abst.asBox(), (int)abst.boxedSize());
}
///\brief Builds an index file for HTTP Dynamic streaming.
///\return The index file for HTTP Dynamic Streaming.
std::string OutHDS::dynamicIndex(){
getTracks();
std::stringstream Result;
Result << "<?xml version=\"1.0\" encoding=\"utf-8\"?>" << std::endl;
Result << " <manifest xmlns=\"http://ns.adobe.com/f4m/1.0\">" << std::endl;
Result << " <id>" << streamName << "</id>" << std::endl;
Result << " <mimeType>video/mp4</mimeType>" << std::endl;
Result << " <deliveryType>streaming</deliveryType>" << std::endl;
if (myMeta.vod){
Result << " <duration>" << myMeta.tracks[*videoTracks.begin()].lastms / 1000 << ".000</duration>" << std::endl;
Result << " <streamType>recorded</streamType>" << std::endl;
}else{
Result << " <duration>0.00</duration>" << std::endl;
Result << " <streamType>live</streamType>" << std::endl;
}
for (std::set<int>::iterator it = videoTracks.begin(); it != videoTracks.end(); it++){
Result << " <bootstrapInfo "
"profile=\"named\" "
"id=\"boot" << (*it) << "\" "
"url=\"" << (*it) << ".abst\">"
"</bootstrapInfo>" << std::endl;
Result << " <media "
"url=\"" << (*it) << "-\" "
"bitrate=\"" << myMeta.tracks[(*it)].bps * 8 << "\" "
"bootstrapInfoId=\"boot" << (*it) << "\" "
"width=\"" << myMeta.tracks[(*it)].width << "\" "
"height=\"" << myMeta.tracks[(*it)].height << "\">" << std::endl;
Result << " <metadata>AgAKb25NZXRhRGF0YQMAAAk=</metadata>" << std::endl;
Result << " </media>" << std::endl;
}
Result << "</manifest>" << std::endl;
DEBUG_MSG(DLVL_HIGH, "Sending manifest: %s", Result.str().c_str());
return Result.str();
} //dynamicIndex
OutHDS::OutHDS(Socket::Connection & conn) : Output(conn) {
audioTrack = 0;
playUntil = 0;
}
void OutHDS::onFail(){
HTTP_S.Clean(); //make sure no parts of old requests are left in any buffers
HTTP_S.SetBody("Stream not found. Sorry, we tried.");
HTTP_S.SendResponse("404", "Stream not found", myConn);
Output::onFail();
}
OutHDS::~OutHDS() {}
void OutHDS::init(Util::Config * cfg){
capa["desc"] = "Enables HTTP protocol Adobe-specific dynamic streaming (also known as HDS).";
capa["deps"] = "HTTP";
capa["url_rel"] = "/dynamic/$/manifest.f4m";
capa["url_prefix"] = "/dynamic/$/";
capa["socket"] = "http_hds";
capa["codecs"][0u][0u].append("H264");
capa["codecs"][0u][0u].append("H263");
capa["codecs"][0u][0u].append("VP6");
capa["codecs"][0u][1u].append("AAC");
capa["codecs"][0u][1u].append("MP3");
capa["methods"][0u]["handler"] = "http";
capa["methods"][0u]["type"] = "flash/11";
capa["methods"][0u]["priority"] = 7ll;
cfg->addBasicConnectorOptions(capa);
config = cfg;
}
void OutHDS::sendNext(){
if (currentPacket.getTime() >= playUntil){
DEBUG_MSG(DLVL_DEVEL, "(%d) Done sending fragment", getpid() );
stop();
wantRequest = true;
HTTP_S.Chunkify("", 0, myConn);
return;
}
tag.DTSCLoader(currentPacket, myMeta.tracks[currentPacket.getTrackId()]);
HTTP_S.Chunkify(tag.data, tag.len, myConn);
}
void OutHDS::onRequest(){
HTTP_R.Clean();
while (HTTP_R.Read(myConn)){
DEBUG_MSG(DLVL_DEVEL, "Received request: %s", HTTP_R.getUrl().c_str());
if (HTTP_R.url.find(".abst") != std::string::npos){
myConn.setHost(HTTP_R.GetHeader("X-Origin"));
streamName = HTTP_R.GetHeader("X-Stream");
std::string streamID = HTTP_R.url.substr(streamName.size() + 10);
streamID = streamID.substr(0, streamID.find(".abst"));
HTTP_S.Clean();
HTTP_S.SetBody(dynamicBootstrap(atoll(streamID.c_str())));
HTTP_S.SetHeader("Content-Type", "binary/octet");
HTTP_S.SetHeader("Cache-Control", "no-cache");
HTTP_S.SendResponse("200", "OK", myConn);
HTTP_R.Clean(); //clean for any possible next requests
continue;
}
if (HTTP_R.url.find("f4m") == std::string::npos){
myConn.setHost(HTTP_R.GetHeader("X-Origin"));
streamName = HTTP_R.GetHeader("X-Stream");
initialize();
std::string tmp_qual = HTTP_R.url.substr(HTTP_R.url.find("/", 10) + 1);
unsigned int tid;
unsigned int fragNum;
tid = atoi(tmp_qual.substr(0, tmp_qual.find("Seg") - 1).c_str());
int temp = HTTP_R.url.find("Frag") + 4;
fragNum = atoi(HTTP_R.url.substr(temp).c_str());
DEBUG_MSG(DLVL_MEDIUM, "Video track %d, fragment %d\n", tid, fragNum);
if (!audioTrack){getTracks();}
unsigned int mstime = 0;
unsigned int mslen = 0;
for (std::deque<DTSC::Key>::iterator it = myMeta.tracks[tid].keys.begin(); it != myMeta.tracks[tid].keys.end(); it++){
if (it->getNumber() >= fragNum){
mstime = it->getTime();
mslen = it->getLength();
if (myMeta.live){
if (it == myMeta.tracks[tid].keys.end() - 2){
HTTP_S.Clean();
HTTP_S.SetBody("Proxy, re-request this in a second or two.\n");
HTTP_S.SendResponse("208", "Ask again later", myConn);
HTTP_R.Clean(); //clean for any possible next requests
std::cout << "Fragment after fragment " << fragNum << " not available yet" << std::endl;
/*
///\todo patch this back in?
if (ss.spool()){
while (Strm.parsePacket(ss.Received())){}
}
*/
}
}
break;
}
}
if (HTTP_R.url == "/"){continue;}//Don't continue, but continue instead.
if (myMeta.live){
if (mstime == 0 && fragNum > 1){
HTTP_S.Clean();
HTTP_S.SetBody("The requested fragment is no longer kept in memory on the server and cannot be served.\n");
HTTP_S.SendResponse("412", "Fragment out of range", myConn);
HTTP_R.Clean(); //clean for any possible next requests
std::cout << "Fragment " << fragNum << " too old" << std::endl;
continue;
}
}
selectedTracks.clear();
selectedTracks.insert(tid);
selectedTracks.insert(audioTrack);
seek(mstime);
playUntil = mstime + mslen;
HTTP_S.Clean();
HTTP_S.SetHeader("Content-Type", "video/mp4");
HTTP_S.StartResponse(HTTP_R, myConn);
//send the bootstrap
std::string bootstrap = dynamicBootstrap(tid);
HTTP_S.Chunkify(bootstrap, myConn);
//send a zero-size mdat, meaning it stretches until end of file.
HTTP_S.Chunkify("\000\000\000\000mdat", 8, myConn);
//send init data, if needed.
if (audioTrack > 0){
tag.DTSCAudioInit(myMeta.tracks[audioTrack]);
tag.tagTime(mstime);
HTTP_S.Chunkify(tag.data, tag.len, myConn);
}
if (tid > 0){
tag.DTSCVideoInit(myMeta.tracks[tid]);
tag.tagTime(mstime);
HTTP_S.Chunkify(tag.data, tag.len, myConn);
}
parseData = true;
wantRequest = false;
}else{
myConn.setHost(HTTP_R.GetHeader("X-Origin"));
streamName = HTTP_R.GetHeader("X-Stream");
initialize();
std::stringstream tmpstr;
myMeta.toPrettyString(tmpstr);
HTTP_S.Clean();
HTTP_S.SetHeader("Content-Type", "text/xml");
HTTP_S.SetHeader("Cache-Control", "no-cache");
HTTP_S.SetBody(dynamicIndex());
HTTP_S.SendResponse("200", "OK", myConn);
}
HTTP_R.Clean(); //clean for any possible next requests
}
}
}

30
src/output/output_hds.h Normal file

@ -0,0 +1,30 @@
#include "output.h"
#include <mist/http_parser.h>
#include <mist/ts_packet.h>
#include <mist/mp4.h>
#include <mist/mp4_generic.h>
namespace Mist {
class OutHDS : public Output {
public:
OutHDS(Socket::Connection & conn);
~OutHDS();
static void init(Util::Config * cfg);
void onRequest();
void onFail();
void sendNext();
protected:
void getTracks();
std::string dynamicBootstrap(int tid);
std::string dynamicIndex();
HTTP::Parser HTTP_S;
HTTP::Parser HTTP_R;
std::set<int> videoTracks;///< Holds valid video tracks for playback
long long int audioTrack;///< Holds audio track ID for playback
long long unsigned int playUntil;
FLV::Tag tag;
};
}
typedef Mist::OutHDS mistOut;

282
src/output/output_hls.cpp Normal file
View file

@@ -0,0 +1,282 @@
#include "output_hls.h"
#include <mist/defines.h>
#include <mist/http_parser.h>
#include <mist/stream.h>
#include <unistd.h>
namespace Mist {
///\brief Builds an index file for HTTP Live streaming.
///\return The index file for HTTP Live Streaming.
std::string OutHLS::liveIndex(){
std::stringstream result;
result << "#EXTM3U\r\n";
int audioId = -1;
std::string audioName;
for (std::map<int,DTSC::Track>::iterator it = myMeta.tracks.begin(); it != myMeta.tracks.end(); it++){
if (it->second.codec == "AAC"){
audioId = it->first;
audioName = it->second.getIdentifier();
break;
}
}
for (std::map<int,DTSC::Track>::iterator it = myMeta.tracks.begin(); it != myMeta.tracks.end(); it++){
if (it->second.codec == "H264"){
int bWidth = it->second.bps * 2;
if (audioId != -1){
bWidth += myMeta.tracks[audioId].bps * 2;
}
result << "#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=" << bWidth * 10 << "\r\n";
result << it->first;
if (audioId != -1){
result << "_" << audioId;
}
result << "/index.m3u8\r\n";
}
}
#if DEBUG >= 8
std::cerr << "Sending this index:" << std::endl << result.str() << std::endl;
#endif
return result.str();
}
std::string OutHLS::liveIndex(int tid){
updateMeta();
std::stringstream result;
//parse single track
int longestFragment = 0;
if (!myMeta.tracks[tid].fragments.size()){
DEBUG_MSG(DLVL_FAIL, "liveIndex called with track %d, which has no fragments!", tid);
return "";
}
for (std::deque<DTSC::Fragment>::iterator it = myMeta.tracks[tid].fragments.begin(); (it + 1) != myMeta.tracks[tid].fragments.end(); it++){
if (it->getDuration() > longestFragment){
longestFragment = it->getDuration();
}
}
result << "#EXTM3U\r\n"
"#EXT-X-TARGETDURATION:" << (longestFragment / 1000) + 1 << "\r\n"
"#EXT-X-MEDIA-SEQUENCE:" << myMeta.tracks[tid].missedFrags << "\r\n";
for (std::deque<DTSC::Fragment>::iterator it = myMeta.tracks[tid].fragments.begin(); it != myMeta.tracks[tid].fragments.end(); it++){
long long int starttime = myMeta.tracks[tid].getKey(it->getNumber()).getTime();
if (it != (myMeta.tracks[tid].fragments.end() - 1)){
result << "#EXTINF:" << ((it->getDuration() + 500) / 1000) << ", no desc\r\n" << starttime << "_" << it->getDuration() + starttime << ".ts\r\n";
}
}
if ( !myMeta.live){
result << "#EXT-X-ENDLIST\r\n";
}
#if DEBUG >= 8
std::cerr << "Sending this index:" << std::endl << result.str() << std::endl;
#endif
return result.str();
} //liveIndex
OutHLS::OutHLS(Socket::Connection & conn) : Output(conn) {
haveAvcc = false;
}
OutHLS::~OutHLS() {}
void OutHLS::onFail(){
HTTP_S.Clean(); //make sure no parts of old requests are left in any buffers
HTTP_S.SetBody("Stream not found. Sorry, we tried.");
HTTP_S.SendResponse("404", "Stream not found", myConn);
Output::onFail();
}
void OutHLS::init(Util::Config * cfg){
capa["name"] = "HTTP_Live";
capa["desc"] = "Enables HTTP protocol Apple-specific streaming (also known as HLS).";
capa["deps"] = "HTTP";
capa["url_rel"] = "/hls/$/index.m3u8";
capa["url_prefix"] = "/hls/$/";
capa["socket"] = "http_hls";
capa["codecs"][0u][0u].append("H264");
capa["codecs"][0u][1u].append("AAC");
capa["methods"][0u]["handler"] = "http";
capa["methods"][0u]["type"] = "html5/application/vnd.apple.mpegurl";
capa["methods"][0u]["priority"] = 9ll;
cfg->addBasicConnectorOptions(capa);
config = cfg;
}
void OutHLS::sendNext(){
Socket::Buffer ToPack;
char * ContCounter = 0;
bool IsKeyFrame = false;
char * dataPointer = 0;
int dataLen = 0;
currentPacket.getString("data", dataPointer, dataLen);
if (currentPacket.getTime() >= until){
DEBUG_MSG(DLVL_DEVEL, "(%d) Done sending fragment", getpid() );
stop();
wantRequest = true;
HTTP_S.Chunkify("", 0, myConn);
HTTP_S.Clean();
return;
}
//detect packet type, and put converted data into ToPack.
if (myMeta.tracks[currentPacket.getTrackId()].type == "video"){
ToPack.append(TS::Packet::getPESVideoLeadIn(0ul, currentPacket.getTime() * 90));
IsKeyFrame = currentPacket.getInt("keyframe");
if (IsKeyFrame){
if (!haveAvcc){
avccbox.setPayload(myMeta.tracks[currentPacket.getTrackId()].init);
haveAvcc = true;
}
ToPack.append(avccbox.asAnnexB());
}
unsigned int i = 0;
while (i + 4 < (unsigned int)dataLen){
unsigned int ThisNaluSize = (dataPointer[i] << 24) + (dataPointer[i+1] << 16) + (dataPointer[i+2] << 8) + dataPointer[i+3];
if (ThisNaluSize + i + 4 > (unsigned int)dataLen){
DEBUG_MSG(DLVL_WARN, "Too big NALU detected (%u > %d) - skipping!", ThisNaluSize + i + 4, dataLen);
break;
}
ToPack.append("\000\000\000\001", 4);
i += 4;
ToPack.append(dataPointer + i, ThisNaluSize);
i += ThisNaluSize;
}
ContCounter = &VideoCounter;
}else if (myMeta.tracks[currentPacket.getTrackId()].type == "audio"){
if (AppleCompat){
ToPack.append(TS::Packet::getPESAudioLeadIn(7+dataLen, lastVid));
}else{
ToPack.append(TS::Packet::getPESAudioLeadIn(7+dataLen, currentPacket.getTime() * 90));
}
ToPack.append(TS::GetAudioHeader(dataLen, myMeta.tracks[currentPacket.getTrackId()].init));
ToPack.append(dataPointer, dataLen);
ContCounter = &AudioCounter;
}
bool first = true;
//send TS packets
while (ToPack.size()){
if (PacketNumber % 42 == 0){
HTTP_S.Chunkify(TS::PAT, 188, myConn);
HTTP_S.Chunkify(TS::PMT, 188, myConn);
PacketNumber += 2;
}
PackData.Clear();
/// \todo Update according to sendHeader()'s generated data.
//0x100 - 1 + currentPacket.getTrackId()
if (myMeta.tracks[currentPacket.getTrackId()].type == "video"){
PackData.PID(0x100);
}else{
PackData.PID(0x101);
}
PackData.ContinuityCounter((*ContCounter)++);
if (first){
PackData.UnitStart(1);
if (IsKeyFrame){
PackData.RandomAccess(1);
PackData.PCR(currentPacket.getTime() * 27000);
}
first = false;
}
unsigned int toSend = PackData.AddStuffing(ToPack.bytes(184));
std::string gonnaSend = ToPack.remove(toSend);
PackData.FillFree(gonnaSend);
HTTP_S.Chunkify(PackData.ToString(), 188, myConn);
PacketNumber ++;
}
}
int OutHLS::canSeekms(unsigned int ms){
//no tracks? Frame too new by definition.
if ( !myMeta.tracks.size()){
return 1;
}
//loop through all the tracks
for (std::map<int,DTSC::Track>::iterator it = myMeta.tracks.begin(); it != myMeta.tracks.end(); it++){
//return "too late" if one track is past this point
if (ms < it->second.firstms){
return -1;
}
//return "too early" if one track is not yet at this point
if (ms > it->second.lastms){
return 1;
}
}
return 0;
}
void OutHLS::onRequest(){
while (HTTP_R.Read(myConn)){
DEBUG_MSG(DLVL_DEVEL, "Received request: %s", HTTP_R.getUrl().c_str());
myConn.setHost(HTTP_R.GetHeader("X-Origin"));
AppleCompat = (HTTP_R.GetHeader("User-Agent").find("Apple") != std::string::npos);
streamName = HTTP_R.GetHeader("X-Stream");
initialize();
if (HTTP_R.url.find(".m3u") == std::string::npos){
std::string tmpStr = HTTP_R.getUrl();
std::string fmtStr = "/hls/" + streamName + "/%u_%u/%llu_%llu.ts";
long long unsigned int from;
sscanf(tmpStr.c_str(), fmtStr.c_str(), &vidTrack, &audTrack, &from, &until);
DEBUG_MSG(DLVL_DEVEL, "Vid %u, Aud %u, From %llu, Until %llu", vidTrack, audTrack, from, until);
selectedTracks.clear();
selectedTracks.insert(vidTrack);
selectedTracks.insert(audTrack);
if (myMeta.live){
/// \todo Detection of out-of-range parts.
int seekable = canSeekms(from);
if (seekable < 0){
HTTP_S.Clean();
HTTP_S.SetBody("The requested fragment is no longer kept in memory on the server and cannot be served.\n");
myConn.SendNow(HTTP_S.BuildResponse("412", "Fragment out of range"));
HTTP_R.Clean(); //clean for any possible next requests
DEBUG_MSG(DLVL_WARN, "Fragment @ %llu too old", from);
continue;
}
if (seekable > 0){
HTTP_S.Clean();
HTTP_S.SetBody("Proxy, re-request this in a second or two.\n");
myConn.SendNow(HTTP_S.BuildResponse("208", "Ask again later"));
HTTP_R.Clean(); //clean for any possible next requests
DEBUG_MSG(DLVL_WARN, "Fragment @ %llu not available yet", from);
continue;
}
}
seek(from);
lastVid = from * 90;
HTTP_S.Clean();
HTTP_S.SetHeader("Content-Type", "video/mp2t");
HTTP_S.StartResponse(HTTP_R, myConn);
PacketNumber = 0;
parseData = true;
wantRequest = false;
}else{
streamName = HTTP_R.GetHeader("X-Stream");
initialize();
std::string request = HTTP_R.url.substr(HTTP_R.url.find("/", 5) + 1);
HTTP_S.Clean();
if (HTTP_R.url.find(".m3u8") != std::string::npos){
HTTP_S.SetHeader("Content-Type", "audio/x-mpegurl");
}else{
HTTP_S.SetHeader("Content-Type", "audio/mpegurl");
}
HTTP_S.SetHeader("Cache-Control", "no-cache");
std::string manifest;
if (request.find("/") == std::string::npos){
manifest = liveIndex();
}else{
int selectId = atoi(request.substr(0,request.find("/")).c_str());
manifest = liveIndex(selectId);
}
HTTP_S.SetBody(manifest);
HTTP_S.SendResponse("200", "OK", myConn);
}
HTTP_R.Clean(); //clean for any possible next requests
}
}
}
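
OutHLS::sendNext() above rewrites each H.264 sample from its stored form (a 4-byte big-endian length prefix per NAL unit) into Annex B start codes before packing it into 188-byte TS packets, with PAT and PMT re-sent every 42 packets. A minimal standalone sketch of just that NAL-unit rewrite (the helper name is hypothetical), assuming the same length-prefixed input the loop above consumes:

#include <string>

// Rewrites 4-byte length-prefixed NAL units into Annex B (0x00000001 start codes).
std::string lengthPrefixedToAnnexB(const char * data, unsigned int len) {
  std::string out;
  unsigned int i = 0;
  while (i + 4 < len) {
    unsigned int naluSize = ((unsigned int)(unsigned char)data[i] << 24) | ((unsigned char)data[i + 1] << 16)
                          | ((unsigned char)data[i + 2] << 8) | (unsigned char)data[i + 3];
    if (naluSize + i + 4 > len) { break; }  // size field runs past the buffer: stop, as above
    out.append("\x00\x00\x00\x01", 4);      // Annex B start code
    i += 4;
    out.append(data + i, naluSize);
    i += naluSize;
  }
  return out;
}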

39
src/output/output_hls.h Normal file
View file

@@ -0,0 +1,39 @@
#include "output.h"
#include <mist/http_parser.h>
#include <mist/ts_packet.h>
#include <mist/mp4.h>
#include <mist/mp4_generic.h>
namespace Mist {
class OutHLS : public Output {
public:
OutHLS(Socket::Connection & conn);
~OutHLS();
static void init(Util::Config * cfg);
void onRequest();
void onFail();
void sendNext();
protected:
HTTP::Parser HTTP_S;
HTTP::Parser HTTP_R;
std::string liveIndex();
std::string liveIndex(int tid);
int canSeekms(unsigned int ms);
int keysToSend;
long long int playUntil;
TS::Packet PackData;
unsigned int PacketNumber;
bool haveAvcc;
char VideoCounter;
char AudioCounter;
MP4::AVCC avccbox;
bool AppleCompat;
long long unsigned int lastVid;
long long unsigned int until;
unsigned int vidTrack;
unsigned int audTrack;
};
}
typedef Mist::OutHLS mistOut;

484
src/output/output_hss.cpp Normal file
View file

@@ -0,0 +1,484 @@
#include "output_hss.h"
#include <mist/defines.h>
#include <mist/mp4.h>
#include <mist/mp4_ms.h>
#include <mist/mp4_generic.h>
#include <mist/mp4_encryption.h>
#include <mist/base64.h>
#include <mist/http_parser.h>
#include <mist/stream.h>
#include <unistd.h>
///\todo Maybe move to util?
long long unsigned int binToInt(std::string & binary) {
long long int result = 0;
for (int i = 0; i < 8; i++) {
result <<= 8;
result += binary[i];
}
return result;
}
std::string intToBin(long long unsigned int number) {
std::string result;
result.resize(8);
for (int i = 7; i >= 0; i--) {
result[i] = number & 0xFF;
number >>= 8;
}
return result;
}
std::string toUTF16(std::string original) {
std::string result;
result += (char)0xFF;
result += (char)0xFE;
for (std::string::iterator it = original.begin(); it != original.end(); it++) {
result += (*it);
result += (char)0x00;
}
return result;
}
namespace Mist {
OutHSS::OutHSS(Socket::Connection & conn) : Output(conn) { }
OutHSS::~OutHSS() {}
void OutHSS::init(Util::Config * cfg) {
capa["name"] = "HTTP_Smooth";
capa["desc"] = "Enables HTTP protocol Microsoft-specific smooth streaming through silverlight (also known as HSS).";
capa["deps"] = "HTTP";
capa["url_rel"] = "/smooth/$.ism/Manifest";
capa["url_prefix"] = "/smooth/$.ism/";
capa["socket"] = "http_hss";
capa["codecs"][0u][0u].append("H264");
capa["codecs"][0u][1u].append("AAC");
capa["methods"][0u]["handler"] = "http";
capa["methods"][0u]["type"] = "html5/application/vnd.ms-ss";
capa["methods"][0u]["priority"] = 9ll;
capa["methods"][0u]["nolive"] = 1;
capa["methods"][1u]["handler"] = "http";
capa["methods"][1u]["type"] = "silverlight";
capa["methods"][1u]["priority"] = 1ll;
capa["methods"][1u]["nolive"] = 1;
cfg->addBasicConnectorOptions(capa);
config = cfg;
}
void OutHSS::sendNext() {
if (currentPacket.getTime() >= playUntil) {
DEBUG_MSG(DLVL_DEVEL, "(%d) Done sending fragment %d:%d", getpid(), myTrackStor, myKeyStor);
stop();
wantRequest = true;
HTTP_S.Chunkify("", 0, myConn);
HTTP_R.Clean();
return;
}
char * dataPointer = 0;
int len = 0;
currentPacket.getString("data", dataPointer, len);
HTTP_S.Chunkify(dataPointer, len, myConn);
}
void OutHSS::onFail(){
HTTP_S.Clean(); //make sure no parts of old requests are left in any buffers
HTTP_S.SetBody("Stream not found. Sorry, we tried.");
HTTP_S.SendResponse("404", "Stream not found", myConn);
Output::onFail();
}
int OutHSS::canSeekms(unsigned int ms) {
//no tracks? Frame too new by definition.
if (!myMeta.tracks.size()) {
DEBUG_MSG(DLVL_DEVEL, "HSS Canseek to %d returns 1 because no tracks", ms);
return 1;
}
//loop through all selected tracks
for (std::set<unsigned long>::iterator it = selectedTracks.begin(); it != selectedTracks.end(); it++) {
//return "too late" if one track is past this point
if (ms < myMeta.tracks[*it].firstms) {
DEBUG_MSG(DLVL_DEVEL, "HSS Canseek to %d returns -1 because track %lu firstms == %d", ms, *it, myMeta.tracks[*it].firstms);
return -1;
}
//return "too early" if one track is not yet at this point
if (ms > myMeta.tracks[*it].lastms) {
DEBUG_MSG(DLVL_DEVEL, "HSS Canseek to %d returns 1 because track %lu lastms == %d", ms, *it, myMeta.tracks[*it].lastms);
return 1;
}
}
return 0;
}
void OutHSS::sendHeader() {
//We have a non-manifest request, parse it.
std::string Quality = HTTP_R.url.substr(HTTP_R.url.find("TrackID=", 8) + 8);
Quality = Quality.substr(0, Quality.find(")"));
std::string parseString = HTTP_R.url.substr(HTTP_R.url.find(")/") + 2);
parseString = parseString.substr(parseString.find("(") + 1);
long long int seekTime = atoll(parseString.substr(0, parseString.find(")")).c_str()) / 10000;
unsigned int tid = atoll(Quality.c_str());
selectedTracks.clear();
selectedTracks.insert(tid);
if (myMeta.live) {
updateMeta();
int seekable = canSeekms(seekTime / 10000);
if (seekable == 0){
// if the fragment in question is available, check if the next one is available too
for (std::deque<DTSC::Key>::iterator it = myMeta.tracks[tid].keys.begin(); it != myMeta.tracks[tid].keys.end(); it++){
if (it->getTime() >= (seekTime / 10000)){
if ((it + 1) == myMeta.tracks[tid].keys.end()){
seekable = 1;
}
break;
}
}
}
if (seekable < 0){
HTTP_S.Clean();
HTTP_S.SetBody("The requested fragment is no longer kept in memory on the server and cannot be served.\n");
myConn.SendNow(HTTP_S.BuildResponse("412", "Fragment out of range"));
HTTP_R.Clean(); //clean for any possible next requests
std::cout << "Fragment @ " << seekTime / 10000 << "ms too old (" << myMeta.tracks[tid].firstms << " - " << myMeta.tracks[tid].lastms << " ms)" << std::endl;
stop();
wantRequest = true;
return;
}
if (seekable > 0){
HTTP_S.Clean();
HTTP_S.SetBody("Proxy, re-request this in a second or two.\n");
myConn.SendNow(HTTP_S.BuildResponse("208", "Ask again later"));
HTTP_R.Clean(); //clean for any possible next requests
std::cout << "Fragment @ " << seekTime / 10000 << "ms not available yet (" << myMeta.tracks[tid].firstms << " - " << myMeta.tracks[tid].lastms << " ms)" << std::endl;
stop();
wantRequest = true;
return;
}
}
DEBUG_MSG(DLVL_DEVEL, "(%d) Seeking to time %lld on track %d", getpid(), seekTime, tid);
seek(seekTime);
playUntil = (*(keyTimes[tid].upper_bound(seekTime)));
DEBUG_MSG(DLVL_DEVEL, "Set playUntil to %lld", playUntil);
myTrackStor = tid;
myKeyStor = seekTime;
keysToSend = 1;
//Seek to the right place and send a play-once for a single fragment.
std::stringstream sstream;
int partOffset = 0;
int keyDur = 0;
DTSC::Key keyObj;
for (std::deque<DTSC::Key>::iterator it = myMeta.tracks[tid].keys.begin(); it != myMeta.tracks[tid].keys.end(); it++) {
if (it->getTime() >= seekTime) {
keyObj = (*it);
keyDur = it->getLength();
std::deque<DTSC::Key>::iterator nextIt = it;
nextIt++;
if (nextIt == myMeta.tracks[tid].keys.end()) {
if (myMeta.live) {
HTTP_S.Clean();
HTTP_S.SetBody("Proxy, re-request this in a second or two.\n");
myConn.SendNow(HTTP_S.BuildResponse("208", "Ask again later"));
HTTP_R.Clean(); //clean for any possible next requests
std::cout << "Fragment after fragment @ " << (seekTime / 10000) << " not available yet" << std::endl;
}
}
break;
}
partOffset += it->getParts();
}
if (HTTP_R.url == "/") {
return; //Don't continue, but continue instead.
}
/*
if (myMeta.live) {
if (mstime == 0 && (seekTime / 10000) > 1){
HTTP_S.Clean();
HTTP_S.SetBody("The requested fragment is no longer kept in memory on the server and cannot be served.\n");
myConn.SendNow(HTTP_S.BuildResponse("412", "Fragment out of range"));
HTTP_R.Clean(); //clean for any possible next requests
std::cout << "Fragment @ " << (seekTime / 10000) << " too old" << std::endl;
continue;
}
}
*/
///\todo Select correct track (tid);
//Wrap everything in mp4 boxes
MP4::MFHD mfhd_box;
mfhd_box.setSequenceNumber(((keyObj.getNumber() - 1) * 2) + tid);///\todo Urgent: Check this for multitrack... :P wtf... :P
MP4::TFHD tfhd_box;
tfhd_box.setFlags(MP4::tfhdSampleFlag);
tfhd_box.setTrackID(tid);
if (myMeta.tracks[tid].type == "video") {
tfhd_box.setDefaultSampleFlags(0x00004001);
} else {
tfhd_box.setDefaultSampleFlags(0x00008002);
}
MP4::TRUN trun_box;
trun_box.setDataOffset(42);///\todo Check if this is a placeholder, or an actually correct number
unsigned int keySize = 0;
if (myMeta.tracks[tid].type == "video") {
trun_box.setFlags(MP4::trundataOffset | MP4::trunfirstSampleFlags | MP4::trunsampleDuration | MP4::trunsampleSize | MP4::trunsampleOffsets);
} else {
trun_box.setFlags(MP4::trundataOffset | MP4::trunsampleDuration | MP4::trunsampleSize);
}
trun_box.setFirstSampleFlags(0x00004002);
for (int i = 0; i < keyObj.getParts(); i++) {
MP4::trunSampleInformation trunSample;
trunSample.sampleSize = myMeta.tracks[tid].parts[i + partOffset].getSize();
keySize += myMeta.tracks[tid].parts[i + partOffset].getSize();
trunSample.sampleDuration = myMeta.tracks[tid].parts[i + partOffset].getDuration() * 10000;
if (myMeta.tracks[tid].type == "video") {
trunSample.sampleOffset = myMeta.tracks[tid].parts[i + partOffset].getOffset() * 10000;
}
trun_box.setSampleInformation(trunSample, i);
}
MP4::SDTP sdtp_box;
sdtp_box.setVersion(0);
if (myMeta.tracks[tid].type == "video") {
sdtp_box.setValue(36, 4);
for (int i = 1; i < keyObj.getParts(); i++) {
sdtp_box.setValue(20, 4 + i);
}
} else {
sdtp_box.setValue(40, 4);
for (int i = 1; i < keyObj.getParts(); i++) {
sdtp_box.setValue(40, 4 + i);
}
}
MP4::TRAF traf_box;
traf_box.setContent(tfhd_box, 0);
traf_box.setContent(trun_box, 1);
traf_box.setContent(sdtp_box, 2);
//If the stream is live, we want to have a fragref box if possible
//////HEREHEREHERE
if (myMeta.live) {
MP4::UUID_TrackFragmentReference fragref_box;
fragref_box.setVersion(1);
fragref_box.setFragmentCount(0);
int fragCount = 0;
for (unsigned int i = 0; fragCount < 2 && i < myMeta.tracks[tid].keys.size() - 1; i++) {
if (myMeta.tracks[tid].keys[i].getTime() > seekTime) {
DEBUG_MSG(DLVL_DEVEL, "Key %d added to fragRef box, time %ld > %lld", i, myMeta.tracks[tid].keys[i].getTime(), seekTime);
fragref_box.setTime(fragCount, myMeta.tracks[tid].keys[i].getTime() * 10000);
fragref_box.setDuration(fragCount, myMeta.tracks[tid].keys[i].getLength() * 10000);
fragref_box.setFragmentCount(++fragCount);
}
}
traf_box.setContent(fragref_box, 3);
}
MP4::MOOF moof_box;
moof_box.setContent(mfhd_box, 0);
moof_box.setContent(traf_box, 1);
//Setting the correct offsets.
moof_box.setContent(traf_box, 1);
trun_box.setDataOffset(moof_box.boxedSize() + 8);
traf_box.setContent(trun_box, 1);
moof_box.setContent(traf_box, 1);
HTTP_S.Clean();
HTTP_S.SetHeader("Content-Type", "video/mp4");
HTTP_S.StartResponse(HTTP_R, myConn);
HTTP_S.Chunkify(moof_box.asBox(), moof_box.boxedSize(), myConn);
int size = htonl(keySize + 8);
HTTP_S.Chunkify((char *)&size, 4, myConn);
HTTP_S.Chunkify("mdat", 4, myConn);
sentHeader = true;
HTTP_R.Clean();
DEBUG_MSG(DLVL_DEVEL, "(%d) Sent full header", getpid());
}
///\brief Builds an index file for HTTP Smooth streaming.
///\return The index file for HTTP Smooth Streaming.
std::string OutHSS::smoothIndex(){
updateMeta();
std::stringstream Result;
Result << "<?xml version=\"1.0\" encoding=\"utf-16\"?>\n";
Result << "<SmoothStreamingMedia "
"MajorVersion=\"2\" "
"MinorVersion=\"0\" "
"TimeScale=\"10000000\" ";
std::deque<std::map<int, DTSC::Track>::iterator> audioIters;
std::deque<std::map<int, DTSC::Track>::iterator> videoIters;
long long int maxWidth = 0;
long long int maxHeight = 0;
long long int minWidth = 99999999;
long long int minHeight = 99999999;
for (std::map<int, DTSC::Track>::iterator it = myMeta.tracks.begin(); it != myMeta.tracks.end(); it++) {
if (it->second.codec == "AAC") {
audioIters.push_back(it);
}
if (it->second.codec == "H264") {
videoIters.push_back(it);
if (it->second.width > maxWidth) {
maxWidth = it->second.width;
}
if (it->second.width < minWidth) {
minWidth = it->second.width;
}
if (it->second.height > maxHeight) {
maxHeight = it->second.height;
}
if (it->second.height < minHeight) {
minHeight = it->second.height;
}
}
}
DEBUG_MSG(DLVL_DEVEL, "Buffer window here %lld", myMeta.bufferWindow);
if (myMeta.vod) {
Result << "Duration=\"" << (*videoIters.begin())->second.lastms << "0000\"";
} else {
Result << "Duration=\"0\" "
"IsLive=\"TRUE\" "
"LookAheadFragmentCount=\"2\" "
"DVRWindowLength=\"" << myMeta.bufferWindow << "0000\" "
"CanSeek=\"TRUE\" "
"CanPause=\"TRUE\" ";
}
Result << ">\n";
//Add audio entries
if (audioIters.size()) {
Result << "<StreamIndex "
"Type=\"audio\" "
"QualityLevels=\"" << audioIters.size() << "\" "
"Name=\"audio\" "
"Chunks=\"" << (*audioIters.begin())->second.keys.size() << "\" "
"Url=\"Q({bitrate},{CustomAttributes})/A({start time})\">\n";
int index = 0;
for (std::deque<std::map<int, DTSC::Track>::iterator>::iterator it = audioIters.begin(); it != audioIters.end(); it++) {
Result << "<QualityLevel "
"Index=\"" << index << "\" "
"Bitrate=\"" << (*it)->second.bps * 8 << "\" "
"CodecPrivateData=\"" << std::hex;
for (unsigned int i = 0; i < (*it)->second.init.size(); i++) {
Result << std::setfill('0') << std::setw(2) << std::right << (int)(*it)->second.init[i];
}
Result << std::dec << "\" "
"SamplingRate=\"" << (*it)->second.rate << "\" "
"Channels=\"2\" "
"BitsPerSample=\"16\" "
"PacketSize=\"4\" "
"AudioTag=\"255\" "
"FourCC=\"AACL\" >\n";
Result << "<CustomAttributes>\n"
"<Attribute Name = \"TrackID\" Value = \"" << (*it)->first << "\" />"
"</CustomAttributes>";
Result << "</QualityLevel>\n";
index++;
}
if ((*audioIters.begin())->second.keys.size()) {
for (std::deque<DTSC::Key>::iterator it = (*audioIters.begin())->second.keys.begin(); it != (((*audioIters.begin())->second.keys.end()) - 1); it++) {
Result << "<c ";
if (it == (*audioIters.begin())->second.keys.begin()) {
Result << "t=\"" << it->getTime() * 10000 << "\" ";
}
Result << "d=\"" << it->getLength() * 10000 << "\" />\n";
}
}
Result << "</StreamIndex>\n";
}
//Add video entries
if (videoIters.size()) {
Result << "<StreamIndex "
"Type=\"video\" "
"QualityLevels=\"" << videoIters.size() << "\" "
"Name=\"video\" "
"Chunks=\"" << (*videoIters.begin())->second.keys.size() << "\" "
"Url=\"Q({bitrate},{CustomAttributes})/V({start time})\" "
"MaxWidth=\"" << maxWidth << "\" "
"MaxHeight=\"" << maxHeight << "\" "
"DisplayWidth=\"" << maxWidth << "\" "
"DisplayHeight=\"" << maxHeight << "\">\n";
int index = 0;
for (std::deque<std::map<int, DTSC::Track>::iterator>::iterator it = videoIters.begin(); it != videoIters.end(); it++) {
//Add video qualities
Result << "<QualityLevel "
"Index=\"" << index << "\" "
"Bitrate=\"" << (*it)->second.bps * 8 << "\" "
"CodecPrivateData=\"" << std::hex;
MP4::AVCC avccbox;
avccbox.setPayload((*it)->second.init);
std::string tmpString = avccbox.asAnnexB();
for (unsigned int i = 0; i < tmpString.size(); i++) {
Result << std::setfill('0') << std::setw(2) << std::right << (int)tmpString[i];
}
Result << std::dec << "\" "
"MaxWidth=\"" << (*it)->second.width << "\" "
"MaxHeight=\"" << (*it)->second.height << "\" "
"FourCC=\"AVC1\" >\n";
Result << "<CustomAttributes>\n"
"<Attribute Name = \"TrackID\" Value = \"" << (*it)->first << "\" />"
"</CustomAttributes>";
Result << "</QualityLevel>\n";
index++;
}
if ((*videoIters.begin())->second.keys.size()) {
for (std::deque<DTSC::Key>::iterator it = (*videoIters.begin())->second.keys.begin(); it != (((*videoIters.begin())->second.keys.end()) - 1); it++) {
Result << "<c ";
if (it == (*videoIters.begin())->second.keys.begin()) {
Result << "t=\"" << it->getTime() * 10000 << "\" ";
}
Result << "d=\"" << it->getLength() * 10000 << "\" />\n";
}
}
Result << "</StreamIndex>\n";
}
Result << "</SmoothStreamingMedia>\n";
#if DEBUG >= 8
std::cerr << "Sending this manifest:" << std::endl << Result << std::endl;
#endif
return toUTF16(Result.str());
} //smoothIndex
void OutHSS::onRequest() {
sentHeader = false;
while (HTTP_R.Read(myConn)) {
DEBUG_MSG(DLVL_DEVEL, "(%d) Received request %s", getpid(), HTTP_R.getUrl().c_str());
myConn.setHost(HTTP_R.GetHeader("X-Origin"));
streamName = HTTP_R.GetHeader("X-Stream");
initialize();
if (HTTP_R.url.find("Manifest") != std::string::npos) {
//Manifest, direct reply
HTTP_S.Clean();
HTTP_S.SetHeader("Content-Type", "text/xml");
HTTP_S.SetHeader("Cache-Control", "no-cache");
std::string manifest = smoothIndex();
HTTP_S.SetBody(manifest);
HTTP_S.SendResponse("200", "OK", myConn);
HTTP_R.Clean();
} else {
parseData = true;
wantRequest = false;
}
}
}
void OutHSS::initialize() {
Output::initialize();
for (std::map<int, DTSC::Track>::iterator it = myMeta.tracks.begin(); it != myMeta.tracks.end(); it++) {
for (std::deque<DTSC::Key>::iterator it2 = it->second.keys.begin(); it2 != it->second.keys.end(); it2++) {
keyTimes[it->first].insert(it2->getTime());
}
}
}
}
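
The CodecPrivateData attributes in smoothIndex() above are built by streaming each byte of the track's init data (or the Annex B form of the AVCC box, for video) through std::hex. A minimal sketch of that encoding as a standalone helper (hypothetical name); the cast goes through unsigned char so that bytes of 0x80 and above do not sign-extend when converted to int:

#include <iomanip>
#include <sstream>
#include <string>

// Hex-encodes a binary init blob, two hex digits per byte.
std::string initToHex(const std::string & init) {
  std::stringstream r;
  r << std::hex;
  for (std::string::size_type i = 0; i < init.size(); ++i) {
    r << std::setfill('0') << std::setw(2) << (int)(unsigned char)init[i];
  }
  return r.str();
}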

29
src/output/output_hss.h Normal file
View file

@@ -0,0 +1,29 @@
#include "output.h"
#include <mist/http_parser.h>
namespace Mist {
class OutHSS : public Output {
public:
OutHSS(Socket::Connection & conn);
~OutHSS();
static void init(Util::Config * cfg);
void onRequest();
void sendNext();
void initialize();
void onFail();
void sendHeader();
protected:
HTTP::Parser HTTP_S;
HTTP::Parser HTTP_R;
JSON::Value encryption;
std::string smoothIndex();
int canSeekms(unsigned int ms);
int keysToSend;
int myTrackStor;
int myKeyStor;
long long int playUntil;
};
}
typedef Mist::OutHSS mistOut;

84
src/output/output_json.cpp Normal file
View file

@@ -0,0 +1,84 @@
#include "output_json.h"
#include <mist/http_parser.h>
#include <mist/defines.h>
#include <iomanip>
namespace Mist {
OutJSON::OutJSON(Socket::Connection & conn) : Output(conn){
realTime = 0;
}
OutJSON::~OutJSON() {}
void OutJSON::init(Util::Config * cfg){
capa["desc"] = "Enables HTTP protocol JSON streaming.";
capa["deps"] = "HTTP";
capa["url_rel"] = "/$.json";
capa["url_match"] = "/$.json";
capa["url_handler"] = "http";
capa["url_type"] = "json";
capa["socket"] = "http_json";
cfg->addBasicConnectorOptions(capa);
config = cfg;
}
void OutJSON::sendNext(){
if(!first) {
myConn.SendNow(", ", 2);
}else{
if (jsonp == ""){
myConn.SendNow("[", 1);
}else{
myConn.SendNow(jsonp + "([");
}
first = false;
}
myConn.SendNow(currentPacket.toJSON().toString());
}
void OutJSON::sendHeader(){
HTTP::Parser HTTP_S;
FLV::Tag tag;
HTTP_S.SetHeader("Content-Type", "text/javascript");
HTTP_S.protocol = "HTTP/1.0";
myConn.SendNow(HTTP_S.BuildResponse("200", "OK"));
sentHeader = true;
}
bool OutJSON::onFinish(){
if (jsonp == ""){
myConn.SendNow("]\n\n", 3);
}else{
myConn.SendNow("]);\n\n", 5);
}
return false;
}
void OutJSON::onRequest(){
HTTP::Parser HTTP_R;
while (HTTP_R.Read(myConn)){
DEBUG_MSG(DLVL_DEVEL, "Received request %s", HTTP_R.getUrl().c_str());
first = true;
myConn.setHost(HTTP_R.GetHeader("X-Origin"));
streamName = HTTP_R.GetHeader("X-Stream");
jsonp = "";
if (HTTP_R.GetVar("callback") != ""){
jsonp = HTTP_R.GetVar("callback");
}
if (HTTP_R.GetVar("jsonp") != ""){
jsonp = HTTP_R.GetVar("jsonp");
}
initialize();
for (std::map<int,DTSC::Track>::iterator it = myMeta.tracks.begin(); it != myMeta.tracks.end(); it++){
if (it->second.type == "meta" ){
selectedTracks.insert(it->first);
}
}
seek(0);
parseData = true;
wantRequest = false;
HTTP_R.Clean();
}
}
}
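
OutJSON above frames its output as a single JSON array of packets, or, when a callback/jsonp query parameter was given, as a JSONP call wrapping that array: sendNext() emits the opening bracket and separators, onFinish() the closing bracket. A minimal sketch (not part of this commit, with hypothetical packet data) of what ends up on the wire:

#include <iostream>
#include <string>
#include <vector>

int main() {
  std::vector<std::string> packets;                    // stand-ins for currentPacket.toJSON()
  packets.push_back("{\"time\":0}");
  packets.push_back("{\"time\":20}");
  std::string jsonp = "cb";                            // empty string = plain JSON array
  std::cout << (jsonp.empty() ? std::string("[") : jsonp + "([");
  for (unsigned int i = 0; i < packets.size(); ++i) {
    if (i) { std::cout << ", "; }                      // separator before every non-first packet
    std::cout << packets[i];
  }
  std::cout << (jsonp.empty() ? "]\n\n" : "]);\n\n");  // what onFinish() appends
  return 0;
}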

20
src/output/output_json.h Normal file
View file

@@ -0,0 +1,20 @@
#include "output.h"
namespace Mist {
class OutJSON : public Output {
public:
OutJSON(Socket::Connection & conn);
~OutJSON();
static void init(Util::Config * cfg);
void onRequest();
bool onFinish();
void sendNext();
void sendHeader();
protected:
std::string jsonp;
bool first;
};
}
typedef Mist::OutJSON mistOut;

88
src/output/output_progressive_flv.cpp Normal file
View file

@@ -0,0 +1,88 @@
#include "output_progressive_flv.h"
#include <mist/http_parser.h>
#include <mist/defines.h>
namespace Mist {
OutProgressiveFLV::OutProgressiveFLV(Socket::Connection & conn) : Output(conn) { }
OutProgressiveFLV::~OutProgressiveFLV() {}
void OutProgressiveFLV::init(Util::Config * cfg){
capa["name"] = "HTTP_Progressive_FLV";
capa["desc"] = "Enables HTTP protocol progressive streaming.";
capa["deps"] = "HTTP";
capa["url_rel"] = "/$.flv";
capa["url_match"] = "/$.flv";
capa["socket"] = "http_progressive_flv";
capa["codecs"][0u][0u].append("H264");
capa["codecs"][0u][0u].append("H263");
capa["codecs"][0u][0u].append("VP6");
capa["codecs"][0u][1u].append("AAC");
capa["codecs"][0u][1u].append("MP3");
capa["methods"][0u]["handler"] = "http";
capa["methods"][0u]["type"] = "flash/7";
capa["methods"][0u]["priority"] = 5ll;
cfg->addBasicConnectorOptions(capa);
config = cfg;
}
void OutProgressiveFLV::sendNext(){
FLV::Tag tag;
bool tmp = tag.DTSCLoader(currentPacket, myMeta.tracks[currentPacket.getTrackId()]);
if (!tmp){
DEBUG_MSG(DLVL_DEVEL, "Invalid JSON");
}
myConn.SendNow(tag.data, tag.len);
}
void OutProgressiveFLV::sendHeader(){
HTTP::Parser HTTP_S;
FLV::Tag tag;
HTTP_S.SetHeader("Content-Type", "video/x-flv");
HTTP_S.protocol = "HTTP/1.0";
myConn.SendNow(HTTP_S.BuildResponse("200", "OK"));
myConn.SendNow(FLV::Header, 13);
tag.DTSCMetaInit(myMeta, selectedTracks);
myConn.SendNow(tag.data, tag.len);
for (std::set<long unsigned int>::iterator it = selectedTracks.begin(); it != selectedTracks.end(); it++){
if (myMeta.tracks[*it].type == "video"){
tag.DTSCVideoInit(myMeta.tracks[*it]);
myConn.SendNow(tag.data, tag.len);
}
if (myMeta.tracks[*it].type == "audio"){
tag.DTSCAudioInit(myMeta.tracks[*it]);
myConn.SendNow(tag.data, tag.len);
}
}
sentHeader = true;
}
void OutProgressiveFLV::onFail(){
HTTP::Parser HTTP_S;
HTTP_S.Clean(); //make sure no parts of old requests are left in any buffers
HTTP_S.SetBody("Stream not found. Sorry, we tried.");
HTTP_S.SendResponse("404", "Stream not found", myConn);
Output::onFail();
}
void OutProgressiveFLV::onRequest(){
HTTP::Parser HTTP_R;
while (HTTP_R.Read(myConn)){
DEBUG_MSG(DLVL_DEVEL, "Received request %s", HTTP_R.getUrl().c_str());
if (HTTP_R.GetVar("audio") != ""){
selectedTracks.insert(JSON::Value(HTTP_R.GetVar("audio")).asInt());
}
if (HTTP_R.GetVar("video") != ""){
selectedTracks.insert(JSON::Value(HTTP_R.GetVar("video")).asInt());
}
myConn.setHost(HTTP_R.GetHeader("X-Origin"));
streamName = HTTP_R.GetHeader("X-Stream");
parseData = true;
wantRequest = false;
HTTP_R.Clean();
}
}
}
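
OutProgressiveFLV::sendHeader() above starts the stream with FLV::Header, the 13-byte FLV file preamble, before the metadata and per-track init tags. As a reference (the exact constant lives in the FLV library, so treat this as illustrative), the standard layout of those 13 bytes is the "FLV" signature, a version byte, an audio/video flags byte, the 4-byte header length, and the always-zero PreviousTagSize0 field:

#include <cstdio>

int main() {
  const unsigned char flvHeader[13] = {
    'F', 'L', 'V', 0x01,     // signature and version
    0x05,                    // flags: 0x04 = has audio, 0x01 = has video
    0x00, 0x00, 0x00, 0x09,  // header length (always 9)
    0x00, 0x00, 0x00, 0x00   // PreviousTagSize0 (always 0)
  };
  fwrite(flvHeader, 1, sizeof(flvHeader), stdout);
  return 0;
}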

18
src/output/output_progressive_flv.h Normal file
View file

@@ -0,0 +1,18 @@
#include "output.h"
namespace Mist {
class OutProgressiveFLV : public Output {
public:
OutProgressiveFLV(Socket::Connection & conn);
~OutProgressiveFLV();
static void init(Util::Config * cfg);
void onRequest();
void sendNext();
void onFail();
void sendHeader();
protected:
};
}
typedef Mist::OutProgressiveFLV mistOut;

65
src/output/output_progressive_mp3.cpp Normal file
View file

@@ -0,0 +1,65 @@
#include "output_progressive_mp3.h"
#include <mist/http_parser.h>
#include <mist/defines.h>
namespace Mist {
OutProgressiveMP3::OutProgressiveMP3(Socket::Connection & conn) : Output(conn) { }
OutProgressiveMP3::~OutProgressiveMP3() {}
void OutProgressiveMP3::init(Util::Config * cfg){
capa["name"] = "HTTP_Progressive_MP3";
capa["desc"] = "Enables HTTP protocol progressive streaming.";
capa["deps"] = "HTTP";
capa["url_rel"] = "/$.mp3";
capa["url_match"] = "/$.mp3";
capa["socket"] = "http_progressive_mp3";
capa["codecs"][0u][0u].append("MP3");
capa["methods"][0u]["handler"] = "http";
capa["methods"][0u]["type"] = "mp3";
capa["methods"][0u]["priority"] = 8ll;
cfg->addBasicConnectorOptions(capa);
config = cfg;
}
void OutProgressiveMP3::sendNext(){
char * dataPointer = 0;
int len = 0;
currentPacket.getString("data", dataPointer, len);
myConn.SendNow(dataPointer, len);
}
void OutProgressiveMP3::sendHeader(){
HTTP::Parser HTTP_S;
FLV::Tag tag;
HTTP_S.SetHeader("Content-Type", "audio/mpeg");
HTTP_S.protocol = "HTTP/1.0";
myConn.SendNow(HTTP_S.BuildResponse("200", "OK"));
sentHeader = true;
}
void OutProgressiveMP3::onFail(){
HTTP::Parser HTTP_S;
HTTP_S.Clean(); //make sure no parts of old requests are left in any buffers
HTTP_S.SetBody("Stream not found. Sorry, we tried.");
HTTP_S.SendResponse("404", "Stream not found", myConn);
Output::onFail();
}
void OutProgressiveMP3::onRequest(){
HTTP::Parser HTTP_R;
while (HTTP_R.Read(myConn)){
DEBUG_MSG(DLVL_DEVEL, "Received request %s", HTTP_R.getUrl().c_str());
if (HTTP_R.GetVar("audio") != ""){
selectedTracks.insert(JSON::Value(HTTP_R.GetVar("audio")).asInt());
}
myConn.setHost(HTTP_R.GetHeader("X-Origin"));
streamName = HTTP_R.GetHeader("X-Stream");
parseData = true;
wantRequest = false;
HTTP_R.Clean();
}
}
}

18
src/output/output_progressive_mp3.h Normal file
View file

@@ -0,0 +1,18 @@
#include "output.h"
namespace Mist {
class OutProgressiveMP3 : public Output {
public:
OutProgressiveMP3(Socket::Connection & conn);
~OutProgressiveMP3();
static void init(Util::Config * cfg);
void onRequest();
void sendNext();
void onFail();
void sendHeader();
protected:
};
}
typedef Mist::OutProgressiveMP3 mistOut;

558
src/output/output_progressive_mp4.cpp Normal file
View file

@@ -0,0 +1,558 @@
#include "output_progressive_mp4.h"
#include <mist/defines.h>
#include <mist/mp4.h>
#include <mist/mp4_generic.h>
namespace Mist {
OutProgressiveMP4::OutProgressiveMP4(Socket::Connection & conn) : Output(conn) { }
OutProgressiveMP4::~OutProgressiveMP4() {}
void OutProgressiveMP4::init(Util::Config * cfg){
capa["name"] = "HTTP_Progressive_MP4";
capa["desc"] = "Enables HTTP protocol progressive streaming.";
capa["deps"] = "HTTP";
capa["url_rel"] = "/$.mp4";
capa["url_match"] = "/$.mp4";
capa["socket"] = "http_progressive_mp4";
capa["codecs"][0u][0u].append("H264");
capa["codecs"][0u][1u].append("AAC");
capa["methods"][0u]["handler"] = "http";
capa["methods"][0u]["type"] = "html5/video/mp4";
capa["methods"][0u]["priority"] = 8ll;
capa["methods"][0u]["nolive"] = 1;
cfg->addBasicConnectorOptions(capa);
config = cfg;
}
std::string OutProgressiveMP4::DTSCMeta2MP4Header(long long & size){
std::stringstream header;
//ftyp box
MP4::FTYP ftypBox;
header << std::string(ftypBox.asBox(),ftypBox.boxedSize());
uint64_t mdatSize = 0;
//moov box
MP4::MOOV moovBox;
unsigned int moovOffset = 0;
{
//calculating longest duration
long long int firstms = -1;
long long int lastms = -1;
for (std::set<long unsigned int>::iterator it = selectedTracks.begin(); it != selectedTracks.end(); it++) {
if (lastms == -1 || lastms < myMeta.tracks[*it].lastms){
lastms = myMeta.tracks[*it].lastms;
}
if (firstms == -1 || firstms > myMeta.tracks[*it].firstms){
firstms = myMeta.tracks[*it].firstms;
}
}
MP4::MVHD mvhdBox(lastms - firstms);
moovBox.setContent(mvhdBox, moovOffset++);
}
for (std::set<long unsigned int>::iterator it = selectedTracks.begin(); it != selectedTracks.end(); it++) {
MP4::TRAK trakBox;
{
{
MP4::TKHD tkhdBox(*it, myMeta.tracks[*it].lastms - myMeta.tracks[*it].firstms, myMeta.tracks[*it].width, myMeta.tracks[*it].height);
trakBox.setContent(tkhdBox, 0);
}{
MP4::MDIA mdiaBox;
unsigned int mdiaOffset = 0;
{
MP4::MDHD mdhdBox(myMeta.tracks[*it].lastms - myMeta.tracks[*it].firstms);
mdiaBox.setContent(mdhdBox, mdiaOffset++);
}//MDHD box
{
MP4::HDLR hdlrBox(myMeta.tracks[*it].type, myMeta.tracks[*it].getIdentifier());
mdiaBox.setContent(hdlrBox, mdiaOffset++);
}//hdlr box
{
MP4::MINF minfBox;
unsigned int minfOffset = 0;
if (myMeta.tracks[*it].type== "video"){
MP4::VMHD vmhdBox;
vmhdBox.setFlags(1);
minfBox.setContent(vmhdBox,minfOffset++);
}else if (myMeta.tracks[*it].type == "audio"){
MP4::SMHD smhdBox;
minfBox.setContent(smhdBox,minfOffset++);
}//type box
{
MP4::DINF dinfBox;
MP4::DREF drefBox;
dinfBox.setContent(drefBox,0);
minfBox.setContent(dinfBox,minfOffset++);
}//dinf box
{
MP4::STBL stblBox;
unsigned int offset = 0;
{
MP4::STSD stsdBox;
stsdBox.setVersion(0);
if (myMeta.tracks[*it].type == "video"){//boxname = codec
MP4::VisualSampleEntry vse;
if (myMeta.tracks[*it].codec == "H264"){
vse.setCodec("avc1");
}
vse.setDataReferenceIndex(1);
vse.setWidth(myMeta.tracks[*it].width);
vse.setHeight(myMeta.tracks[*it].height);
MP4::AVCC avccBox;
avccBox.setPayload(myMeta.tracks[*it].init);
vse.setCLAP(avccBox);
stsdBox.setEntry(vse,0);
}else if(myMeta.tracks[*it].type == "audio"){//boxname = codec
MP4::AudioSampleEntry ase;
if (myMeta.tracks[*it].codec == "AAC"){
ase.setCodec("mp4a");
ase.setDataReferenceIndex(1);
}
ase.setSampleRate(myMeta.tracks[*it].rate);
ase.setChannelCount(myMeta.tracks[*it].channels);
ase.setSampleSize(myMeta.tracks[*it].size);
//MP4::ESDS esdsBox(myMeta.tracks[*it].init, myMeta.tracks[*it].bps);
MP4::ESDS esdsBox;
//outputting these values first, so malloc isn't called as often.
esdsBox.setESHeaderStartCodes(myMeta.tracks[*it].init);
esdsBox.setSLValue(2);
esdsBox.setESDescriptorTypeLength(32+myMeta.tracks[*it].init.size());
esdsBox.setESID(2);
esdsBox.setStreamPriority(0);
esdsBox.setDecoderConfigDescriptorTypeLength(18 + myMeta.tracks[*it].init.size());
esdsBox.setByteObjectTypeID(0x40);
esdsBox.setStreamType(5);
esdsBox.setReservedFlag(1);
esdsBox.setBufferSize(1250000);
esdsBox.setMaximumBitRate(10000000);
esdsBox.setAverageBitRate(myMeta.tracks[*it].bps * 8);
esdsBox.setConfigDescriptorTypeLength(5);
esdsBox.setSLConfigDescriptorTypeTag(0x6);
esdsBox.setSLConfigExtendedDescriptorTypeTag(0x808080);
esdsBox.setSLDescriptorTypeLength(1);
ase.setCodecBox(esdsBox);
stsdBox.setEntry(ase,0);
}
stblBox.setContent(stsdBox,offset++);
}//stsd box
{
MP4::STTS sttsBox;
sttsBox.setVersion(0);
if (myMeta.tracks[*it].parts.size()){
for (unsigned int part = 0; part < myMeta.tracks[*it].parts.size(); part++){
MP4::STTSEntry newEntry;
newEntry.sampleCount = 1;
newEntry.sampleDelta = myMeta.tracks[*it].parts[part].getDuration();
sttsBox.setSTTSEntry(newEntry, part);
}
}
stblBox.setContent(sttsBox,offset++);
}//stts box
if (myMeta.tracks[*it].type == "video"){
//STSS Box here
MP4::STSS stssBox;
stssBox.setVersion(0);
int tmpCount = 0;
int tmpItCount = 0;
for ( std::deque< DTSC::Key>::iterator tmpIt = myMeta.tracks[*it].keys.begin(); tmpIt != myMeta.tracks[*it].keys.end(); tmpIt ++) {
stssBox.setSampleNumber(tmpCount,tmpItCount);
tmpCount += tmpIt->getParts();
tmpItCount ++;
}
stblBox.setContent(stssBox,offset++);
}//stss box
{
MP4::STSC stscBox;
stscBox.setVersion(0);
MP4::STSCEntry stscEntry;
stscEntry.firstChunk = 1;
stscEntry.samplesPerChunk = 1;
stscEntry.sampleDescriptionIndex = 1;
stscBox.setSTSCEntry(stscEntry, 0);
stblBox.setContent(stscBox,offset++);
}//stsc box
{
uint32_t total = 0;
MP4::STSZ stszBox;
stszBox.setVersion(0);
total = 0;
for (std::deque< DTSC::Part>::iterator partIt = myMeta.tracks[*it].parts.begin(); partIt != myMeta.tracks[*it].parts.end(); partIt ++) {
stszBox.setEntrySize(partIt->getSize(), total);//in bytes in file
size += partIt->getSize();
total++;
}
stblBox.setContent(stszBox,offset++);
}//stsz box
//add STCO boxes here
{
MP4::STCO stcoBox;
stcoBox.setVersion(1);
//Inserting empty values on purpose here, will be fixed later.
if (myMeta.tracks[*it].parts.size() != 0){
stcoBox.setChunkOffset(0, myMeta.tracks[*it].parts.size() - 1);//this inserts all empty entries at once
}
stblBox.setContent(stcoBox,offset++);
}//stco box
minfBox.setContent(stblBox,minfOffset++);
}//stbl box
mdiaBox.setContent(minfBox, mdiaOffset++);
}//minf box
trakBox.setContent(mdiaBox, 1);
}
}//trak Box
moovBox.setContent(trakBox, moovOffset++);
}
//initial offset length ftyp, length moov + 8
unsigned long long int byteOffset = ftypBox.boxedSize() + moovBox.boxedSize() + 8;
//update all STCO from the following map;
std::map <int, MP4::STCO> checkStcoBoxes;
//for all tracks
for (unsigned int i = 1; i < moovBox.getContentCount(); i++){
//10 lines to get the STCO box.
MP4::TRAK checkTrakBox;
MP4::Box checkMdiaBox;
MP4::Box checkTkhdBox;
MP4::MINF checkMinfBox;
MP4::STBL checkStblBox;
//MP4::STCO checkStcoBox;
checkTrakBox = ((MP4::TRAK&)moovBox.getContent(i));
for (unsigned int j = 0; j < checkTrakBox.getContentCount(); j++){
if (checkTrakBox.getContent(j).isType("mdia")){
checkMdiaBox = checkTrakBox.getContent(j);
break;
}
if (checkTrakBox.getContent(j).isType("tkhd")){
checkTkhdBox = checkTrakBox.getContent(j);
}
}
for (unsigned int j = 0; j < ((MP4::MDIA&)checkMdiaBox).getContentCount(); j++){
if (((MP4::MDIA&)checkMdiaBox).getContent(j).isType("minf")){
checkMinfBox = ((MP4::MINF&)((MP4::MDIA&)checkMdiaBox).getContent(j));
break;
}
}
for (unsigned int j = 0; j < checkMinfBox.getContentCount(); j++){
if (checkMinfBox.getContent(j).isType("stbl")){
checkStblBox = ((MP4::STBL&)checkMinfBox.getContent(j));
break;
}
}
for (unsigned int j = 0; j < checkStblBox.getContentCount(); j++){
if (checkStblBox.getContent(j).isType("stco")){
checkStcoBoxes.insert( std::pair<int, MP4::STCO>(((MP4::TKHD&)checkTkhdBox).getTrackID(), ((MP4::STCO&)checkStblBox.getContent(j)) ));
break;
}
}
}
//inserting right values in the STCO box header
//total = 0;
long long unsigned int totalByteOffset = 0;
//Current values are actual byte offset without header-sized offset
std::set <keyPart> sortSet;//filling sortset for interleaving parts
for (std::set<long unsigned int>::iterator subIt = selectedTracks.begin(); subIt != selectedTracks.end(); subIt++) {
keyPart temp;
temp.trackID = *subIt;
temp.time = myMeta.tracks[*subIt].firstms;//timestamp of frame
temp.endTime = myMeta.tracks[*subIt].firstms + myMeta.tracks[*subIt].parts[0].getDuration();
temp.size = myMeta.tracks[*subIt].parts[0].getSize();//byte size of frame (all parts together)
temp.index = 0;
sortSet.insert(temp);
}
while (!sortSet.empty()){
//setting the right STCO size in the STCO box
checkStcoBoxes[sortSet.begin()->trackID].setChunkOffset(totalByteOffset + byteOffset, sortSet.begin()->index);
totalByteOffset += sortSet.begin()->size;
//add keyPart to sortSet
keyPart temp;
temp.index = sortSet.begin()->index + 1;
temp.trackID = sortSet.begin()->trackID;
if(temp.index < myMeta.tracks[temp.trackID].parts.size() ){//only insert when there are parts left
temp.time = sortSet.begin()->endTime;//timestamp of frame
temp.endTime = sortSet.begin()->endTime + myMeta.tracks[temp.trackID].parts[temp.index].getDuration();
temp.size = myMeta.tracks[temp.trackID].parts[temp.index].getSize();//byte size of frame
sortSet.insert(temp);
}
//remove highest keyPart
sortSet.erase(sortSet.begin());
}
mdatSize = totalByteOffset+8;
header << std::string(moovBox.asBox(),moovBox.boxedSize());
header << (char)((mdatSize>>24) & 0xFF) << (char)((mdatSize>>16) & 0xFF) << (char)((mdatSize>>8) & 0xFF) << (char)(mdatSize & 0xFF) << "mdat";
//end of header
size += header.str().size();
return header.str();
}
/// Calculate a seekPoint, based on byteStart, metadata, tracks and headerSize.
/// The seekPoint will be set to the timestamp of the first packet to send.
void OutProgressiveMP4::findSeekPoint(long long byteStart, long long & seekPoint, unsigned int headerSize){
seekPoint = 0;
//if we're starting in the header, seekPoint is always zero.
if (byteStart <= headerSize){return;}
//okay, we're past the header. Subtract the header size from the starting position.
byteStart -= headerSize;
//initialize a list of sorted parts that this file contains
std::set <keyPart> sortSet;
for (std::set<long unsigned int>::iterator subIt = selectedTracks.begin(); subIt != selectedTracks.end(); subIt++) {
keyPart temp;
temp.trackID = *subIt;
temp.time = myMeta.tracks[*subIt].firstms;//timestamp of frame
temp.endTime = myMeta.tracks[*subIt].firstms + myMeta.tracks[*subIt].parts[0].getDuration();
temp.size = myMeta.tracks[*subIt].parts[0].getSize();//byte size of frame (all parts together)
temp.index = 0;
sortSet.insert(temp);
}
//forward through the file by headers, until we reach the point where we need to be
while (!sortSet.empty()){
//subtract the size of this fragment from byteStart
byteStart -= sortSet.begin()->size;
//if that put us past the point where we wanted to be, return right now
if (byteStart < 0){return;}
//otherwise, set seekPoint to where we are now
seekPoint = sortSet.begin()->time;
//then find the next part
keyPart temp;
temp.index = sortSet.begin()->index + 1;
temp.trackID = sortSet.begin()->trackID;
if(temp.index < myMeta.tracks[temp.trackID].parts.size() ){//only insert when there are parts left
temp.time = sortSet.begin()->endTime;//timestamp of frame
temp.endTime = sortSet.begin()->endTime + myMeta.tracks[temp.trackID].parts[temp.index].getDuration();
temp.size = myMeta.tracks[temp.trackID].parts[temp.index].getSize();//byte size of frame
sortSet.insert(temp);
}
//remove highest keyPart
sortSet.erase(sortSet.begin());
}
//If we're here, we're in the last fragment.
//That's technically legal, of course.
}
/// Parses a "Range: " header, setting byteStart, byteEnd and seekPoint using data from metadata and tracks to do
/// the calculations.
/// On error, byteEnd is set to zero.
void OutProgressiveMP4::parseRange(std::string header, long long & byteStart, long long & byteEnd, long long & seekPoint, unsigned int headerSize){
if (header.size() < 6 || header.substr(0, 6) != "bytes="){
byteEnd = 0;
DEBUG_MSG(DLVL_WARN, "Invalid range header: %s", header.c_str());
return;
}
header.erase(0, 6);
if (header.size() && header[0] == '-'){
//negative range = count from end
byteStart = 0;
for (unsigned int i = 1; i < header.size(); ++i){
if (header[i] >= '0' && header[i] <= '9'){
byteStart *= 10;
byteStart += header[i] - '0';
continue;
}
break;
}
if (byteStart > byteEnd){
//entire file if starting before byte zero
byteStart = 0;
DEBUG_MSG(DLVL_DEVEL, "Full negative range: %lli-%lli", byteStart, byteEnd);
findSeekPoint(byteStart, seekPoint, headerSize);
return;
}else{
//start byteStart bytes before byteEnd
byteStart = byteEnd - byteStart;
DEBUG_MSG(DLVL_DEVEL, "Partial negative range: %lli-%lli", byteStart, byteEnd);
findSeekPoint(byteStart, seekPoint, headerSize);
return;
}
}else{
long long size = byteEnd;
byteEnd = 0;
byteStart = 0;
unsigned int i = 0;
for ( ; i < header.size(); ++i){
if (header[i] >= '0' && header[i] <= '9'){
byteStart *= 10;
byteStart += header[i] - '0';
continue;
}
break;
}
if (header[i] != '-'){
DEBUG_MSG(DLVL_WARN, "Invalid range header: %s", header.c_str());
byteEnd = 0;
return;
}
++i;
if (i < header.size()){
for ( ; i < header.size(); ++i){
if (header[i] >= '0' && header[i] <= '9'){
byteEnd *= 10;
byteEnd += header[i] - '0';
continue;
}
break;
}
if (byteEnd > size-1){byteEnd = size;}
}else{
byteEnd = size;
}
DEBUG_MSG(DLVL_DEVEL, "Range request: %lli-%lli (%s)", byteStart, byteEnd, header.c_str());
findSeekPoint(byteStart, seekPoint, headerSize);
return;
}
}
void OutProgressiveMP4::onRequest(){
while (HTTP_R.Read(myConn)){
DEBUG_MSG(DLVL_DEVEL, "Received request: %s", HTTP_R.getUrl().c_str());
myConn.setHost(HTTP_R.GetHeader("X-Origin"));
streamName = HTTP_R.GetHeader("X-Stream");
if (HTTP_R.GetVar("audio") != ""){
DEBUG_MSG(DLVL_DEVEL, "GetVar Aud = %s", HTTP_R.GetVar("audio").c_str());
selectedTracks.insert(JSON::Value(HTTP_R.GetVar("audio")).asInt());
}else{
DEBUG_MSG(DLVL_DEVEL, "No audio param given");
}
if (HTTP_R.GetVar("video") != ""){
DEBUG_MSG(DLVL_DEVEL, "GetVar Vid = %s", HTTP_R.GetVar("video").c_str());
selectedTracks.insert(JSON::Value(HTTP_R.GetVar("video")).asInt());
}else{
DEBUG_MSG(DLVL_DEVEL, "No video param given");
}
parseData = true;
wantRequest = false;
}
}
bool OutProgressiveMP4::onFinish(){
HTTP_R.Clean();
parseData = false;
wantRequest = true;
return true;
}
void OutProgressiveMP4::onFail(){
HTTP_S.Clean(); //make sure no parts of old requests are left in any buffers
HTTP_S.SetBody("Stream not found. Sorry, we tried.");
HTTP_S.SendResponse("404", "Stream not found", myConn);
Output::onFail();
}
void OutProgressiveMP4::sendNext(){
char * dataPointer = 0;
int len = 0;
currentPacket.getString("data", dataPointer, len);
//keep track of where we are - fast-forward until where we are now
while (!sortSet.empty() && ((long long)sortSet.begin()->trackID != currentPacket.getTrackId() || (long long)sortSet.begin()->time != currentPacket.getTime())){
keyPart temp;
temp.index = sortSet.begin()->index + 1;
temp.trackID = sortSet.begin()->trackID;
if(temp.index < myMeta.tracks[temp.trackID].parts.size() ){//only insert when there are parts left
temp.time = sortSet.begin()->endTime;//timestamp of frame
temp.endTime = sortSet.begin()->endTime + myMeta.tracks[temp.trackID].parts[temp.index].getDuration();
temp.size = myMeta.tracks[temp.trackID].parts[temp.index].getSize();//byte size of frame
sortSet.insert(temp);
}
currPos += sortSet.begin()->size;
//remove highest keyPart
sortSet.erase(sortSet.begin());
}
if (currPos >= byteStart){
sortSet.clear();//we don't need you anymore!
myConn.SendNow(dataPointer, std::min(leftOver, (long long)len));
//HTTP_S.Chunkify(Strm.lastData().data(), Strm.lastData().size(), conn);
leftOver -= len;
}else{
if (currPos + (long long)len > byteStart){
myConn.SendNow(dataPointer+(byteStart-currPos), len-(byteStart-currPos));
leftOver -= len-(byteStart-currPos);
currPos = byteStart;
sortSet.clear();//we don't need you anymore!
}
}
if (leftOver < 1){
//stop playback, wait for new request
stop();
wantRequest = true;
}
}
void OutProgressiveMP4::sendHeader(){
fileSize = 0;
std::string headerData = DTSCMeta2MP4Header(fileSize);
byteStart = 0;
byteEnd = fileSize - 1;
long long seekPoint = 0;
char rangeType = ' ';
if (HTTP_R.GetHeader("Range") != ""){
parseRange(HTTP_R.GetHeader("Range"), byteStart, byteEnd, seekPoint, headerData.size());
rangeType = HTTP_R.GetHeader("Range")[0];
}
sortSet.clear();
for (std::set<long unsigned int>::iterator subIt = selectedTracks.begin(); subIt != selectedTracks.end(); subIt++) {
keyPart temp;
temp.trackID = *subIt;
temp.time = myMeta.tracks[*subIt].firstms;//timestamp of frame
temp.endTime = myMeta.tracks[*subIt].firstms + myMeta.tracks[*subIt].parts[0].getDuration();
temp.size = myMeta.tracks[*subIt].parts[0].getSize();//byte size of frame (all parts together)
temp.index = 0;
sortSet.insert(temp);
}
HTTP_S.Clean(); //make sure no parts of old requests are left in any buffers
HTTP_S.SetHeader("Content-Type", "video/MP4"); //Send the correct content-type for MP4 files
HTTP_S.SetHeader("Accept-Ranges", "bytes, parsec");
if (rangeType != ' '){
DEBUG_MSG(DLVL_DEVEL, "Ranged request");
if (!byteEnd){
if (rangeType == 'p'){
HTTP_S.SetBody("Starsystem not in communications range");
HTTP_S.SendResponse("416", "Starsystem not in communications range", myConn);
return;
}else{
HTTP_S.SetBody("Requested Range Not Satisfiable");
HTTP_S.SendResponse("416", "Requested Range Not Satisfiable", myConn);
return;
}
}else{
std::stringstream rangeReply;
rangeReply << "bytes " << byteStart << "-" << byteEnd << "/" << fileSize;
HTTP_S.SetHeader("Content-Length", byteEnd - byteStart + 1);
//do not multiplex requests that are > 1MiB
if (byteEnd - byteStart + 1 > 1024*1024){
HTTP_S.SetHeader("MistMultiplex", "No");
}
HTTP_S.SetHeader("Content-Range", rangeReply.str());
/// \todo Switch to chunked?
HTTP_S.SendResponse("206", "Partial content", myConn);
//HTTP_S.StartResponse("206", "Partial content", HTTP_R, conn);
}
}else{
DEBUG_MSG(DLVL_DEVEL, "Non-Ranged request");
HTTP_S.SetHeader("Content-Length", byteEnd - byteStart + 1);
//do not multiplex requests that aren't ranged
HTTP_S.SetHeader("MistMultiplex", "No");
/// \todo Switch to chunked?
HTTP_S.SendResponse("200", "OK", myConn);
//HTTP_S.StartResponse(HTTP_R, conn);
}
leftOver = byteEnd - byteStart + 1;//add one byte, because range "0-0" = 1 byte of data
currPos = 0;
if (byteStart < (long long)headerData.size()){
/// \todo Switch to chunked?
//HTTP_S.Chunkify(headerData.data()+byteStart, std::min((long long)headerData.size(), byteEnd) - byteStart, conn);//send MP4 header
myConn.SendNow(headerData.data()+byteStart, std::min((long long)headerData.size(), byteEnd) - byteStart);//send MP4 header
leftOver -= std::min((long long)headerData.size(), byteEnd) - byteStart;
}
currPos = headerData.size();//we're now guaranteed to be past the header point, no matter what
seek(seekPoint);
sentHeader = true;
}
}
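
DTSCMeta2MP4Header(), findSeekPoint() and sendNext() above all walk the selected tracks' parts in presentation order using the same pattern: keep one keyPart per track in a std::set ordered by (time, trackID), repeatedly take the earliest entry, and re-insert that track's next part. A minimal standalone sketch of that interleaving (not part of this commit, with two hypothetical tracks), printing the byte offset each part would receive, which is the same walk that fills the STCO chunk offsets:

#include <cstdio>
#include <set>
#include <vector>

struct Part { unsigned long time, duration, size; };

struct Cursor {
  unsigned long time, track, index;
  bool operator<(const Cursor & rhs) const {
    return time < rhs.time || (time == rhs.time && track < rhs.track);
  }
};

int main() {
  // two hypothetical tracks; each part carries its start time, duration and byte size
  std::vector<std::vector<Part> > tracks(2);
  Part vid[] = {{0, 40, 100}, {40, 40, 90}};
  Part aud[] = {{0, 20, 30}, {20, 20, 30}, {40, 20, 30}};
  tracks[0].assign(vid, vid + 2);
  tracks[1].assign(aud, aud + 3);

  std::set<Cursor> sortSet;
  for (unsigned long t = 0; t < tracks.size(); ++t) {
    Cursor c = {tracks[t][0].time, t, 0};
    sortSet.insert(c);                                  // one cursor per track, at its first part
  }
  unsigned long byteOffset = 0;
  while (!sortSet.empty()) {
    Cursor cur = *sortSet.begin();
    sortSet.erase(sortSet.begin());                     // consume the earliest part
    const Part & p = tracks[cur.track][cur.index];
    printf("offset %lu: track %lu, part %lu (%lu bytes)\n", byteOffset, cur.track, cur.index, p.size);
    byteOffset += p.size;
    if (cur.index + 1 < tracks[cur.track].size()) {     // re-insert the track's next part, if any
      Cursor next = {p.time + p.duration, cur.track, cur.index + 1};
      sortSet.insert(next);
    }
  }
  return 0;
}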

50
src/output/output_progressive_mp4.h Normal file
View file

@@ -0,0 +1,50 @@
#include "output.h"
#include <mist/http_parser.h>
namespace Mist {
struct keyPart{
public:
bool operator < (const keyPart& rhs) const {
if (time < rhs.time){
return true;
}
if (time == rhs.time){
if (trackID < rhs.trackID){
return true;
}
}
return false;
}
long unsigned int trackID;
long unsigned int size;
long long unsigned int time;
long long unsigned int endTime;
long unsigned int index;
};
class OutProgressiveMP4 : public Output {
public:
OutProgressiveMP4(Socket::Connection & conn);
~OutProgressiveMP4();
static void init(Util::Config * cfg);
void parseRange(std::string header, long long & byteStart, long long & byteEnd, long long & seekPoint, unsigned int headerSize);
std::string DTSCMeta2MP4Header(long long & size);
void findSeekPoint(long long byteStart, long long & seekPoint, unsigned int headerSize);
void onRequest();
void sendNext();
bool onFinish();
void sendHeader();
void onFail();
protected:
long long fileSize;
long long byteStart;
long long byteEnd;
long long leftOver;
long long currPos;
std::set <keyPart> sortSet;//filling sortset for interleaving parts
HTTP::Parser HTTP_R, HTTP_S;
};
}
typedef Mist::OutProgressiveMP4 mistOut;

69
src/output/output_raw.cpp Normal file
View file

@@ -0,0 +1,69 @@
#include "output_raw.h"
namespace Mist {
OutRaw::OutRaw(Socket::Connection & conn) : Output(conn) {
streamName = config->getString("streamname");
initialize();
selectedTracks.clear();
std::string tracks = config->getString("tracks");
unsigned int currTrack = 0;
//loop over tracks, add any found track IDs to selectedTracks
if (tracks != ""){
for (unsigned int i = 0; i < tracks.size(); ++i){
if (tracks[i] >= '0' && tracks[i] <= '9'){
currTrack = currTrack*10 + (tracks[i] - '0');
}else{
if (currTrack > 0){
selectedTracks.insert(currTrack);
}
currTrack = 0;
}
}
if (currTrack > 0){
selectedTracks.insert(currTrack);
}
}
parseData = true;
seek(config->getInteger("seek"));
}
OutRaw::~OutRaw() {}
void OutRaw::init(Util::Config * cfg){
capa["name"] = "RAW";
capa["desc"] = "Enables raw DTSC over TCP.";
capa["deps"] = "";
capa["required"]["streamname"]["name"] = "Stream";
capa["required"]["streamname"]["help"] = "What streamname to serve. For multiple streams, add this protocol multiple times using different ports.";
capa["required"]["streamname"]["type"] = "str";
capa["required"]["streamname"]["option"] = "--stream";
capa["optional"]["tracks"]["name"] = "Tracks";
capa["optional"]["tracks"]["help"] = "The track IDs of the stream that this connector will transmit separated by spaces";
capa["optional"]["tracks"]["type"] = "str";
capa["optional"]["tracks"]["option"] = "--tracks";
capa["optional"]["seek"]["name"] = "Seek point";
capa["optional"]["seek"]["help"] = "The time in milliseconds to seek to, 0 by default.";
capa["optional"]["seek"]["type"] = "int";
capa["optional"]["seek"]["option"] = "--seek";
capa["codecs"][0u][0u].append("H264");
capa["codecs"][0u][1u].append("AAC");
cfg->addOption("streamname",
JSON::fromString("{\"arg\":\"string\",\"short\":\"s\",\"long\":\"stream\",\"help\":\"The name of the stream that this connector will transmit.\"}"));
cfg->addOption("tracks",
JSON::fromString("{\"arg\":\"string\",\"value\":[\"\"],\"short\": \"t\",\"long\":\"tracks\",\"help\":\"The track IDs of the stream that this connector will transmit separated by spaces.\"}"));
cfg->addOption("seek",
JSON::fromString("{\"arg\":\"integer\",\"value\":[0],\"short\": \"S\",\"long\":\"seek\",\"help\":\"The time in milliseconds to seek to, 0 by default.\"}"));
cfg->addConnectorOptions(666, capa);
config = cfg;
}
void OutRaw::sendNext(){
myConn.SendNow(currentPacket.getData(), currentPacket.getDataLen());
}
void OutRaw::sendHeader(){
myMeta.send(myConn);
sentHeader = true;
}
}
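
The OutRaw constructor above turns the --tracks option into track IDs by scanning digits and treating any other character as a separator. The same parsing as a standalone helper (hypothetical name), for reference:

#include <set>
#include <string>

// Splits a string like "1 2 13" into numeric track IDs; non-digits act as separators.
std::set<unsigned int> parseTrackList(const std::string & tracks) {
  std::set<unsigned int> result;
  unsigned int curr = 0;
  for (std::string::size_type i = 0; i < tracks.size(); ++i) {
    if (tracks[i] >= '0' && tracks[i] <= '9') {
      curr = curr * 10 + (tracks[i] - '0');
    } else {
      if (curr > 0) { result.insert(curr); }
      curr = 0;
    }
  }
  if (curr > 0) { result.insert(curr); }  // an ID at the very end has no trailing separator
  return result;
}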

15
src/output/output_raw.h Normal file
View file

@@ -0,0 +1,15 @@
#include "output.h"
namespace Mist {
class OutRaw : public Output {
public:
OutRaw(Socket::Connection & conn);
~OutRaw();
static void init(Util::Config * cfg);
void sendNext();
void sendHeader();
};
}
typedef Mist::OutRaw mistOut;

754
src/output/output_rtmp.cpp Normal file
View file

@@ -0,0 +1,754 @@
#include "output_rtmp.h"
#include <mist/http_parser.h>
#include <mist/defines.h>
#include <mist/stream.h>
#include <cstring>
#include <cstdlib>
namespace Mist {
OutRTMP::OutRTMP(Socket::Connection & conn) : Output(conn) {
playTransaction = -1;
playMessageType = -1;
playStreamId = -1;
setBlocking(false);
while (!conn.Received().available(1537) && conn.connected()) {
conn.spool();
Util::sleep(5);
}
RTMPStream::handshake_in = conn.Received().remove(1537);
RTMPStream::rec_cnt += 1537;
if (RTMPStream::doHandshake()) {
conn.SendNow(RTMPStream::handshake_out);
while (!conn.Received().available(1536) && conn.connected()) {
conn.spool();
Util::sleep(5);
}
conn.Received().remove(1536);
RTMPStream::rec_cnt += 1536;
DEBUG_MSG(DLVL_HIGH, "Handshake success!");
} else {
DEBUG_MSG(DLVL_DEVEL, "Handshake fail!");
}
counter = 0;
sending = false;
streamReset = false;
}
OutRTMP::~OutRTMP() {}
void OutRTMP::init(Util::Config * cfg) {
capa["name"] = "RTMP";
capa["desc"] = "Enables the RTMP protocol which is used by Adobe Flash Player.";
capa["deps"] = "";
capa["url_rel"] = "/play/$";
capa["codecs"][0u][0u].append("H264");
capa["codecs"][0u][0u].append("H263");
capa["codecs"][0u][0u].append("VP6");
capa["codecs"][0u][1u].append("AAC");
capa["codecs"][0u][1u].append("MP3");
capa["methods"][0u]["handler"] = "rtmp";
capa["methods"][0u]["type"] = "flash/10";
capa["methods"][0u]["priority"] = 6ll;
cfg->addConnectorOptions(1935, capa);
config = cfg;
}
void OutRTMP::sendNext() {
//send a tag
FLV::Tag tag;
if (tag.DTSCLoader(currentPacket, myMeta.tracks[currentPacket.getTrackId()])) {
if (tag.len) {
myConn.SendNow(RTMPStream::SendMedia(tag));
#if DEBUG >= 8
fprintf(stderr, "Sent tag to %i: [%u] %s\n", myConn.getSocket(), tag.tagTime(), tag.tagType().c_str());
#endif
}
}
}
void OutRTMP::sendHeader() {
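//send the FLV metadata tag first, then the init (sequence header) tags for each selected video and audio track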
FLV::Tag tag;
tag.DTSCMetaInit(myMeta, selectedTracks);
if (tag.len) {
myConn.SendNow(RTMPStream::SendMedia(tag));
}
for (std::set<long unsigned int>::iterator it = selectedTracks.begin(); it != selectedTracks.end(); it++) {
if (myMeta.tracks[*it].type == "video") {
tag.DTSCVideoInit(myMeta.tracks[*it]);
if (tag.len) {
myConn.SendNow(RTMPStream::SendMedia(tag));
}
}
if (myMeta.tracks[*it].type == "audio") {
tag.DTSCAudioInit(myMeta.tracks[*it]);
if (tag.len) {
myConn.SendNow(RTMPStream::SendMedia(tag));
}
}
}
sentHeader = true;
}
void OutRTMP::onRequest() {
parseChunk(myConn.Received());
}
///\brief Sends a RTMP command either in AMF or AMF3 mode.
///\param amfReply The data to be sent over RTMP.
///\param messageType The type of message.
///\param streamId The ID of the AMF stream.
void OutRTMP::sendCommand(AMF::Object & amfReply, int messageType, int streamId) {
#if DEBUG >= 8
std::cerr << amfReply.Print() << std::endl;
#endif
if (messageType == 17) {
myConn.SendNow(RTMPStream::SendChunk(3, messageType, streamId, (char)0 + amfReply.Pack()));
} else {
myConn.SendNow(RTMPStream::SendChunk(3, messageType, streamId, amfReply.Pack()));
}
} //sendCommand
///\brief Parses a single AMF command message, and sends a direct response through sendCommand().
///\param amfData The received request.
///\param messageType The type of message.
///\param streamId The ID of the AMF stream.
void OutRTMP::parseAMFCommand(AMF::Object & amfData, int messageType, int streamId) {
#if DEBUG >= 5
fprintf(stderr, "Received command: %s\n", amfData.Print().c_str());
#endif
#if DEBUG >= 8
fprintf(stderr, "AMF0 command: %s\n", amfData.getContentP(0)->StrValue().c_str());
#endif
if (amfData.getContentP(0)->StrValue() == "connect") {
double objencoding = 0;
if (amfData.getContentP(2)->getContentP("objectEncoding")) {
objencoding = amfData.getContentP(2)->getContentP("objectEncoding")->NumValue();
}
#if DEBUG >= 6
int tmpint;
if (amfData.getContentP(2)->getContentP("videoCodecs")) {
tmpint = (int)amfData.getContentP(2)->getContentP("videoCodecs")->NumValue();
if (tmpint & 0x04) {
fprintf(stderr, "Sorensen video support detected\n");
}
if (tmpint & 0x80) {
fprintf(stderr, "H264 video support detected\n");
}
}
if (amfData.getContentP(2)->getContentP("audioCodecs")) {
tmpint = (int)amfData.getContentP(2)->getContentP("audioCodecs")->NumValue();
if (tmpint & 0x04) {
fprintf(stderr, "MP3 audio support detected\n");
}
if (tmpint & 0x400) {
fprintf(stderr, "AAC audio support detected\n");
}
}
#endif
app_name = amfData.getContentP(2)->getContentP("tcUrl")->StrValue();
app_name = app_name.substr(app_name.find('/', 7) + 1);
RTMPStream::chunk_snd_max = 4096;
myConn.Send(RTMPStream::SendCTL(1, RTMPStream::chunk_snd_max)); //send chunk size max (msg 1)
myConn.Send(RTMPStream::SendCTL(5, RTMPStream::snd_window_size)); //send window acknowledgement size (msg 5)
myConn.Send(RTMPStream::SendCTL(6, RTMPStream::rec_window_size)); //send rec window acknowledgement size (msg 6)
myConn.Send(RTMPStream::SendUSR(0, 1)); //send UCM StreamBegin (0), stream 1
//send a _result reply
AMF::Object amfReply("container", AMF::AMF0_DDV_CONTAINER);
amfReply.addContent(AMF::Object("", "_result")); //result success
amfReply.addContent(amfData.getContent(1)); //same transaction ID
amfReply.addContent(AMF::Object("")); //server properties
amfReply.getContentP(2)->addContent(AMF::Object("fmsVer", "FMS/3,5,5,2004"));
amfReply.getContentP(2)->addContent(AMF::Object("capabilities", (double)31));
amfReply.getContentP(2)->addContent(AMF::Object("mode", (double)1));
amfReply.addContent(AMF::Object("")); //info
amfReply.getContentP(3)->addContent(AMF::Object("level", "status"));
amfReply.getContentP(3)->addContent(AMF::Object("code", "NetConnection.Connect.Success"));
amfReply.getContentP(3)->addContent(AMF::Object("description", "Connection succeeded."));
amfReply.getContentP(3)->addContent(AMF::Object("clientid", 1337));
amfReply.getContentP(3)->addContent(AMF::Object("objectEncoding", objencoding));
//amfReply.getContentP(3)->addContent(AMF::Object("data", AMF::AMF0_ECMA_ARRAY));
//amfReply.getContentP(3)->getContentP(4)->addContent(AMF::Object("version", "3,5,4,1004"));
sendCommand(amfReply, messageType, streamId);
//send onBWDone packet - no clue what it is, but real server sends it...
//amfReply = AMF::Object("container", AMF::AMF0_DDV_CONTAINER);
//amfReply.addContent(AMF::Object("", "onBWDone"));//result
//amfReply.addContent(amfData.getContent(1));//same transaction ID
//amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL));//null
//sendCommand(amfReply, messageType, streamId);
return;
} //connect
if (amfData.getContentP(0)->StrValue() == "createStream") {
//send a _result reply
AMF::Object amfReply("container", AMF::AMF0_DDV_CONTAINER);
amfReply.addContent(AMF::Object("", "_result")); //result success
amfReply.addContent(amfData.getContent(1)); //same transaction ID
amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
amfReply.addContent(AMF::Object("", (double)1)); //stream ID - we use 1
sendCommand(amfReply, messageType, streamId);
myConn.Send(RTMPStream::SendUSR(0, 1)); //send UCM StreamBegin (0), stream 1
return;
} //createStream
if ((amfData.getContentP(0)->StrValue() == "closeStream") || (amfData.getContentP(0)->StrValue() == "deleteStream")) {
stop();
return;
}
if ((amfData.getContentP(0)->StrValue() == "FCUnpublish") || (amfData.getContentP(0)->StrValue() == "releaseStream")) {
// ignored
return;
}
if ((amfData.getContentP(0)->StrValue() == "FCPublish")) {
//send a FCPublic reply
AMF::Object amfReply("container", AMF::AMF0_DDV_CONTAINER);
amfReply.addContent(AMF::Object("", "onFCPublish")); //status reply
amfReply.addContent(AMF::Object("", 0, AMF::AMF0_NUMBER)); //transaction ID 0 (unsolicited message)
amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
amfReply.addContent(AMF::Object("")); //info
amfReply.getContentP(3)->addContent(AMF::Object("code", "NetStream.Publish.Start"));
amfReply.getContentP(3)->addContent(AMF::Object("description", "Please followup with publish command..."));
sendCommand(amfReply, messageType, streamId);
return;
} //FCPublish
if (amfData.getContentP(0)->StrValue() == "releaseStream") {
//send a _result reply
AMF::Object amfReply("container", AMF::AMF0_DDV_CONTAINER);
amfReply.addContent(AMF::Object("", "_result")); //result success
amfReply.addContent(amfData.getContent(1)); //same transaction ID
amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
amfReply.addContent(AMF::Object("", AMF::AMF0_UNDEFINED)); //stream ID?
sendCommand(amfReply, messageType, streamId);
return;
}//releaseStream
if ((amfData.getContentP(0)->StrValue() == "getStreamLength") || (amfData.getContentP(0)->StrValue() == "getMovLen")) {
//send a _result reply
AMF::Object amfReply("container", AMF::AMF0_DDV_CONTAINER);
amfReply.addContent(AMF::Object("", "_result")); //result success
amfReply.addContent(amfData.getContent(1)); //same transaction ID
amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
amfReply.addContent(AMF::Object("", (double)0)); //zero length
sendCommand(amfReply, messageType, streamId);
return;
} //getStreamLength
if ((amfData.getContentP(0)->StrValue() == "publish")) {
if (amfData.getContentP(3)) {
streamName = amfData.getContentP(3)->StrValue();
//pull the server configuration
JSON::Value servConf = JSON::fromFile(Util::getTmpFolder() + "streamlist");
if (servConf.isMember("streams") && servConf["streams"].isMember(streamName)){
JSON::Value & streamConfig = servConf["streams"][streamName];
if (!streamConfig.isMember("source") || streamConfig["source"].asStringRef().substr(0, 7) != "push://"){
DEBUG_MSG(DLVL_FAIL, "Push rejected - stream not a push-able stream. (%s != push://*)", streamConfig["source"].asStringRef().c_str());
myConn.close();
return;
}
std::string source = streamConfig["source"].asStringRef().substr(7);
std::string IP = source.substr(0, source.find('@'));
if (IP != ""){
if (!myConn.isAddress(IP)){
DEBUG_MSG(DLVL_FAIL, "Push rejected - source host not whitelisted");
myConn.close();
return;
}
}
}else{
DEBUG_MSG(DLVL_FAIL, "Push rejected - stream not configured.");
myConn.close();
return;
}
initialize();
}
//send a _result reply
AMF::Object amfReply("container", AMF::AMF0_DDV_CONTAINER);
amfReply.addContent(AMF::Object("", "_result")); //result success
amfReply.addContent(amfData.getContent(1)); //same transaction ID
amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
amfReply.addContent(AMF::Object("", 1, AMF::AMF0_BOOL)); //publish success?
sendCommand(amfReply, messageType, streamId);
myConn.Send(RTMPStream::SendUSR(0, 1)); //send UCM StreamBegin (0), stream 1
//send a status reply
amfReply = AMF::Object("container", AMF::AMF0_DDV_CONTAINER);
amfReply.addContent(AMF::Object("", "onStatus")); //status reply
amfReply.addContent(AMF::Object("", 0, AMF::AMF0_NUMBER)); //transaction ID 0 (unsolicited status)
amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
amfReply.addContent(AMF::Object("")); //info
amfReply.getContentP(3)->addContent(AMF::Object("level", "status"));
amfReply.getContentP(3)->addContent(AMF::Object("code", "NetStream.Publish.Start"));
amfReply.getContentP(3)->addContent(AMF::Object("description", "Stream is now published!"));
amfReply.getContentP(3)->addContent(AMF::Object("clientid", (double)1337));
sendCommand(amfReply, messageType, streamId);
return;
} //publish
if (amfData.getContentP(0)->StrValue() == "checkBandwidth") {
//send a _result reply
AMF::Object amfReply("container", AMF::AMF0_DDV_CONTAINER);
amfReply.addContent(AMF::Object("", "_result")); //result success
amfReply.addContent(amfData.getContent(1)); //same transaction ID
amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
sendCommand(amfReply, messageType, streamId);
return;
} //checkBandwidth
if ((amfData.getContentP(0)->StrValue() == "play") || (amfData.getContentP(0)->StrValue() == "play2")) {
//store the transaction ID, message type and stream ID so later status replies can reuse them
playTransaction = amfData.getContentP(1)->NumValue();
playMessageType = messageType;
playStreamId = streamId;
streamName = amfData.getContentP(3)->StrValue();
initialize();
//send a status reply
AMF::Object amfreply("container", AMF::AMF0_DDV_CONTAINER);
amfreply.addContent(AMF::Object("", "onStatus")); //status reply
amfreply.addContent(AMF::Object("", (double)playTransaction)); //same transaction ID
amfreply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
amfreply.addContent(AMF::Object("")); //info
amfreply.getContentP(3)->addContent(AMF::Object("level", "status"));
amfreply.getContentP(3)->addContent(AMF::Object("code", "NetStream.Play.Reset"));
amfreply.getContentP(3)->addContent(AMF::Object("description", "Playing and resetting..."));
amfreply.getContentP(3)->addContent(AMF::Object("details", "DDV"));
amfreply.getContentP(3)->addContent(AMF::Object("clientid", (double)1337));
sendCommand(amfreply, playMessageType, playStreamId);
//send streamisrecorded if stream, well, is recorded.
if (myMeta.vod) { //isMember("length") && Strm.metadata["length"].asInt() > 0){
myConn.Send(RTMPStream::SendUSR(4, 1)); //send UCM StreamIsRecorded (4), stream 1
}
//send streambegin
myConn.Send(RTMPStream::SendUSR(0, 1)); //send UCM StreamBegin (0), stream 1
//and more reply
amfreply = AMF::Object("container", AMF::AMF0_DDV_CONTAINER);
amfreply.addContent(AMF::Object("", "onStatus")); //status reply
amfreply.addContent(AMF::Object("", (double)playTransaction)); //same transaction ID
amfreply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
amfreply.addContent(AMF::Object("")); //info
amfreply.getContentP(3)->addContent(AMF::Object("level", "status"));
amfreply.getContentP(3)->addContent(AMF::Object("code", "NetStream.Play.Start"));
amfreply.getContentP(3)->addContent(AMF::Object("description", "Playing!"));
amfreply.getContentP(3)->addContent(AMF::Object("details", "DDV"));
amfreply.getContentP(3)->addContent(AMF::Object("clientid", (double)1337));
sendCommand(amfreply, playMessageType, playStreamId);
RTMPStream::chunk_snd_max = 102400; //100KiB
myConn.Send(RTMPStream::SendCTL(1, RTMPStream::chunk_snd_max)); //send chunk size max (msg 1)
//send dunno?
myConn.Send(RTMPStream::SendUSR(32, 1)); //send UCM no clue?, stream 1
parseData = true;
return;
} //play
if ((amfData.getContentP(0)->StrValue() == "seek")) {
//store the transaction ID, message type and stream ID so later status replies can reuse them
playTransaction = amfData.getContentP(1)->NumValue();
playMessageType = messageType;
playStreamId = streamId;
AMF::Object amfReply("container", AMF::AMF0_DDV_CONTAINER);
amfReply.addContent(AMF::Object("", "onStatus")); //status reply
amfReply.addContent(amfData.getContent(1)); //same transaction ID
amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
amfReply.addContent(AMF::Object("")); //info
amfReply.getContentP(3)->addContent(AMF::Object("level", "status"));
amfReply.getContentP(3)->addContent(AMF::Object("code", "NetStream.Seek.Notify"));
amfReply.getContentP(3)->addContent(AMF::Object("description", "Seeking to the specified time"));
amfReply.getContentP(3)->addContent(AMF::Object("details", "DDV"));
amfReply.getContentP(3)->addContent(AMF::Object("clientid", (double)1337));
sendCommand(amfReply, playMessageType, playStreamId);
seek((long long int)amfData.getContentP(3)->NumValue());
//send a status reply
AMF::Object amfreply("container", AMF::AMF0_DDV_CONTAINER);
amfreply.addContent(AMF::Object("", "onStatus")); //status reply
amfreply.addContent(AMF::Object("", (double)playTransaction)); //same transaction ID
amfreply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
amfreply.addContent(AMF::Object("")); //info
amfreply.getContentP(3)->addContent(AMF::Object("level", "status"));
amfreply.getContentP(3)->addContent(AMF::Object("code", "NetStream.Play.Reset"));
amfreply.getContentP(3)->addContent(AMF::Object("description", "Playing and resetting..."));
amfreply.getContentP(3)->addContent(AMF::Object("details", "DDV"));
amfreply.getContentP(3)->addContent(AMF::Object("clientid", (double)1337));
sendCommand(amfreply, playMessageType, playStreamId);
//send streamisrecorded if stream, well, is recorded.
if (myMeta.vod) { //isMember("length") && Strm.metadata["length"].asInt() > 0){
myConn.Send(RTMPStream::SendUSR(4, 1)); //send UCM StreamIsRecorded (4), stream 1
}
//send streambegin
myConn.Send(RTMPStream::SendUSR(0, 1)); //send UCM StreamBegin (0), stream 1
//and more reply
amfreply = AMF::Object("container", AMF::AMF0_DDV_CONTAINER);
amfreply.addContent(AMF::Object("", "onStatus")); //status reply
amfreply.addContent(AMF::Object("", (double)playTransaction)); //same transaction ID
amfreply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
amfreply.addContent(AMF::Object("")); //info
amfreply.getContentP(3)->addContent(AMF::Object("level", "status"));
amfreply.getContentP(3)->addContent(AMF::Object("code", "NetStream.Play.Start"));
amfreply.getContentP(3)->addContent(AMF::Object("description", "Playing!"));
amfreply.getContentP(3)->addContent(AMF::Object("details", "DDV"));
amfreply.getContentP(3)->addContent(AMF::Object("clientid", (double)1337));
sendCommand(amfreply, playMessageType, playStreamId);
RTMPStream::chunk_snd_max = 102400; //100KiB
myConn.Send(RTMPStream::SendCTL(1, RTMPStream::chunk_snd_max)); //send chunk size max (msg 1)
//send dunno?
myConn.Send(RTMPStream::SendUSR(32, 1)); //send UCM no clue?, stream 1
return;
} //seek
if ((amfData.getContentP(0)->StrValue() == "pauseRaw") || (amfData.getContentP(0)->StrValue() == "pause")) {
if (amfData.getContentP(3)->NumValue()) {
parseData = false;
//send a status reply
AMF::Object amfReply("container", AMF::AMF0_DDV_CONTAINER);
amfReply.addContent(AMF::Object("", "onStatus")); //status reply
amfReply.addContent(amfData.getContent(1)); //same transaction ID
amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
amfReply.addContent(AMF::Object("")); //info
amfReply.getContentP(3)->addContent(AMF::Object("level", "status"));
amfReply.getContentP(3)->addContent(AMF::Object("code", "NetStream.Pause.Notify"));
amfReply.getContentP(3)->addContent(AMF::Object("description", "Pausing playback"));
amfReply.getContentP(3)->addContent(AMF::Object("details", "DDV"));
amfReply.getContentP(3)->addContent(AMF::Object("clientid", (double)1337));
sendCommand(amfReply, playMessageType, playStreamId);
} else {
parseData = true;
//send a status reply
AMF::Object amfReply("container", AMF::AMF0_DDV_CONTAINER);
amfReply.addContent(AMF::Object("", "onStatus")); //status reply
amfReply.addContent(amfData.getContent(1)); //same transaction ID
amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
amfReply.addContent(AMF::Object("")); //info
amfReply.getContentP(3)->addContent(AMF::Object("level", "status"));
amfReply.getContentP(3)->addContent(AMF::Object("code", "NetStream.Unpause.Notify"));
amfReply.getContentP(3)->addContent(AMF::Object("description", "Resuming playback"));
amfReply.getContentP(3)->addContent(AMF::Object("details", "DDV"));
amfReply.getContentP(3)->addContent(AMF::Object("clientid", (double)1337));
sendCommand(amfReply, playMessageType, playStreamId);
}
return;
} //pause
#if DEBUG >= 2
fprintf(stderr, "AMF0 command not processed!\n%s\n", amfData.Print().c_str());
#endif
} //parseAMFCommand
void OutRTMP::bufferPacket(JSON::Value & pack){
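//map the incoming track ID to the negotiated buffer track and append the packet to the current shared memory page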
if (!trackMap.count(pack["trackid"].asInt())){
//declined track;
return;
}
pack["trackid"] = trackMap[pack["trackid"].asInt()];
long long unsigned int tNum = pack["trackid"].asInt();
if (!bookKeeping.count(tNum)){
return;
}
int pageNum = bookKeeping[tNum].pageNum;
std::string tmp = pack.toNetPacked();
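//roll over to a new page once the current one holds over 8MiB, but only on a keyframe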
if (bookKeeping[tNum].curOffset > 8388608 && pack.isMember("keyframe") && pack["keyframe"]){
Util::sleep(500);
//open new page
char nextPage[100];
sprintf(nextPage, "%s%llu_%d", streamName.c_str(), tNum, bookKeeping[tNum].pageNum + bookKeeping[tNum].keyNum);
curPages[tNum].init(nextPage, 0, false);
bookKeeping[tNum].pageNum += bookKeeping[tNum].keyNum;
bookKeeping[tNum].keyNum = 0;
bookKeeping[tNum].curOffset = 0;
}
if (bookKeeping[tNum].curOffset + tmp.size() < curPages[tNum].len){
bookKeeping[tNum].keyNum += (pack.isMember("keyframe") && pack["keyframe"]);
memcpy(curPages[tNum].mapped + bookKeeping[tNum].curOffset, tmp.data(), tmp.size());
bookKeeping[tNum].curOffset += tmp.size();
}else{
bookKeeping[tNum].curOffset += tmp.size();
DEBUG_MSG(DLVL_WARN, "Can't buffer frame on page %d, track %llu, time %lld, keyNum %d, offset %llu", pageNum, tNum, pack["time"].asInt(), bookKeeping[tNum].pageNum + bookKeeping[tNum].keyNum, bookKeeping[tNum].curOffset);
///\todo Open next page plx
}
playerConn.keepAlive();
}
void OutRTMP::negotiatePushTracks() {
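//request track numbers from the buffer: write each incoming track ID to the shared user page with the high bit set as a "not yet assigned" marker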
char * tmp = playerConn.getData();
if (!tmp){
DEBUG_MSG(DLVL_FAIL, "No userpage allocated");
return;
}
memset(tmp, 0, 30);
unsigned int i = 0;
for (std::map<int, DTSC::Track>::iterator it = meta_out.tracks.begin(); it != meta_out.tracks.end() && i < 5; it++){
DEBUG_MSG(DLVL_DEVEL, "Negotiating tracknum for id %d", it->first);
(tmp + 6 * i)[0] = 0x80;
(tmp + 6 * i)[1] = 0x00;
(tmp + 6 * i)[2] = 0x00;
(tmp + 6 * i)[3] = 0x00;
(tmp + 6 * i)[4] = (it->first >> 8) & 0xFF;
(tmp + 6 * i)[5] = (it->first) & 0xFF;
i++;
}
playerConn.keepAlive();
bool gotAllNumbers = false;
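//poll until the buffer has cleared the high bit on every entry, meaning all tracks have been assigned a number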
while (!gotAllNumbers){
Util::sleep(100);
gotAllNumbers = true;
i = 0;
for (std::map<int, DTSC::Track>::iterator it = meta_out.tracks.begin(); it != meta_out.tracks.end() && i < 5; it++){
unsigned long tNum = (((long)(tmp + (6 * i))[0]) << 24) | (((long)(tmp + (6 * i))[1]) << 16) | (((long)(tmp + (6 * i))[2]) << 8) | (long)(tmp + (6 * i))[3];
unsigned short oldNum = (((long)(tmp + (6 * i))[4]) << 8) | (long)(tmp + (6 * i))[5];
if( tNum & 0x80000000){
gotAllNumbers = false;
break;
}else{
DEBUG_MSG(DLVL_DEVEL, "Mapped %d -> %lu", oldNum, tNum);
trackMap[oldNum] = tNum;
}
i++;
}
}
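//write per-track metadata to the matching liveStream_ metadata pages so the buffer knows what to expect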
for (std::map<int, int>::iterator it = trackMap.begin(); it != trackMap.end(); it++){
char tmp[100];
sprintf( tmp, "liveStream_%s%d", streamName.c_str(), it->second);
metaPages[it->second].init(std::string(tmp), 0, false);
DTSC::Meta tmpMeta = meta_out;
tmpMeta.tracks.clear();
tmpMeta.tracks[it->second] = meta_out.tracks[it->first];
tmpMeta.tracks[it->second].trackID = it->second;
JSON::Value tmpVal = tmpMeta.toJSON();
std::string tmpStr = tmpVal.toNetPacked();
memcpy(metaPages[it->second].mapped, tmpStr.data(), tmpStr.size());
DEBUG_MSG(DLVL_DEVEL, "Written meta for track %d", it->second);
}
gotAllNumbers = false;
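//keep re-reading the shared stream index until every accepted (non-declined) track shows up in the metadata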
while (!gotAllNumbers){
Util::sleep(100);
gotAllNumbers = true;
i = 0;
unsigned int j = 0;
//update Metadata;
JSON::Value jsonMeta;
JSON::fromDTMI((const unsigned char*)streamIndex.mapped + 8, streamIndex.len - 8, j, jsonMeta);
myMeta = DTSC::Meta(jsonMeta);
tmp = playerConn.getData();
for (std::map<int, DTSC::Track>::iterator it = meta_out.tracks.begin(); it != meta_out.tracks.end() && i < 5; it++){
unsigned long tNum = (((long)(tmp + (6 * i))[0]) << 24) | (((long)(tmp + (6 * i))[1]) << 16) | (((long)(tmp + (6 * i))[2]) << 8) | (long)(tmp + (6 * i))[3];
if( tNum == 0xFFFFFFFF){
DEBUG_MSG(DLVL_DEVEL, "Skipping a declined track");
i++;
continue;
}
if(!myMeta.tracks.count(tNum)){
gotAllNumbers = false;
break;
}
i++;
}
}
i = 0;
tmp = playerConn.getData();
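//open the first data page for every accepted track; an entry of 0xFFFFFFFF means the buffer declined that track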
for (std::map<int, DTSC::Track>::iterator it = meta_out.tracks.begin(); it != meta_out.tracks.end() && i < 5; it++){
unsigned long tNum = ((long)(tmp[6*i]) << 24) | ((long)(tmp[6 * i + 1]) << 16) | ((long)(tmp[6 * i + 2]) << 8) | tmp[6 * i + 3];
if( tNum == 0xFFFFFFFF){
tNum = ((long)(tmp[6 * i + 4]) << 8) | (long)tmp[6 * i + 5];
DEBUG_MSG(DLVL_WARN, "Buffer declined track %i", trackMap[tNum]);
trackMap.erase(tNum);
tmp[6*i] = 0;
tmp[6*i+1] = 0;
tmp[6*i+2] = 0;
tmp[6*i+3] = 0;
tmp[6*i+4] = 0;
tmp[6*i+5] = 0;
}else{
char firstPage[100];
sprintf(firstPage, "%s%lu_%d", streamName.c_str(), tNum, 0);
curPages[tNum].init(firstPage, 0, false);
bookKeeping[tNum] = DTSCPageData();
DEBUG_MSG(DLVL_WARN, "Buffer accepted track %lu", tNum);
}
i++;
}
}
///\brief Gets and parses one RTMP chunk at a time.
///\param inputBuffer A buffer filled with chunk data.
void OutRTMP::parseChunk(Socket::Buffer & inputBuffer) {
//for DTSC conversion
static std::stringstream prebuffer; // Temporary buffer before sending real data
//for chunk parsing
static RTMPStream::Chunk next;
static FLV::Tag F;
static AMF::Object amfdata("empty", AMF::AMF0_DDV_CONTAINER);
static AMF::Object amfelem("empty", AMF::AMF0_DDV_CONTAINER);
static AMF::Object3 amf3data("empty", AMF::AMF3_DDV_CONTAINER);
static AMF::Object3 amf3elem("empty", AMF::AMF3_DDV_CONTAINER);
while (next.Parse(inputBuffer)) {
//send ACK if we received a whole window
if ((RTMPStream::rec_cnt - RTMPStream::rec_window_at > RTMPStream::rec_window_size)) {
RTMPStream::rec_window_at = RTMPStream::rec_cnt;
myConn.Send(RTMPStream::SendCTL(3, RTMPStream::rec_cnt)); //send ack (msg 3)
}
switch (next.msg_type_id) {
case 0: //does not exist
#if DEBUG >= 2
fprintf(stderr, "UNKN: Received a zero-type message. Possible data corruption? Aborting!\n");
#endif
while (inputBuffer.size()) {
inputBuffer.get().clear();
}
stop();
myConn.close();
break; //happens when connection breaks unexpectedly
case 1: //set chunk size
RTMPStream::chunk_rec_max = ntohl(*(int *)next.data.c_str());
#if DEBUG >= 5
fprintf(stderr, "CTRL: Set chunk size: %i\n", RTMPStream::chunk_rec_max);
#endif
break;
case 2: //abort message - we ignore this one
#if DEBUG >= 5
fprintf(stderr, "CTRL: Abort message\n");
#endif
//4 bytes of stream id to drop
break;
case 3: //ack
#if DEBUG >= 8
fprintf(stderr, "CTRL: Acknowledgement\n");
#endif
RTMPStream::snd_window_at = ntohl(*(int *)next.data.c_str());
RTMPStream::snd_window_at = RTMPStream::snd_cnt;
break;
case 4: {
//2 bytes event type, rest = event data
//types:
//0 = stream begin, 4 bytes ID
//1 = stream EOF, 4 bytes ID
//2 = stream dry, 4 bytes ID
//3 = setbufferlen, 4 bytes ID, 4 bytes length
//4 = streamisrecorded, 4 bytes ID
//6 = pingrequest, 4 bytes data
//7 = pingresponse, 4 bytes data
//we don't need to process this
#if DEBUG >= 5
short int ucmtype = ntohs(*(short int *)next.data.c_str());
switch (ucmtype) {
case 0:
fprintf(stderr, "CTRL: UCM StreamBegin %i\n", ntohl(*((int *)(next.data.c_str() + 2))));
break;
case 1:
fprintf(stderr, "CTRL: UCM StreamEOF %i\n", ntohl(*((int *)(next.data.c_str() + 2))));
break;
case 2:
fprintf(stderr, "CTRL: UCM StreamDry %i\n", ntohl(*((int *)(next.data.c_str() + 2))));
break;
case 3:
fprintf(stderr, "CTRL: UCM SetBufferLength %i %i\n", ntohl(*((int *)(next.data.c_str() + 2))), ntohl(*((int *)(next.data.c_str() + 6))));
break;
case 4:
fprintf(stderr, "CTRL: UCM StreamIsRecorded %i\n", ntohl(*((int *)(next.data.c_str() + 2))));
break;
case 6:
fprintf(stderr, "CTRL: UCM PingRequest %i\n", ntohl(*((int *)(next.data.c_str() + 2))));
break;
case 7:
fprintf(stderr, "CTRL: UCM PingResponse %i\n", ntohl(*((int *)(next.data.c_str() + 2))));
break;
default:
fprintf(stderr, "CTRL: UCM Unknown (%hi)\n", ucmtype);
break;
}
#endif
}
break;
case 5: //window size of other end
#if DEBUG >= 5
fprintf(stderr, "CTRL: Window size\n");
#endif
RTMPStream::rec_window_size = ntohl(*(int *)next.data.c_str());
RTMPStream::rec_window_at = RTMPStream::rec_cnt;
myConn.Send(RTMPStream::SendCTL(3, RTMPStream::rec_cnt)); //send ack (msg 3)
break;
case 6:
#if DEBUG >= 5
fprintf(stderr, "CTRL: Set peer bandwidth\n");
#endif
//4 bytes window size, 1 byte limit type (ignored)
RTMPStream::snd_window_size = ntohl(*(int *)next.data.c_str());
myConn.Send(RTMPStream::SendCTL(5, RTMPStream::snd_window_size)); //send window acknowledgement size (msg 5)
break;
case 8: //audio data
case 9: //video data
case 18: {//meta data
if (!isInitialized) {
DEBUG_MSG(DLVL_MEDIUM, "Received useless media data\n");
myConn.close();
break;
}
if (streamReset) {
//reset push data to empty, in case stream properties change
meta_out.reset();
preBuf.clear();
sending = false;
counter = 0;
streamReset = false;
}
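//convert the FLV chunk to a DTSC packet; hold back the first few packets until tracks have been negotiated with the buffer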
F.ChunkLoader(next);
JSON::Value pack_out = F.toJSON(meta_out);
if ( !pack_out.isNull()){
if ( !sending){
counter++;
if (counter > 8){
sending = true;
negotiatePushTracks();
for (std::deque<JSON::Value>::iterator it = preBuf.begin(); it != preBuf.end(); it++){
bufferPacket((*it));
}
preBuf.clear(); //clear buffer
bufferPacket(pack_out);
}else{
preBuf.push_back(pack_out);
}
}else{
bufferPacket(pack_out);
}
}
break;
}
case 15:
DEBUG_MSG(DLVL_MEDIUM, "Received AMF3 data message");
break;
case 16:
DEBUG_MSG(DLVL_MEDIUM, "Received AMF3 shared object");
break;
case 17: {
DEBUG_MSG(DLVL_MEDIUM, "Received AMF3 command message");
if (next.data[0] != 0) {
next.data = next.data.substr(1);
amf3data = AMF::parse3(next.data);
#if DEBUG >= 5
amf3data.Print();
#endif
} else {
DEBUG_MSG(DLVL_MEDIUM, "Received AMF3-0 command message");
next.data = next.data.substr(1);
amfdata = AMF::parse(next.data);
parseAMFCommand(amfdata, 17, next.msg_stream_id);
} //parsing AMF0-style
}
break;
case 19:
DEBUG_MSG(DLVL_MEDIUM, "Received AMF0 shared object");
break;
case 20: { //AMF0 command message
amfdata = AMF::parse(next.data);
parseAMFCommand(amfdata, 20, next.msg_stream_id);
}
break;
case 22:
DEBUG_MSG(DLVL_MEDIUM, "Received aggregate message");
break;
default:
DEBUG_MSG(DLVL_FAIL, "Unknown chunk received! Probably protocol corruption, stopping parsing of incoming data.");
break;
}
}
}
}

48
src/output/output_rtmp.h Normal file

@@ -0,0 +1,48 @@
#include "output.h"
#include <mist/flv_tag.h>
#include <mist/amf.h>
#include <mist/rtmpchunks.h>
namespace Mist {
struct DTSCPageData {
DTSCPageData() : pageNum(0), keyNum(0), partNum(0), dataSize(0), curOffset(0), firstTime(0){}
int pageNum;///<The current page number
int keyNum;///<The number of keyframes in this page.
int partNum;///<The number of parts in this page.
unsigned long long int dataSize;///<The full size this page should be.
unsigned long long int curOffset;///<The current write offset in the page.
unsigned long long int firstTime;///<The first timestamp of the page.
};
class OutRTMP : public Output {
public:
OutRTMP(Socket::Connection & conn);
~OutRTMP();
static void init(Util::Config * cfg);
void onRequest();
void sendNext();
void sendHeader();
void bufferPacket(JSON::Value & pack);
protected:
DTSC::Meta meta_out;
void negotiatePushTracks();
std::string app_name;
bool sending;
int counter;
bool streamReset;
int playTransaction;///<The transaction number of the reply.
int playStreamId;///<The stream id of the reply.
int playMessageType;///<The message type of the reply.
void parseChunk(Socket::Buffer & inputBuffer);
void parseAMFCommand(AMF::Object & amfData, int messageType, int streamId);
void sendCommand(AMF::Object & amfReply, int messageType, int streamId);
std::deque<JSON::Value> preBuf;
std::map<int,int> trackMap;
std::map<int,IPC::sharedPage> metaPages;
std::map<int,DTSCPageData> bookKeeping;
};
}
typedef Mist::OutRTMP mistOut;

82
src/output/output_srt.cpp Normal file

@@ -0,0 +1,82 @@
#include "output_srt.h"
#include <mist/http_parser.h>
#include <mist/defines.h>
#include <iomanip>
namespace Mist {
OutProgressiveSRT::OutProgressiveSRT(Socket::Connection & conn) : Output(conn) {
realTime = 0;
}
void OutProgressiveSRT::onFail(){
HTTP::Parser HTTP_S;
HTTP_S.Clean(); //make sure no parts of old requests are left in any buffers
HTTP_S.SetBody("Stream not found. Sorry, we tried.");
HTTP_S.SendResponse("404", "Stream not found", myConn);
Output::onFail();
}
OutProgressiveSRT::~OutProgressiveSRT() {}
void OutProgressiveSRT::init(Util::Config * cfg){
capa["desc"] = "Enables HTTP protocol subtitle streaming.";
capa["deps"] = "HTTP";
capa["url_rel"] = "/$.srt";
capa["url_match"] = "/$.srt";
capa["url_handler"] = "http";
capa["url_type"] = "subtitle";
capa["socket"] = "http_srt";
cfg->addBasicConnectorOptions(capa);
config = cfg;
}
void OutProgressiveSRT::sendNext(){
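//build one subtitle cue: a sequence number (SRT only), the start --> end timestamps, then the subtitle text itself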
char * dataPointer = 0;
int len = 0;
currentPacket.getString("data", dataPointer, len);
std::stringstream tmp;
if(!webVTT) {
tmp << lastNum++ << std::endl;
}
long long unsigned int time = currentPacket.getTime();
char tmpBuf[50];
int tmpLen = sprintf(tmpBuf, "%02llu:%02llu:%02llu,%03llu", (time / 3600000), ((time % 3600000) / 60000), (((time % 3600000) % 60000) / 1000), time % 1000);
tmp.write(tmpBuf, tmpLen);
tmp << " --> ";
time += currentPacket.getInt("duration");
tmpLen = sprintf(tmpBuf, "%02llu:%02llu:%02llu,%03llu", (time / 3600000), ((time % 3600000) / 60000), (((time % 3600000) % 60000) / 1000), time % 1000);
tmp.write(tmpBuf, tmpLen);
tmp << std::endl;
myConn.SendNow(tmp.str());
myConn.SendNow(dataPointer, len);
myConn.SendNow("\n");
}
void OutProgressiveSRT::sendHeader(){
HTTP::Parser HTTP_S;
FLV::Tag tag;
HTTP_S.SetHeader("Content-Type", "text/plain");
HTTP_S.protocol = "HTTP/1.0";
myConn.SendNow(HTTP_S.BuildResponse("200", "OK"));
sentHeader = true;
}
void OutProgressiveSRT::onRequest(){
HTTP::Parser HTTP_R;
while (HTTP_R.Read(myConn)){
DEBUG_MSG(DLVL_DEVEL, "Received request %s", HTTP_R.getUrl().c_str());
lastNum = 0;
webVTT = (HTTP_R.url.find(".webvtt") != std::string::npos);
if (HTTP_R.GetVar("track") != ""){
selectedTracks.insert(JSON::Value(HTTP_R.GetVar("track")).asInt());
}
myConn.setHost(HTTP_R.GetHeader("X-Origin"));
streamName = HTTP_R.GetHeader("X-Stream");
parseData = true;
wantRequest = false;
HTTP_R.Clean();
}
}
}

20
src/output/output_srt.h Normal file

@@ -0,0 +1,20 @@
#include "output.h"
namespace Mist {
class OutProgressiveSRT : public Output {
public:
OutProgressiveSRT(Socket::Connection & conn);
~OutProgressiveSRT();
static void init(Util::Config * cfg);
void onRequest();
void sendNext();
void onFail();
void sendHeader();
protected:
bool webVTT;
int lastNum;
};
}
typedef Mist::OutProgressiveSRT mistOut;

134
src/output/output_ts.cpp Normal file

@@ -0,0 +1,134 @@
#include "output_ts.h"
#include <mist/http_parser.h>
#include <mist/defines.h>
namespace Mist {
OutTS::OutTS(Socket::Connection & conn) : Output(conn){
haveAvcc = false;
AudioCounter = 0;
VideoCounter = 0;
std::string tracks = config->getString("tracks");
unsigned int currTrack = 0;
//loop over tracks, add any found track IDs to selectedTracks
if (tracks != ""){
for (unsigned int i = 0; i < tracks.size(); ++i){
if (tracks[i] >= '0' && tracks[i] <= '9'){
currTrack = currTrack*10 + (tracks[i] - '0');
}else{
if (currTrack > 0){
selectedTracks.insert(currTrack);
}
currTrack = 0;
}
}
if (currTrack > 0){
selectedTracks.insert(currTrack);
}
}
streamName = config->getString("streamname");
parseData = true;
wantRequest = false;
initialize();
}
OutTS::~OutTS() {}
void OutTS::init(Util::Config * cfg){
capa["name"] = "TS";
capa["desc"] = "Enables the raw MPEG Transport Stream protocol over TCP.";
capa["deps"] = "";
capa["required"]["streamname"]["name"] = "Stream";
capa["required"]["streamname"]["help"] = "What streamname to serve. For multiple streams, add this protocol multiple times using different ports.";
capa["required"]["streamname"]["type"] = "str";
capa["required"]["streamname"]["option"] = "--stream";
capa["optional"]["tracks"]["name"] = "Tracks";
capa["optional"]["tracks"]["help"] = "The track IDs of the stream that this connector will transmit, separated by spaces.";
capa["optional"]["tracks"]["type"] = "str";
capa["optional"]["tracks"]["option"] = "--tracks";
capa["codecs"][0u][0u].append("H264");
capa["codecs"][0u][1u].append("AAC");
cfg->addOption("streamname",
JSON::fromString("{\"arg\":\"string\",\"short\":\"s\",\"long\":\"stream\",\"help\":\"The name of the stream that this connector will transmit.\"}"));
cfg->addOption("tracks",
JSON::fromString("{\"arg\":\"string\",\"value\":[\"\"],\"short\": \"t\",\"long\":\"tracks\",\"help\":\"The track IDs of the stream that this connector will transmit, separated by spaces.\"}"));
cfg->addConnectorOptions(8888, capa);
config = cfg;
}
void OutTS::sendNext(){
Socket::Buffer ToPack;
char * ContCounter = 0;
bool IsKeyFrame = false;
char * dataPointer = 0;
int dataLen = 0;
currentPacket.getString("data", dataPointer, dataLen);
//detect packet type, and put converted data into ToPack.
if (myMeta.tracks[currentPacket.getTrackId()].type == "video"){
ToPack.append(TS::Packet::getPESVideoLeadIn(0ul, currentPacket.getTime() * 90));
IsKeyFrame = currentPacket.getInt("keyframe");
if (IsKeyFrame){
if (!haveAvcc){
avccbox.setPayload(myMeta.tracks[currentPacket.getTrackId()].init);
haveAvcc = true;
}
ToPack.append(avccbox.asAnnexB());
}
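//rewrite the length-prefixed NAL units as Annex B, replacing each 4-byte size field with a 0x00000001 start code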
unsigned int i = 0;
while (i + 4 < (unsigned int)dataLen){
unsigned int ThisNaluSize = (dataPointer[i] << 24) + (dataPointer[i+1] << 16) + (dataPointer[i+2] << 8) + dataPointer[i+3];
if (ThisNaluSize + i + 4 > (unsigned int)dataLen){
DEBUG_MSG(DLVL_WARN, "Too big NALU detected (%u > %d) - skipping!", ThisNaluSize + i + 4, dataLen);
break;
}
ToPack.append("\000\000\000\001", 4);
i += 4;
ToPack.append(dataPointer + i, ThisNaluSize);
i += ThisNaluSize;
}
ContCounter = &VideoCounter;
}else if (myMeta.tracks[currentPacket.getTrackId()].type == "audio"){
ToPack.append(TS::Packet::getPESAudioLeadIn(7+dataLen, currentPacket.getTime() * 90));
ToPack.append(TS::GetAudioHeader(dataLen, myMeta.tracks[currentPacket.getTrackId()].init));
ToPack.append(dataPointer, dataLen);
ContCounter = &AudioCounter;
}
bool first = true;
//send TS packets
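//each TS packet carries at most 184 payload bytes; the first packet gets the unit start flag, plus random access and a PCR on keyframes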
while (ToPack.size()){
PackData.Clear();
/// \todo Update according to sendHeader()'s generated data.
//0x100 - 1 + currentPacket.getTrackId()
if (myMeta.tracks[currentPacket.getTrackId()].type == "video"){
PackData.PID(0x100);
}else{
PackData.PID(0x101);
}
PackData.ContinuityCounter((*ContCounter)++);
if (first){
PackData.UnitStart(1);
if (IsKeyFrame){
PackData.RandomAccess(1);
PackData.PCR(currentPacket.getTime() * 27000);
}
first = false;
}
unsigned int toSend = PackData.AddStuffing(ToPack.bytes(184));
std::string gonnaSend = ToPack.remove(toSend);
PackData.FillFree(gonnaSend);
myConn.SendNow(PackData.ToString(), 188);
}
}
void OutTS::sendHeader(){
/// \todo Update this to actually generate these from the selected tracks.
/// \todo ts_packet.h contains all necessary info for this
myConn.SendNow(TS::PAT, 188);
myConn.SendNow(TS::PMT, 188);
sentHeader = true;
}
}

23
src/output/output_ts.h Normal file

@@ -0,0 +1,23 @@
#include "output.h"
#include <mist/mp4_generic.h>
#include <mist/ts_packet.h>
namespace Mist {
class OutTS : public Output {
public:
OutTS(Socket::Connection & conn);
~OutTS();
static void init(Util::Config * cfg);
void sendNext();
void sendHeader();
protected:
TS::Packet PackData;
unsigned int PacketNumber;
bool haveAvcc;
char VideoCounter;
char AudioCounter;
MP4::AVCC avccbox;
};
}
typedef Mist::OutTS mistOut;