Shared memory rewrite

Thulinma 2014-04-04 19:50:40 +02:00
parent afcddbfca6
commit cd2fe225c5
81 changed files with 7775 additions and 5411 deletions


@@ -25,6 +25,7 @@
#include "embed.js.h"
/// Holds everything unique to HTTP Connectors.
namespace Connector_HTTP {
@@ -115,14 +116,15 @@ namespace Connector_HTTP {
///Displays a friendly error message.
///\param H The request that was being handled upon timeout.
///\param conn The connection to the client that issued the request.
///\param msg The message to print to the client.
///\return A timestamp indicating when the request was parsed.
long long int proxyHandleTimeout(HTTP::Parser & H, Socket::Connection & conn){
long long int proxyHandleTimeout(HTTP::Parser & H, Socket::Connection & conn, std::string msg){
H.Clean();
H.SetHeader("Server", "mistserver/" PACKAGE_VERSION "/" + Util::Config::libver);
H.SetBody(
"<!DOCTYPE html><html><head><title>Gateway timeout</title></head><body><h1>Gateway timeout</h1>Though the server understood your request and attempted to handle it, somehow handling it took longer than it should. Your request has been cancelled - please try again later.</body></html>");
"<!DOCTYPE html><html><head><title>"+msg+"</title></head><body><h1>"+msg+"</h1>Though the server understood your request and attempted to handle it, somehow handling it took longer than it should. Your request has been cancelled - please try again later.</body></html>");
long long int ret = Util::getMS();
conn.SendNow(H.BuildResponse("504", "Gateway Timeout"));
conn.SendNow(H.BuildResponse("504", msg));
return ret;
}
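For illustration only (not part of this commit): a minimal sketch of how callers use the reworked helper, with the H and conn objects of the proxy code assumed to be in scope. The msg argument now doubles as the page title, the heading and the HTTP status text of the 504 response.
//hedged usage sketch -- the same pattern appears at the updated call sites in the hunks below
return proxyHandleTimeout(H, conn, "Gateway connection dropped");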
@@ -404,6 +406,7 @@ namespace Connector_HTTP {
H.Clean();
ConnConn * myCConn = 0;
unsigned int counter = 0;
//loop until a connection is available/created
while (!myCConn){
//lock the connection mutex before trying anything
@@ -412,6 +415,12 @@ namespace Connector_HTTP {
if ( !connectorConnections.count(uid)){
connectorConnections[uid] = new ConnConn(new Socket::Connection(Util::getTmpFolder() + connector));
connectorConnections[uid]->conn->setBlocking(false); //do not block on spool() with no data
if (!connectorConnections[uid]->conn->spool() && !connectorConnections[uid]->conn){
//unlock the connection mutex before exiting
connMutex.unlock();
DEBUG_MSG(DLVL_FAIL, "Created new connection (%s) failed - aborting request!", uid.c_str());
return Util::getMS();
}
DEBUG_MSG(DLVL_HIGH, "Created new connection %s", uid.c_str());
}
@@ -420,11 +429,17 @@ namespace Connector_HTTP {
myCConn = connectorConnections[uid];
//if the connection is dead, delete it and re-loop
if (!myCConn->conn->spool() && !myCConn->conn->connected()){
counter++;
DEBUG_MSG(DLVL_HIGH, "Resetting existing connection %s", uid.c_str());
connectorConnections.erase(uid);
myCConn->inUse.unlock();
delete myCConn;
myCConn = 0;
if (counter++ > 2){
connMutex.unlock();
DEBUG_MSG(DLVL_FAIL, "Created new connection (%s) failed - aborting request!", uid.c_str());
return Util::getMS();
}
}else{
DEBUG_MSG(DLVL_HIGH, "Using active connection %s", uid.c_str());
}
@@ -477,7 +492,7 @@ namespace Connector_HTTP {
myCConn->inUse.unlock();
//unset to only read headers
H.headerOnly = false;
return proxyHandleTimeout(H, conn);
return proxyHandleTimeout(H, conn, "Timeout: fragment too new");
}
myCConn->lastUse = 0;
timeout = 0;
@@ -495,9 +510,9 @@ namespace Connector_HTTP {
myCConn->inUse.unlock();
//unset to only read headers
H.headerOnly = false;
return proxyHandleTimeout(H, conn);
return proxyHandleTimeout(H, conn, "Gateway timeout while waiting for response");
}else{
Util::sleep(5);
Util::sleep(100);
}
}
//unset to only read headers
@@ -506,7 +521,7 @@ namespace Connector_HTTP {
//failure, disconnect and send error to user
myCConn->conn->close();
myCConn->inUse.unlock();
return proxyHandleTimeout(H, conn);
return proxyHandleTimeout(H, conn, "Gateway connection dropped");
}else{
long long int ret = Util::getMS();
//success, check type of response
@@ -699,6 +714,14 @@ int main(int argc, char ** argv){
Connector_HTTP::capabilities.removeMember((*it).substr(8));
}
}
if ((*it).substr(0, 7) == "MistOut"){
arg_one = Util::getMyPath() + (*it);
conn_args[0] = arg_one.c_str();
Connector_HTTP::capabilities[(*it).substr(7)] = JSON::fromString(Util::Procs::getOutputOf((char**)conn_args));
if (Connector_HTTP::capabilities[(*it).substr(7)].size() < 1){
Connector_HTTP::capabilities.removeMember((*it).substr(7));
}
}
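//(explanatory note, not in the original source: mirroring the connector handling above, each binary whose
// name starts with "MistOut" is run once, its output is parsed as JSON and stored as a capability entry
// under the name minus the "MistOut" prefix; entries that yield no usable JSON are removed again)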
}
return conf.serveThreadedSocket(Connector_HTTP::proxyHandleHTTPConnection);


@@ -1,333 +0,0 @@
/// \file conn_http_dynamic.cpp
/// Contains the main code for the HTTP Dynamic Connector
#include <iostream>
#include <sstream>
#include <queue>
#include <cstdlib>
#include <cstdio>
#include <cmath>
#include <unistd.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <getopt.h>
#include <mist/socket.h>
#include <mist/http_parser.h>
#include <mist/json.h>
#include <mist/dtsc.h>
#include <mist/flv_tag.h>
#include <mist/base64.h>
#include <mist/amf.h>
#include <mist/mp4.h>
#include <mist/mp4_adobe.h>
#include <mist/config.h>
#include <sstream>
#include <mist/stream.h>
#include <mist/timing.h>
/// Holds everything unique to HTTP Connectors.
namespace Connector_HTTP {
std::set<int> videoTracks;///<< Holds valid video tracks for playback
long long int audioTrack = 0;///<< Holds audio track ID for playback
void getTracks(DTSC::Meta & metadata){
videoTracks.clear();
for (std::map<int,DTSC::Track>::iterator it = metadata.tracks.begin(); it != metadata.tracks.end(); it++){
if (it->second.codec == "H264" || it->second.codec == "H263" || it->second.codec == "VP6"){
videoTracks.insert(it->first);
}
if (it->second.codec == "AAC" || it->second.codec == "MP3"){
audioTrack = it->first;
}
}
}
///\brief Builds a bootstrap for use in HTTP Dynamic streaming.
///\param streamName The name of the stream.
///\param trackMeta The current metadata of this track, used to generate the index.
///\param isLive Whether or not the stream is live.
///\param fragnum The index of the current fragment.
///\return The generated bootstrap.
std::string dynamicBootstrap(std::string & streamName, DTSC::Track & trackMeta, bool isLive = false, int fragnum = 0){
std::string empty;
MP4::ASRT asrt;
asrt.setUpdate(false);
asrt.setVersion(1);
//asrt.setQualityEntry(empty, 0);
if (isLive){
asrt.setSegmentRun(1, 4294967295ul, 0);
}else{
asrt.setSegmentRun(1, trackMeta.keys.size(), 0);
}
MP4::AFRT afrt;
afrt.setUpdate(false);
afrt.setVersion(1);
afrt.setTimeScale(1000);
//afrt.setQualityEntry(empty, 0);
MP4::afrt_runtable afrtrun;
int i = 0;
for (std::deque<DTSC::Key>::iterator it = trackMeta.keys.begin(); it != trackMeta.keys.end(); it++){
if (it->getLength()){
afrtrun.firstFragment = it->getNumber();
afrtrun.firstTimestamp = it->getTime();
afrtrun.duration = it->getLength();
afrt.setFragmentRun(afrtrun, i);
i++;
}
}
MP4::ABST abst;
abst.setVersion(1);
abst.setBootstrapinfoVersion(1);
abst.setProfile(0);
abst.setUpdate(false);
abst.setTimeScale(1000);
abst.setLive(isLive);
abst.setCurrentMediaTime(trackMeta.lastms);
abst.setSmpteTimeCodeOffset(0);
abst.setMovieIdentifier(streamName);
abst.setSegmentRunTable(asrt, 0);
abst.setFragmentRunTable(afrt, 0);
#if DEBUG >= 8
std::cout << "Sending bootstrap:" << std::endl << abst.toPrettyString(0) << std::endl;
#endif
return std::string((char*)abst.asBox(), (int)abst.boxedSize());
}
///\brief Builds an index file for HTTP Dynamic streaming.
///\param streamName The name of the stream.
///\param metadata The current metadata, used to generate the index.
///\return The index file for HTTP Dynamic Streaming.
std::string dynamicIndex(std::string & streamName, DTSC::Meta & metadata){
if ( !audioTrack){getTracks(metadata);}
std::stringstream Result;
Result << "<?xml version=\"1.0\" encoding=\"utf-8\"?>" << std::endl;
Result << " <manifest xmlns=\"http://ns.adobe.com/f4m/1.0\">" << std::endl;
Result << " <id>" << streamName << "</id>" << std::endl;
Result << " <mimeType>video/mp4</mimeType>" << std::endl;
Result << " <deliveryType>streaming</deliveryType>" << std::endl;
if (metadata.vod){
Result << " <duration>" << metadata.tracks[*videoTracks.begin()].lastms / 1000 << ".000</duration>" << std::endl;
Result << " <streamType>recorded</streamType>" << std::endl;
}else{
Result << " <duration>0.00</duration>" << std::endl;
Result << " <streamType>live</streamType>" << std::endl;
}
for (std::set<int>::iterator it = videoTracks.begin(); it != videoTracks.end(); it++){
Result << " <bootstrapInfo "
"profile=\"named\" "
"id=\"boot" << (*it) << "\" "
"url=\"" << (*it) << ".abst\">"
"</bootstrapInfo>" << std::endl;
}
for (std::set<int>::iterator it = videoTracks.begin(); it != videoTracks.end(); it++){
Result << " <media "
"url=\"" << (*it) << "-\" "
"bitrate=\"" << metadata.tracks[(*it)].bps * 8 << "\" "
"bootstrapInfoId=\"boot" << (*it) << "\" "
"width=\"" << metadata.tracks[(*it)].width << "\" "
"height=\"" << metadata.tracks[(*it)].height << "\">" << std::endl;
Result << " <metadata>AgAKb25NZXRhRGF0YQMAAAk=</metadata>" << std::endl;
Result << " </media>" << std::endl;
}
Result << "</manifest>" << std::endl;
#if DEBUG >= 8
std::cerr << "Sending this manifest:" << std::endl << Result.str() << std::endl;
#endif
return Result.str();
} //dynamicIndex
///\brief Main function for the HTTP Dynamic Connector
///\param conn A socket describing the connection to the client.
///\return The exit code of the connector.
int dynamicConnector(Socket::Connection & conn){
FLV::Tag tmp; //temporary tag
DTSC::Stream Strm; //Incoming stream buffer.
HTTP::Parser HTTP_R, HTTP_S; //HTTP Receiver and HTTP Sender.
Socket::Connection ss( -1);
std::string streamname;
bool handlingRequest = false;
int Quality = 0;
int ReqFragment = -1;
long long mstime = 0;
long long mslen = 0;
unsigned int lastStats = 0;
conn.setBlocking(false); //do not block on conn.spool() when no data is available
while (conn.connected()){
if ( !handlingRequest){
if (conn.spool() && HTTP_R.Read(conn)){
#if DEBUG >= 5
std::cout << "Received request: " << HTTP_R.getUrl() << std::endl;
#endif
conn.setHost(HTTP_R.GetHeader("X-Origin"));
streamname = HTTP_R.GetHeader("X-Stream");
if ( !ss){
ss = Util::Stream::getStream(streamname);
if ( !ss.connected()){
HTTP_S.Clean();
HTTP_S.SetBody("No such stream is available on the system. Please try again.\n");
HTTP_S.SendResponse("404", "Not found", conn);
continue;
}
Strm.waitForMeta(ss);
}
if (HTTP_R.url.find(".abst") != std::string::npos){
std::string streamID = HTTP_R.url.substr(streamname.size() + 10);
streamID = streamID.substr(0, streamID.find(".abst"));
HTTP_S.Clean();
HTTP_S.SetBody(dynamicBootstrap(streamname, Strm.metadata.tracks[atoll(streamID.c_str())], Strm.metadata.live));
HTTP_S.SetHeader("Content-Type", "binary/octet");
HTTP_S.SetHeader("Cache-Control", "no-cache");
HTTP_S.SendResponse("200", "OK", conn);
HTTP_R.Clean(); //clean for any possible next requests
continue;
}
if (HTTP_R.url.find("f4m") == std::string::npos){
std::string tmp_qual = HTTP_R.url.substr(HTTP_R.url.find("/", 10) + 1);
Quality = atoi(tmp_qual.substr(0, tmp_qual.find("Seg") - 1).c_str());
int temp;
temp = HTTP_R.url.find("Seg") + 3;
temp = HTTP_R.url.find("Frag") + 4;
ReqFragment = atoi(HTTP_R.url.substr(temp).c_str());
#if DEBUG >= 5
printf("Video track %d, fragment %d\n", Quality, ReqFragment);
#endif
if (!audioTrack){getTracks(Strm.metadata);}
DTSC::Track & vidTrack = Strm.metadata.tracks[Quality];
mstime = 0;
mslen = 0;
for (std::deque<DTSC::Key>::iterator it = vidTrack.keys.begin(); it != vidTrack.keys.end(); it++){
if (it->getNumber() >= ReqFragment){
mstime = it->getTime();
mslen = it->getLength();
if (Strm.metadata.live){
if (it == vidTrack.keys.end() - 2){
HTTP_S.Clean();
HTTP_S.SetBody("Proxy, re-request this in a second or two.\n");
HTTP_S.SendResponse("208", "Ask again later", conn);
HTTP_R.Clean(); //clean for any possible next requests
std::cout << "Fragment after fragment " << ReqFragment << " not available yet" << std::endl;
if (ss.spool()){
while (Strm.parsePacket(ss.Received())){}
}
}
}
break;
}
}
if (HTTP_R.url == "/"){continue;}//Don't continue, but continue instead.
if (Strm.metadata.live){
if (mstime == 0 && ReqFragment > 1){
HTTP_S.Clean();
HTTP_S.SetBody("The requested fragment is no longer kept in memory on the server and cannot be served.\n");
HTTP_S.SendResponse("412", "Fragment out of range", conn);
HTTP_R.Clean(); //clean for any possible next requests
std::cout << "Fragment " << ReqFragment << " too old" << std::endl;
continue;
}
}
std::stringstream sstream;
sstream << "t " << Quality << " " << audioTrack << "\ns " << mstime << "\np " << (mstime + mslen) << "\n";
ss.SendNow(sstream.str().c_str());
HTTP_S.Clean();
HTTP_S.SetHeader("Content-Type", "video/mp4");
HTTP_S.StartResponse(HTTP_R, conn);
//send the bootstrap
std::string bootstrap = dynamicBootstrap(streamname, Strm.metadata.tracks[Quality], Strm.metadata.live, ReqFragment);
HTTP_S.Chunkify(bootstrap, conn);
//send a zero-size mdat, meaning it stretches until end of file.
HTTP_S.Chunkify("\000\000\000\000mdat", 8, conn);
//send init data, if needed.
if (audioTrack > 0){
tmp.DTSCAudioInit(Strm.metadata.tracks[audioTrack]);
tmp.tagTime(mstime);
HTTP_S.Chunkify(tmp.data, tmp.len, conn);
}
if (Quality > 0){
tmp.DTSCVideoInit(Strm.metadata.tracks[Quality]);
tmp.tagTime(mstime);
HTTP_S.Chunkify(tmp.data, tmp.len, conn);
}
handlingRequest = true;
}else{
HTTP_S.Clean();
HTTP_S.SetHeader("Content-Type", "text/xml");
HTTP_S.SetHeader("Cache-Control", "no-cache");
HTTP_S.SetBody(dynamicIndex(streamname, Strm.metadata));
HTTP_S.SendResponse("200", "OK", conn);
}
HTTP_R.Clean(); //clean for any possible next requests
}else{
//sleep for 250ms before next attempt
Util::sleep(250);
}
}
if (ss.connected()){
unsigned int now = Util::epoch();
if (now != lastStats){
lastStats = now;
ss.SendNow(conn.getStats("HTTP_Dynamic").c_str());
}
if (handlingRequest && ss.spool()){
while (Strm.parsePacket(ss.Received())){
if (Strm.lastType() == DTSC::PAUSEMARK){
//send an empty chunk to signify request is done
HTTP_S.Chunkify("", 0, conn);
handlingRequest = false;
}
if (Strm.lastType() == DTSC::VIDEO || Strm.lastType() == DTSC::AUDIO){
//send a chunk with the new data
tmp.DTSCLoader(Strm);
HTTP_S.Chunkify(tmp.data, tmp.len, conn);
}
}
}
if ( !ss.connected()){
break;
}
}
}
conn.close();
ss.SendNow(conn.getStats("HTTP_Dynamic").c_str());
ss.close();
return 0;
} //Connector_HTTP_Dynamic main function
} //Connector_HTTP_Dynamic namespace
///\brief The standard process-spawning main function.
int main(int argc, char ** argv){
Util::Config conf(argv[0], PACKAGE_VERSION);
JSON::Value capa;
capa["desc"] = "Enables HTTP protocol Adobe-specific dynamic streaming (also known as HDS).";
capa["deps"] = "HTTP";
capa["url_rel"] = "/dynamic/$/manifest.f4m";
capa["url_prefix"] = "/dynamic/$/";
capa["socket"] = "http_dynamic";
capa["codecs"][0u][0u].append("H264");
capa["codecs"][0u][0u].append("H263");
capa["codecs"][0u][0u].append("VP6");
capa["codecs"][0u][1u].append("AAC");
capa["codecs"][0u][1u].append("MP3");
capa["methods"][0u]["handler"] = "http";
capa["methods"][0u]["type"] = "flash/11";
capa["methods"][0u]["priority"] = 7ll;
conf.addBasicConnectorOptions(capa);
conf.parseArgs(argc, argv);
if (conf.getBool("json")){
std::cout << capa.toString() << std::endl;
return -1;
}
return conf.serveForkedSocket(Connector_HTTP::dynamicConnector);
} //main


@@ -1,200 +0,0 @@
///\file conn_http_json.cpp
///\brief Contains the main code for the HTTP JSON Connector
#include <iostream>
#include <queue>
#include <sstream>
#include <iomanip>
#include <cstdlib>
#include <cstdio>
#include <cmath>
#include <unistd.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <getopt.h>
#include <mist/socket.h>
#include <mist/http_parser.h>
#include <mist/dtsc.h>
#include <mist/flv_tag.h>
#include <mist/amf.h>
#include <mist/config.h>
#include <mist/stream.h>
#include <mist/timing.h>
///\brief Holds everything unique to HTTP Connectors.
namespace Connector_HTTP {
///\brief Main function for the HTTP Progressive Connector
///\param conn A socket describing the connection to the client.
///\return The exit code of the connector.
int JSONConnector(Socket::Connection & conn){
DTSC::Stream Strm; //Incoming stream buffer.
HTTP::Parser HTTP_R, HTTP_S;//HTTP Receiver and HTTP Sender.
bool inited = false;//Whether the stream is initialized
Socket::Connection ss( -1);//The Stream Socket, used to connect to the desired stream.
std::string streamname;//Will contain the name of the stream.
unsigned int lastStats = 0;//Indicates the last time that we have sent stats to the server socket.
unsigned int seek_sec = 0;//Seek position in ms
unsigned int seek_byte = 0;//Seek position in bytes
std::stringstream jsondata;
while (conn.connected()){
//Only attempt to parse input when not yet init'ed.
if ( !inited){
if (conn.spool() && HTTP_R.Read(conn)){
#if DEBUG >= 5
std::cout << "Received request: " << HTTP_R.getUrl() << std::endl;
#endif
conn.setHost(HTTP_R.GetHeader("X-Origin"));
streamname = HTTP_R.GetHeader("X-Stream");
int start = 0;
if ( !HTTP_R.GetVar("start").empty()){
start = atoi(HTTP_R.GetVar("start").c_str());
}
if ( !HTTP_R.GetVar("starttime").empty()){
start = atoi(HTTP_R.GetVar("starttime").c_str());
}
if ( !HTTP_R.GetVar("apstart").empty()){
start = atoi(HTTP_R.GetVar("apstart").c_str());
}
if ( !HTTP_R.GetVar("ec_seek").empty()){
start = atoi(HTTP_R.GetVar("ec_seek").c_str());
}
if ( !HTTP_R.GetVar("fs").empty()){
start = atoi(HTTP_R.GetVar("fs").c_str());
}
//under 3 hours we assume seconds, otherwise byte position
if (start < 10800){
seek_byte = start * 1000; //ms, not s
}else{
seek_byte = start * 1000; //divide by 1mbit, then *1000 for ms.
}
// ready4data = true;
HTTP_R.Clean(); //clean for any possible next requests
jsondata.clear();
jsondata << "[";
//we are ready, connect the socket!
if ( !ss.connected()){
ss = Util::Stream::getStream(streamname);
}
if ( !ss.connected()){
#if DEBUG >= 1
fprintf(stderr, "Could not connect to server for %s!\n", streamname.c_str());
#endif
ss.close();
HTTP_S.Clean();
HTTP_S.SetBody("No such stream is available on the system. Please try again.\n");
conn.SendNow(HTTP_S.BuildResponse("404", "Not found"));
//ready4data = false;
inited = false;
continue;
}
//wait until we have a header
while ( !Strm.metadata && ss.connected()){
if (ss.spool()){
Strm.parsePacket(ss.Received()); //read the metadata
}else{
Util::sleep(5);
}
}
seek_sec = seek_byte;
std::stringstream cmd;
cmd << "t";
int tid = -1;
for (std::map<int,DTSC::Track>::iterator it = Strm.metadata.tracks.begin(); it != Strm.metadata.tracks.end(); it++){
if (it->second.type == "meta" ){
if (tid == -1){
tid = it->second.trackID;
}
cmd << " " << it->second.trackID;
}
}
if( cmd.str() == "t" ){
cmd.str("");
cmd.clear();
}
int maxTime = Strm.metadata.tracks[tid].lastms;
cmd << "\ns " << seek_sec << "\np " << maxTime << "\n";
ss.SendNow(cmd.str().c_str(), cmd.str().size());
inited = true;
}
}
if (inited){
unsigned int now = Util::epoch();
if (now != lastStats){
lastStats = now;
ss.SendNow(conn.getStats("HTTP_JSON").c_str());
}
if (ss.spool()){
while (Strm.parsePacket(ss.Received())){
if(Strm.lastType() == DTSC::PAUSEMARK){
HTTP_S.Clean(); //make sure no parts of old requests are left in any buffers
HTTP_S.SetHeader("Content-Type", "application/json"); //Send the correct content-type for FLV files
jsondata << "]";
HTTP_S.SetBody(jsondata.str());
conn.SendNow(HTTP_S.BuildResponse("200", "OK")); //no SetBody = unknown length - this is intentional, we will stream the entire file
inited = false;
jsondata.str(""); // totally do this
jsondata.clear();
break;
}
if (jsondata.str().length() > 1){
jsondata << ",";
}
jsondata << Strm.getPacket().toString();
}
}else{
Util::sleep(1);
}
if ( !ss.connected()){
break;
}
}
}
conn.close();
ss.SendNow(conn.getStats("HTTP_JSON").c_str());
ss.close();
return 0;
} //JSON connector main function
} //Connector_HTTP namespace
///\brief The standard process-spawning main function.
int main(int argc, char ** argv){
Util::Config conf(argv[0], PACKAGE_VERSION);
JSON::Value capa;
capa["desc"] = "Enables HTTP protocol JSON streaming.";
capa["deps"] = "HTTP";
capa["url_rel"] = "/$.json";
capa["url_match"] = "/$.json";
capa["url_handler"] = "http";
capa["url_type"] = "json";
capa["socket"] = "http_json";
conf.addBasicConnectorOptions(capa);
conf.parseArgs(argc, argv);
if (conf.getBool("json")){
std::cout << capa.toString() << std::endl;
return -1;
}
return conf.serveForkedSocket(Connector_HTTP::JSONConnector);
} //main


@@ -1,354 +0,0 @@
/// \file conn_http_dynamic.cpp
/// Contains the main code for the HTTP Dynamic Connector
#include <iostream>
#include <iomanip>
#include <sstream>
#include <queue>
#include <cstdlib>
#include <cstdio>
#include <cmath>
#include <unistd.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <getopt.h>
#include <mist/socket.h>
#include <mist/http_parser.h>
#include <mist/json.h>
#include <mist/dtsc.h>
#include <mist/mp4.h>
#include <mist/mp4_generic.h>
#include <mist/config.h>
#include <sstream>
#include <mist/stream.h>
#include <mist/timing.h>
#include <mist/ts_packet.h>
/// Holds everything unique to HTTP Connectors.
namespace Connector_HTTP {
///\brief Builds an index file for HTTP Live streaming.
///\param metadata The current metadata, used to generate the index.
///\param isLive Whether or not the stream is live.
///\return The index file for HTTP Live Streaming.
std::string liveIndex(DTSC::Meta & metadata, bool isLive){
std::stringstream result;
result << "#EXTM3U\r\n";
int audioId = -1;
std::string audioName;
for (std::map<int,DTSC::Track>::iterator it = metadata.tracks.begin(); it != metadata.tracks.end(); it++){
if (it->second.codec == "AAC"){
audioId = it->first;
audioName = it->second.getIdentifier();
break;
}
}
for (std::map<int,DTSC::Track>::iterator it = metadata.tracks.begin(); it != metadata.tracks.end(); it++){
if (it->second.codec == "H264"){
int bWidth = it->second.bps * 2;
if (audioId != -1){
bWidth += metadata.tracks[audioId].bps * 2;
}
result << "#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=" << bWidth * 10 << "\r\n";
result << it->first;
if (audioId != -1){
result << "_" << audioId;
}
result << "/index.m3u8\r\n";
}
}
#if DEBUG >= 8
std::cerr << "Sending this index:" << std::endl << result.str() << std::endl;
#endif
return result.str();
}
std::string liveIndex(DTSC::Track & metadata, bool isLive){
std::stringstream result;
//parse single track
int longestFragment = 0;
for (std::deque<DTSC::Fragment>::iterator it = metadata.fragments.begin(); (it + 1) != metadata.fragments.end(); it++){
if (it->getDuration() > longestFragment){
longestFragment = it->getDuration();
}
}
result << "#EXTM3U\r\n"
"#EXT-X-TARGETDURATION:" << (longestFragment / 1000) + 1 << "\r\n"
"#EXT-X-MEDIA-SEQUENCE:" << metadata.missedFrags << "\r\n";
for (std::deque<DTSC::Fragment>::iterator it = metadata.fragments.begin(); it != metadata.fragments.end(); it++){
long long int starttime = metadata.getKey(it->getNumber()).getTime();
if (it != (metadata.fragments.end() - 1)){
result << "#EXTINF:" << ((it->getDuration() + 500) / 1000) << ", no desc\r\n" << starttime << "_" << it->getDuration() + starttime << ".ts\r\n";
}
}
if ( !isLive){
result << "#EXT-X-ENDLIST\r\n";
}
#if DEBUG >= 8
std::cerr << "Sending this index:" << std::endl << result.str() << std::endl;
#endif
return result.str();
} //liveIndex
///\brief Main function for the HTTP Live Connector
///\param conn A socket describing the connection to the client.
///\return The exit code of the connector.
int liveConnector(Socket::Connection & conn){
DTSC::Stream Strm; //Incoming stream buffer.
HTTP::Parser HTTP_R, HTTP_S; //HTTP Receiver and HTTP Sender.
bool ready4data = false; //Set to true when streaming is to begin.
bool AppleCompat = false; //Set to true when Apple device detected.
Socket::Connection ss( -1);
std::string streamname;
bool handlingRequest = false;
std::string recBuffer = "";
TS::Packet PackData;
int PacketNumber = 0;
long long unsigned int TimeStamp = 0;
unsigned int ThisNaluSize;
char VideoCounter = 0;
char AudioCounter = 0;
long long unsigned int lastVid = 0;
bool IsKeyFrame = false;
MP4::AVCC avccbox;
bool haveAvcc = false;
std::vector<int> fragIndices;
std::string manifestType;
int Segment = -1;
int temp;
int trackID = 0;
int audioTrackID = 0;
unsigned int lastStats = 0;
conn.setBlocking(false); //do not block on conn.spool() when no data is available
while (conn.connected()){
if ( !handlingRequest){
if (conn.spool() && HTTP_R.Read(conn)){
#if DEBUG >= 5
std::cout << "Received request: " << HTTP_R.getUrl() << std::endl;
#endif
conn.setHost(HTTP_R.GetHeader("X-Origin"));
AppleCompat = (HTTP_R.GetHeader("User-Agent").find("Apple") != std::string::npos);
streamname = HTTP_R.GetHeader("X-Stream");
if ( !ss){
ss = Util::Stream::getStream(streamname);
if ( !ss.connected()){
#if DEBUG >= 1
fprintf(stderr, "Could not connect to server!\n");
#endif
HTTP_S.Clean();
HTTP_S.SetBody("No such stream is available on the system. Please try again.\n");
conn.SendNow(HTTP_S.BuildResponse("404", "Not found"));
ready4data = false;
continue;
}
ss.setBlocking(false);
Strm.waitForMeta(ss);
}
if (HTTP_R.url.find(".m3u") == std::string::npos){
temp = HTTP_R.url.find("/", 5) + 1;
std::string allTracks = HTTP_R.url.substr(temp, HTTP_R.url.find("/", temp) - temp);
trackID = atoi(allTracks.c_str());
audioTrackID = atoi(allTracks.substr(allTracks.find("_")+1).c_str());
temp = HTTP_R.url.find("/", temp) + 1;
Segment = atoi(HTTP_R.url.substr(temp, HTTP_R.url.find("_", temp) - temp).c_str());
lastVid = Segment * 90;
temp = HTTP_R.url.find("_", temp) + 1;
int frameCount = atoi(HTTP_R.url.substr(temp, HTTP_R.url.find(".ts", temp) - temp).c_str());
if (Strm.metadata.live){
int seekable = Strm.canSeekms(Segment);
if (seekable < 0){
HTTP_S.Clean();
HTTP_S.SetBody("The requested fragment is no longer kept in memory on the server and cannot be served.\n");
conn.SendNow(HTTP_S.BuildResponse("412", "Fragment out of range"));
HTTP_R.Clean(); //clean for any possible next requests
std::cout << "Fragment @ " << Segment << " too old" << std::endl;
continue;
}
if (seekable > 0){
HTTP_S.Clean();
HTTP_S.SetBody("Proxy, re-request this in a second or two.\n");
conn.SendNow(HTTP_S.BuildResponse("208", "Ask again later"));
HTTP_R.Clean(); //clean for any possible next requests
std::cout << "Fragment @ " << Segment << " not available yet" << std::endl;
continue;
}
}
for (unsigned int i = 0; i < allTracks.size(); i++){
if (allTracks[i] == '_'){
allTracks[i] = ' ';
}
}
std::stringstream sstream;
sstream << "t " << allTracks << "\n";
sstream << "s " << Segment << "\n";
sstream << "p " << frameCount << "\n";
ss.SendNow(sstream.str().c_str());
HTTP_S.Clean();
HTTP_S.SetHeader("Content-Type", "video/mp2t");
HTTP_S.StartResponse(HTTP_R, conn);
handlingRequest = true;
}else{
std::string request = HTTP_R.url.substr(HTTP_R.url.find("/", 5) + 1);
if (HTTP_R.url.find(".m3u8") != std::string::npos){
manifestType = "audio/x-mpegurl";
}else{
manifestType = "audio/mpegurl";
}
HTTP_S.Clean();
HTTP_S.SetHeader("Content-Type", manifestType);
HTTP_S.SetHeader("Cache-Control", "no-cache");
std::string manifest;
if (request.find("/") == std::string::npos){
manifest = liveIndex(Strm.metadata, Strm.metadata.live);
}else{
int selectId = atoi(request.substr(0,request.find("/")).c_str());
manifest = liveIndex(Strm.metadata.tracks[selectId], Strm.metadata.live);
}
HTTP_S.SetBody(manifest);
conn.SendNow(HTTP_S.BuildResponse("200", "OK"));
}
ready4data = true;
HTTP_R.Clean(); //clean for any possible next requests
}else{
Util::sleep(250);
}
}
if (ready4data){
unsigned int now = Util::epoch();
if (now != lastStats){
lastStats = now;
ss.SendNow(conn.getStats("HTTP_Live").c_str());
}
if (ss.spool()){
while (Strm.parsePacket(ss.Received())){
if (Strm.lastType() == DTSC::PAUSEMARK){
HTTP_S.Chunkify("", 0, conn);
handlingRequest = false;
}
if ( !haveAvcc){
avccbox.setPayload(Strm.metadata.tracks[trackID].init);
haveAvcc = true;
}
if (Strm.lastType() == DTSC::VIDEO || Strm.lastType() == DTSC::AUDIO){
Socket::Buffer ToPack;
//write PAT and PMT TS packets
if (PacketNumber % 42 == 0){
PackData.DefaultPAT();
HTTP_S.Chunkify(PackData.ToString(), 188, conn);
PackData.DefaultPMT();
HTTP_S.Chunkify(PackData.ToString(), 188, conn);
PacketNumber += 2;
}
int PIDno = 0;
char * ContCounter = 0;
if (Strm.lastType() == DTSC::VIDEO){
IsKeyFrame = Strm.getPacket().isMember("keyframe");
if (IsKeyFrame){
TimeStamp = (Strm.getPacket()["time"].asInt() * 27000);
}
ToPack.append(avccbox.asAnnexB());
while (Strm.lastData().size() > 4){
ThisNaluSize = (Strm.lastData()[0] << 24) + (Strm.lastData()[1] << 16) + (Strm.lastData()[2] << 8) + Strm.lastData()[3];
Strm.lastData().replace(0, 4, "\000\000\000\001", 4);
if (ThisNaluSize + 4 == Strm.lastData().size()){
ToPack.append(Strm.lastData());
break;
}else{
ToPack.append(Strm.lastData().c_str(), ThisNaluSize + 4);
Strm.lastData().erase(0, ThisNaluSize + 4);
}
}
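//(explanatory note, not in the original source: the loop above rewrites each NALU's 4-byte big-endian length
// prefix into an Annex B start code 00 00 00 01, so the H264 sample can be muxed into the 188-byte TS packets below)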
ToPack.prepend(TS::Packet::getPESVideoLeadIn(0ul, Strm.getPacket()["time"].asInt() * 90));
PIDno = 0x100 - 1 + Strm.getPacket()["trackid"].asInt();
ContCounter = &VideoCounter;
}else if (Strm.lastType() == DTSC::AUDIO){
ToPack.append(TS::GetAudioHeader(Strm.lastData().size(), Strm.metadata.tracks[audioTrackID].init));
ToPack.append(Strm.lastData());
if (AppleCompat){
ToPack.prepend(TS::Packet::getPESAudioLeadIn(ToPack.bytes(1073741824ul), lastVid));
}else{
ToPack.prepend(TS::Packet::getPESAudioLeadIn(ToPack.bytes(1073741824ul), Strm.getPacket()["time"].asInt() * 90));
}
PIDno = 0x100 - 1 + Strm.getPacket()["trackid"].asInt();
ContCounter = &AudioCounter;
IsKeyFrame = false;
}
//initial packet
PackData.Clear();
PackData.PID(PIDno);
PackData.ContinuityCounter(( *ContCounter)++);
PackData.UnitStart(1);
if (IsKeyFrame){
PackData.RandomAccess(1);
PackData.PCR(TimeStamp);
}
unsigned int toSend = PackData.AddStuffing(ToPack.bytes(184));
std::string gonnaSend = ToPack.remove(toSend);
PackData.FillFree(gonnaSend);
HTTP_S.Chunkify(PackData.ToString(), 188, conn);
PacketNumber++;
//rest of packets
while (ToPack.size()){
PackData.Clear();
PackData.PID(PIDno);
PackData.ContinuityCounter(( *ContCounter)++);
toSend = PackData.AddStuffing(ToPack.bytes(184));
gonnaSend = ToPack.remove(toSend);
PackData.FillFree(gonnaSend);
HTTP_S.Chunkify(PackData.ToString(), 188, conn);
PacketNumber++;
}
}
}
}
if ( !ss.connected()){
break;
}
}
}
conn.close();
ss.SendNow(conn.getStats("HTTP_Live").c_str());
ss.close();
#if DEBUG >= 5
fprintf(stderr, "HLS: User %i disconnected.\n", conn.getSocket());
#endif
return 0;
} //HLS_Connector main function
} //Connector_HTTP namespace
///\brief The standard process-spawning main function.
int main(int argc, char ** argv){
Util::Config conf(argv[0], PACKAGE_VERSION);
JSON::Value capa;
capa["desc"] = "Enables HTTP protocol Apple-specific streaming (also known as HLS).";
capa["deps"] = "HTTP";
capa["url_rel"] = "/hls/$/index.m3u8";
capa["url_prefix"] = "/hls/$/";
capa["socket"] = "http_live";
capa["codecs"][0u][0u].append("H264");
capa["codecs"][0u][1u].append("AAC");
capa["methods"][0u]["handler"] = "http";
capa["methods"][0u]["type"] = "html5/application/vnd.apple.mpegurl";
capa["methods"][0u]["priority"] = 9ll;
conf.addBasicConnectorOptions(capa);
conf.parseArgs(argc, argv);
if (conf.getBool("json")){
std::cout << capa.toString() << std::endl;
return -1;
}
return conf.serveForkedSocket(Connector_HTTP::liveConnector);
} //main


@@ -1,217 +0,0 @@
///\file conn_http_progressive_flv.cpp
///\brief Contains the main code for the HTTP Progressive FLV Connector
#include <iostream>
#include <queue>
#include <sstream>
#include <cstdlib>
#include <cstdio>
#include <cmath>
#include <unistd.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <mist/socket.h>
#include <mist/http_parser.h>
#include <mist/dtsc.h>
#include <mist/flv_tag.h>
#include <mist/amf.h>
#include <mist/config.h>
#include <mist/stream.h>
#include <mist/timing.h>
///\brief Holds everything unique to HTTP Connectors.
namespace Connector_HTTP {
///\brief Main function for the HTTP Progressive Connector
///\param conn A socket describing the connection to the client.
///\return The exit code of the connector.
int progressiveConnector(Socket::Connection & conn){
bool progressive_has_sent_header = false;//Indicates whether we have sent a header.
bool ready4data = false; //Set to true when streaming is to begin.
DTSC::Stream Strm; //Incoming stream buffer.
HTTP::Parser HTTP_R, HTTP_S;//HTTP Receiver and HTTP Sender.
bool inited = false;//Whether the stream is initialized
Socket::Connection ss( -1);//The Stream Socket, used to connect to the desired stream.
std::string streamname;//Will contain the name of the stream.
FLV::Tag tag;//Temporary tag buffer.
unsigned int lastStats = 0;//Indicates the last time that we have sent stats to the server socket.
unsigned int seek_sec = 0;//Seek position in ms
unsigned int seek_byte = 0;//Seek position in bytes
int videoID = -1;
int audioID = -1;
while (conn.connected()){
//Only attempt to parse input when not yet init'ed.
if ( !inited){
if (conn.spool() && HTTP_R.Read(conn)){
#if DEBUG >= 5
std::cout << "Received request: " << HTTP_R.getUrl() << std::endl;
#endif
conn.setHost(HTTP_R.GetHeader("X-Origin"));
streamname = HTTP_R.GetHeader("X-Stream");
int start = 0;
if ( !HTTP_R.GetVar("start").empty()){
start = atoi(HTTP_R.GetVar("start").c_str());
}
if ( !HTTP_R.GetVar("starttime").empty()){
start = atoi(HTTP_R.GetVar("starttime").c_str());
}
if ( !HTTP_R.GetVar("apstart").empty()){
start = atoi(HTTP_R.GetVar("apstart").c_str());
}
if ( !HTTP_R.GetVar("ec_seek").empty()){
start = atoi(HTTP_R.GetVar("ec_seek").c_str());
}
if ( !HTTP_R.GetVar("fs").empty()){
start = atoi(HTTP_R.GetVar("fs").c_str());
}
//under 3 hours we assume seconds, otherwise byte position
if (start < 10800){
seek_sec = start * 1000; //ms, not s
seek_byte = 0;
}else{
seek_byte = start; //divide by 1mbit, then *1000 for ms.
seek_sec = 0;
}
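//(illustrative note, not in the original source: start=90 is below 10800 and is taken as 90 seconds, giving
// seek_sec = 90000 ms; start=5000000 is taken as a byte position and converted to milliseconds further down
// using the summed byte rate of the selected tracks)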
ready4data = true;
HTTP_R.Clean(); //clean for any possible next requests
}
}
if (ready4data){
if ( !inited){
//we are ready, connect the socket!
ss = Util::Stream::getStream(streamname);
if ( !ss.connected()){
#if DEBUG >= 1
fprintf(stderr, "Could not connect to server for %s!\n", streamname.c_str());
#endif
ss.close();
HTTP_S.Clean();
HTTP_S.SetBody("No such stream is available on the system. Please try again.\n");
conn.SendNow(HTTP_S.BuildResponse("404", "Not found"));
ready4data = false;
continue;
}
Strm.waitForMeta(ss);
int byterate = 0;
for (std::map<int,DTSC::Track>::iterator it = Strm.metadata.tracks.begin(); it != Strm.metadata.tracks.end(); it++){
if (videoID == -1 && (it->second.codec == "H264" || it->second.codec == "H263" || it->second.codec == "VP6")){
videoID = it->second.trackID;
}
if (audioID == -1 && (it->second.codec == "AAC" || it->second.codec == "MP3")){
audioID = it->second.trackID;
}
}
if (videoID != -1){
byterate += Strm.metadata.tracks[videoID].bps;
}
if (audioID != -1){
byterate += Strm.metadata.tracks[audioID].bps;
}
if ( !byterate){byterate = 1;}
if (seek_byte){
seek_sec = (seek_byte / byterate) * 1000;
}
std::stringstream cmd;
cmd << "t";
if (videoID != -1){
cmd << " " << videoID;
}
if (audioID != -1){
cmd << " " << audioID;
}
cmd << "\ns " << seek_sec << "\np\n";
ss.SendNow(cmd.str().c_str(), cmd.str().size());
inited = true;
}
unsigned int now = Util::epoch();
if (now != lastStats){
lastStats = now;
ss.SendNow(conn.getStats("HTTP_Progressive_FLV"));
}
if (ss.spool()){
while (Strm.parsePacket(ss.Received())){
if ( !progressive_has_sent_header){
HTTP_S.Clean(); //make sure no parts of old requests are left in any buffers
HTTP_S.SetHeader("Content-Type", "video/x-flv"); //Send the correct content-type for FLV files
//HTTP_S.SetHeader("Transfer-Encoding", "chunked");
HTTP_S.protocol = "HTTP/1.0";
conn.SendNow(HTTP_S.BuildResponse("200", "OK")); //no SetBody = unknown length - this is intentional, we will stream the entire file
conn.SendNow(FLV::Header, 13); //write FLV header
//write metadata
tag.DTSCMetaInit(Strm, Strm.metadata.tracks[videoID], Strm.metadata.tracks[audioID]);
conn.SendNow(tag.data, tag.len);
//write video init data, if needed
if (videoID != -1){
tag.DTSCVideoInit(Strm.metadata.tracks[videoID]);
conn.SendNow(tag.data, tag.len);
}
//write audio init data, if needed
if (audioID != -1){
tag.DTSCAudioInit(Strm.metadata.tracks[audioID]);
conn.SendNow(tag.data, tag.len);
}
progressive_has_sent_header = true;
}
if (Strm.lastType() == DTSC::PAUSEMARK){
conn.close();
}
if (Strm.lastType() == DTSC::INVALID){
#if DEBUG >= 3
fprintf(stderr, "Invalid packet received - closing connection.\n");
#endif
conn.close();
}
if (Strm.lastType() == DTSC::AUDIO || Strm.lastType() == DTSC::VIDEO){
std::string codec = Strm.metadata.tracks[Strm.getPacket()["trackid"].asInt()].codec;
if (codec == "AAC" || codec == "MP3" || codec == "H264" || codec == "H263" || codec == "VP6"){
tag.DTSCLoader(Strm);
conn.SendNow(tag.data, tag.len); //write the tag contents
}
}
}
}else{
Util::sleep(1);
}
if ( !ss.connected()){
break;
}
}
}
conn.close();
ss.SendNow(conn.getStats("HTTP_Progressive_FLV").c_str());
ss.close();
return 0;
} //Progressive_Connector main function
} //Connector_HTTP namespace
///\brief The standard process-spawning main function.
int main(int argc, char ** argv){
Util::Config conf(argv[0], PACKAGE_VERSION);
JSON::Value capa;
capa["desc"] = "Enables HTTP protocol progressive streaming.";
capa["deps"] = "HTTP";
capa["url_rel"] = "/$.flv";
capa["url_match"] = "/$.flv";
capa["socket"] = "http_progressive_flv";
capa["codecs"][0u][0u].append("H264");
capa["codecs"][0u][0u].append("H263");
capa["codecs"][0u][0u].append("VP6");
capa["codecs"][0u][1u].append("AAC");
capa["codecs"][0u][1u].append("MP3");
capa["methods"][0u]["handler"] = "http";
capa["methods"][0u]["type"] = "flash/7";
capa["methods"][0u]["priority"] = 5ll;
conf.addBasicConnectorOptions(capa);
conf.parseArgs(argc, argv);
if (conf.getBool("json")){
std::cout << capa.toString() << std::endl;
return -1;
}
return conf.serveForkedSocket(Connector_HTTP::progressiveConnector);
} //main


@@ -1,184 +0,0 @@
///\file conn_http_progressive_mp3.cpp
///\brief Contains the main code for the HTTP Progressive MP3 Connector
#include <iostream>
#include <queue>
#include <sstream>
#include <cstdlib>
#include <cstdio>
#include <cmath>
#include <unistd.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <mist/socket.h>
#include <mist/http_parser.h>
#include <mist/dtsc.h>
#include <mist/flv_tag.h>
#include <mist/amf.h>
#include <mist/config.h>
#include <mist/stream.h>
#include <mist/timing.h>
///\brief Holds everything unique to HTTP Connectors.
namespace Connector_HTTP {
///\brief Main function for the HTTP Progressive Connector
///\param conn A socket describing the connection to the client.
///\return The exit code of the connector.
int progressiveConnector(Socket::Connection & conn){
bool progressive_has_sent_header = false;//Indicates whether we have sent a header.
bool ready4data = false; //Set to true when streaming is to begin.
DTSC::Stream Strm; //Incoming stream buffer.
HTTP::Parser HTTP_R, HTTP_S;//HTTP Receiver and HTTP Sender.
bool inited = false;//Whether the stream is initialized
Socket::Connection ss( -1);//The Stream Socket, used to connect to the desired stream.
std::string streamname;//Will contain the name of the stream.
FLV::Tag tag;//Temporary tag buffer.
unsigned int lastStats = 0;//Indicates the last time that we have sent stats to the server socket.
unsigned int seek_sec = 0;//Seek position in ms
unsigned int seek_byte = 0;//Seek position in bytes
int audioID = -1;
while (conn.connected()){
//Only attempt to parse input when not yet init'ed.
if ( !inited){
if (conn.spool() && HTTP_R.Read(conn)){
#if DEBUG >= 5
std::cout << "Received request: " << HTTP_R.getUrl() << std::endl;
#endif
conn.setHost(HTTP_R.GetHeader("X-Origin"));
streamname = HTTP_R.GetHeader("X-Stream");
int start = 0;
if ( !HTTP_R.GetVar("start").empty()){
start = atoi(HTTP_R.GetVar("start").c_str());
}
if ( !HTTP_R.GetVar("starttime").empty()){
start = atoi(HTTP_R.GetVar("starttime").c_str());
}
if ( !HTTP_R.GetVar("apstart").empty()){
start = atoi(HTTP_R.GetVar("apstart").c_str());
}
if ( !HTTP_R.GetVar("ec_seek").empty()){
start = atoi(HTTP_R.GetVar("ec_seek").c_str());
}
if ( !HTTP_R.GetVar("fs").empty()){
start = atoi(HTTP_R.GetVar("fs").c_str());
}
//under 3 hours we assume seconds, otherwise byte position
if (start < 10800){
seek_sec = start * 1000; //ms, not s
}else{
seek_byte = start; //divide by 1mbit, then *1000 for ms.
}
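//(illustrative note, not in the original source: start=90 is below 10800 and is taken as 90 seconds, giving
// seek_sec = 90000 ms; start=5000000 is taken as a byte position and converted to milliseconds further down
// using the byte rate of the selected audio track)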
ready4data = true;
HTTP_R.Clean(); //clean for any possible next requests
}
}
if (ready4data){
if ( !inited){
//we are ready, connect the socket!
ss = Util::Stream::getStream(streamname);
if ( !ss.connected()){
#if DEBUG >= 1
fprintf(stderr, "Could not connect to server for %s!\n", streamname.c_str());
#endif
ss.close();
HTTP_S.Clean();
HTTP_S.SetBody("No such stream is available on the system. Please try again.\n");
conn.SendNow(HTTP_S.BuildResponse("404", "Not found"));
ready4data = false;
continue;
}
Strm.waitForMeta(ss);
int byterate = 0;
for (std::map<int,DTSC::Track>::iterator it = Strm.metadata.tracks.begin(); it != Strm.metadata.tracks.end(); it++){
if (audioID == -1 && it->second.codec == "MP3"){
audioID = it->second.trackID;
}
}
if (audioID != -1){
byterate += Strm.metadata.tracks[audioID].bps;
}
if ( !byterate){byterate = 1;}
if (seek_byte){
seek_sec = (seek_byte / byterate) * 1000;
}
std::stringstream cmd;
cmd << "t";
if (audioID != -1){
cmd << " " << audioID;
}
cmd << "\ns " << seek_sec << "\np\n";
ss.SendNow(cmd.str().c_str(), cmd.str().size());
inited = true;
}
unsigned int now = Util::epoch();
if (now != lastStats){
lastStats = now;
ss.SendNow(conn.getStats("HTTP_Progressive").c_str());
}
if (ss.spool()){
while (Strm.parsePacket(ss.Received())){
if ( !progressive_has_sent_header){
HTTP_S.Clean(); //make sure no parts of old requests are left in any buffers
HTTP_S.SetHeader("Content-Type", "audio/mpeg"); //Send the correct content-type for MP3 files
//HTTP_S.SetHeader("Transfer-Encoding", "chunked");
HTTP_S.protocol = "HTTP/1.0";
conn.SendNow(HTTP_S.BuildResponse("200", "OK")); //no SetBody = unknown length - this is intentional, we will stream the entire file
progressive_has_sent_header = true;
}
if (Strm.lastType() == DTSC::PAUSEMARK){
conn.close();
}
if (Strm.lastType() == DTSC::INVALID){
#if DEBUG >= 3
fprintf(stderr, "Invalid packet received - closing connection.\n");
#endif
conn.close();
}
if (Strm.lastType() == DTSC::AUDIO){
conn.SendNow(Strm.lastData()); //write the MP3 contents
}
}
}else{
Util::sleep(1);
}
if ( !ss.connected()){
break;
}
}
}
conn.close();
ss.SendNow(conn.getStats("HTTP_Dynamic").c_str());
ss.close();
return 0;
} //Progressive_Connector main function
} //Connector_HTTP namespace
///\brief The standard process-spawning main function.
int main(int argc, char ** argv){
Util::Config conf(argv[0], PACKAGE_VERSION);
JSON::Value capa;
capa["desc"] = "Enables HTTP protocol progressive streaming.";
capa["deps"] = "HTTP";
capa["codecs"][0u][0u].append("MP3");
capa["url_rel"] = "/$.mp3";
capa["url_match"] = "/$.mp3";
capa["socket"] = "http_progressive_mp3";
capa["methods"][0u]["handler"] = "http";
capa["methods"][0u]["type"] = "mp3";
capa["methods"][0u]["priority"] = 8ll;
conf.addBasicConnectorOptions(capa);
conf.parseArgs(argc, argv);
if (conf.getBool("json")){
std::cout << capa.toString() << std::endl;
return -1;
}
return conf.serveForkedSocket(Connector_HTTP::progressiveConnector);
} //main


@@ -1,656 +0,0 @@
///\file conn_http_progressive_mp4.cpp
///\brief Contains the main code for the HTTP Progressive MP4 Connector
#include <iostream>
#include <queue>
#include <sstream>
#include <cstdlib>
#include <cstdio>
#include <cmath>
#include <unistd.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <mist/socket.h>
#include <mist/http_parser.h>
#include <mist/dtsc.h>
#include <mist/mp4.h>
#include <mist/mp4_generic.h>
#include <mist/amf.h>
#include <mist/config.h>
#include <mist/stream.h>
#include <mist/timing.h>
#include <mist/defines.h>
///\brief Holds everything unique to HTTP Connectors.
namespace Connector_HTTP {
struct keyPart{
public:
bool operator < (const keyPart& rhs) const {
if (time < rhs.time){
return true;
}
if (time == rhs.time){
if (trackID < rhs.trackID){
return true;
}
}
return false;
}
long unsigned int trackID;
long unsigned int size;
long long unsigned int time;
long long unsigned int endTime;
long unsigned int index;
};
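//(explanatory note, not in the original source: ordering keyPart by time first and trackID second lets a
// std::set<keyPart> act as a priority queue that walks the parts of all selected tracks in interleaved,
// time-sorted order -- both DTSCMeta2MP4Header and findSeekPoint below rely on this)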
std::string DTSCMeta2MP4Header(DTSC::Meta & metaData, std::set<int> & tracks, long long & size){
std::stringstream header;
//ftyp box
MP4::FTYP ftypBox;
header << std::string(ftypBox.asBox(),ftypBox.boxedSize());
uint64_t mdatSize = 0;
//moov box
MP4::MOOV moovBox;
unsigned int moovOffset = 0;
{
//calculating longest duration
long long int firstms = -1;
long long int lastms = -1;
for (std::set<int>::iterator it = tracks.begin(); it != tracks.end(); it++) {
if (lastms == -1 || lastms < metaData.tracks[*it].lastms){
lastms = metaData.tracks[*it].lastms;
}
if (firstms == -1 || firstms > metaData.tracks[*it].firstms){
firstms = metaData.tracks[*it].firstms;
}
}
MP4::MVHD mvhdBox(lastms - firstms);
moovBox.setContent(mvhdBox, moovOffset++);
}
for (std::set<int>::iterator it = tracks.begin(); it != tracks.end(); it++) {
MP4::TRAK trakBox;
{
{
MP4::TKHD tkhdBox(*it, metaData.tracks[*it].lastms - metaData.tracks[*it].firstms, metaData.tracks[*it].width, metaData.tracks[*it].height);
trakBox.setContent(tkhdBox, 0);
}{
MP4::MDIA mdiaBox;
unsigned int mdiaOffset = 0;
{
MP4::MDHD mdhdBox(metaData.tracks[*it].lastms - metaData.tracks[*it].firstms);
mdiaBox.setContent(mdhdBox, mdiaOffset++);
}//MDHD box
{
MP4::HDLR hdlrBox(metaData.tracks[*it].type, metaData.tracks[*it].getIdentifier());
mdiaBox.setContent(hdlrBox, mdiaOffset++);
}//hdlr box
{
MP4::MINF minfBox;
unsigned int minfOffset = 0;
if (metaData.tracks[*it].type== "video"){
MP4::VMHD vmhdBox;
vmhdBox.setFlags(1);
minfBox.setContent(vmhdBox,minfOffset++);
}else if (metaData.tracks[*it].type == "audio"){
MP4::SMHD smhdBox;
minfBox.setContent(smhdBox,minfOffset++);
}//type box
{
MP4::DINF dinfBox;
MP4::DREF drefBox;
dinfBox.setContent(drefBox,0);
minfBox.setContent(dinfBox,minfOffset++);
}//dinf box
{
MP4::STBL stblBox;
unsigned int offset = 0;
{
MP4::STSD stsdBox;
stsdBox.setVersion(0);
if (metaData.tracks[*it].type == "video"){//boxname = codec
MP4::VisualSampleEntry vse;
if (metaData.tracks[*it].codec == "H264"){
vse.setCodec("avc1");
}
vse.setDataReferenceIndex(1);
vse.setWidth(metaData.tracks[*it].width);
vse.setHeight(metaData.tracks[*it].height);
MP4::AVCC avccBox;
avccBox.setPayload(metaData.tracks[*it].init);
vse.setCLAP(avccBox);
stsdBox.setEntry(vse,0);
}else if(metaData.tracks[*it].type == "audio"){//boxname = codec
MP4::AudioSampleEntry ase;
if (metaData.tracks[*it].codec == "AAC"){
ase.setCodec("mp4a");
ase.setDataReferenceIndex(1);
}
ase.setSampleRate(metaData.tracks[*it].rate);
ase.setChannelCount(metaData.tracks[*it].channels);
ase.setSampleSize(metaData.tracks[*it].size);
//MP4::ESDS esdsBox(metaData.tracks[*it].init, metaData.tracks[*it].bps);
MP4::ESDS esdsBox;
//outputting these values first, so malloc isn't called as often.
esdsBox.setESHeaderStartCodes(metaData.tracks[*it].init);
esdsBox.setSLValue(2);
esdsBox.setESDescriptorTypeLength(32+metaData.tracks[*it].init.size());
esdsBox.setESID(2);
esdsBox.setStreamPriority(0);
esdsBox.setDecoderConfigDescriptorTypeLength(18 + metaData.tracks[*it].init.size());
esdsBox.setByteObjectTypeID(0x40);
esdsBox.setStreamType(5);
esdsBox.setReservedFlag(1);
esdsBox.setBufferSize(1250000);
esdsBox.setMaximumBitRate(10000000);
esdsBox.setAverageBitRate(metaData.tracks[*it].bps * 8);
esdsBox.setConfigDescriptorTypeLength(5);
esdsBox.setSLConfigDescriptorTypeTag(0x6);
esdsBox.setSLConfigExtendedDescriptorTypeTag(0x808080);
esdsBox.setSLDescriptorTypeLength(1);
ase.setCodecBox(esdsBox);
stsdBox.setEntry(ase,0);
}
stblBox.setContent(stsdBox,offset++);
}//stsd box
{
MP4::STTS sttsBox;
sttsBox.setVersion(0);
if (metaData.tracks[*it].parts.size()){
for (unsigned int part = 0; part < metaData.tracks[*it].parts.size(); part++){
MP4::STTSEntry newEntry;
newEntry.sampleCount = 1;
newEntry.sampleDelta = metaData.tracks[*it].parts[part].getDuration();
sttsBox.setSTTSEntry(newEntry, part);
}
}
stblBox.setContent(sttsBox,offset++);
}//stts box
if (metaData.tracks[*it].type == "video"){
//STSS Box here
MP4::STSS stssBox;
stssBox.setVersion(0);
int tmpCount = 1;
int tmpItCount = 0;
for ( std::deque< DTSC::Key>::iterator tmpIt = metaData.tracks[*it].keys.begin(); tmpIt != metaData.tracks[*it].keys.end(); tmpIt ++) {
stssBox.setSampleNumber(tmpCount,tmpItCount);
tmpCount += tmpIt->getParts();
tmpItCount ++;
}
stblBox.setContent(stssBox,offset++);
}//stss box
{
MP4::STSC stscBox;
stscBox.setVersion(0);
MP4::STSCEntry stscEntry;
stscEntry.firstChunk = 1;
stscEntry.samplesPerChunk = 1;
stscEntry.sampleDescriptionIndex = 1;
stscBox.setSTSCEntry(stscEntry, 0);
stblBox.setContent(stscBox,offset++);
}//stsc box
{
uint32_t total = 0;
MP4::STSZ stszBox;
stszBox.setVersion(0);
total = 0;
for (std::deque< DTSC::Part>::iterator partIt = metaData.tracks[*it].parts.begin(); partIt != metaData.tracks[*it].parts.end(); partIt ++) {
stszBox.setEntrySize(partIt->getSize(), total);//in bytes in file
size += partIt->getSize();
total++;
}
stblBox.setContent(stszBox,offset++);
}//stsz box
//add STCO boxes here
{
MP4::STCO stcoBox;
stcoBox.setVersion(1);
//Inserting empty values on purpose here, will be fixed later.
if (metaData.tracks[*it].parts.size() != 0){
stcoBox.setChunkOffset(0, metaData.tracks[*it].parts.size() - 1);//this inserts all empty entries at once
}
stblBox.setContent(stcoBox,offset++);
}//stco box
minfBox.setContent(stblBox,minfOffset++);
}//stbl box
mdiaBox.setContent(minfBox, mdiaOffset++);
}//minf box
trakBox.setContent(mdiaBox, 1);
}
}//trak Box
moovBox.setContent(trakBox, moovOffset++);
}
//initial offset length ftyp, length moov + 8
unsigned long long int byteOffset = ftypBox.boxedSize() + moovBox.boxedSize() + 8;
//update all STCO from the following map;
std::map <int, MP4::STCO> checkStcoBoxes;
//for all tracks
for (unsigned int i = 1; i < moovBox.getContentCount(); i++){
//10 lines to get the STCO box.
MP4::TRAK checkTrakBox;
MP4::Box checkMdiaBox;
MP4::Box checkTkhdBox;
MP4::MINF checkMinfBox;
MP4::STBL checkStblBox;
//MP4::STCO checkStcoBox;
checkTrakBox = ((MP4::TRAK&)moovBox.getContent(i));
for (unsigned int j = 0; j < checkTrakBox.getContentCount(); j++){
if (checkTrakBox.getContent(j).isType("mdia")){
checkMdiaBox = checkTrakBox.getContent(j);
break;
}
if (checkTrakBox.getContent(j).isType("tkhd")){
checkTkhdBox = checkTrakBox.getContent(j);
}
}
for (unsigned int j = 0; j < ((MP4::MDIA&)checkMdiaBox).getContentCount(); j++){
if (((MP4::MDIA&)checkMdiaBox).getContent(j).isType("minf")){
checkMinfBox = ((MP4::MINF&)((MP4::MDIA&)checkMdiaBox).getContent(j));
break;
}
}
for (unsigned int j = 0; j < checkMinfBox.getContentCount(); j++){
if (checkMinfBox.getContent(j).isType("stbl")){
checkStblBox = ((MP4::STBL&)checkMinfBox.getContent(j));
break;
}
}
for (unsigned int j = 0; j < checkStblBox.getContentCount(); j++){
if (checkStblBox.getContent(j).isType("stco")){
checkStcoBoxes.insert( std::pair<int, MP4::STCO>(((MP4::TKHD&)checkTkhdBox).getTrackID(), ((MP4::STCO&)checkStblBox.getContent(j)) ));
break;
}
}
}
//inserting right values in the STCO box header
//total = 0;
long long unsigned int totalByteOffset = 0;
//Current values are actual byte offset without header-sized offset
std::set <keyPart> sortSet;//filling sortset for interleaving parts
for (std::set<int>::iterator subIt = tracks.begin(); subIt != tracks.end(); subIt++) {
keyPart temp;
temp.trackID = *subIt;
temp.time = metaData.tracks[*subIt].firstms;//timeplace of frame
temp.endTime = metaData.tracks[*subIt].firstms + metaData.tracks[*subIt].parts[0].getDuration();
temp.size = metaData.tracks[*subIt].parts[0].getSize();//bytesize of frame (alle parts all together)
temp.index = 0;
sortSet.insert(temp);
}
while (!sortSet.empty()){
//setting the right STCO size in the STCO box
checkStcoBoxes[sortSet.begin()->trackID].setChunkOffset(totalByteOffset + byteOffset, sortSet.begin()->index);
totalByteOffset += sortSet.begin()->size;
//add keyPart to sortSet
keyPart temp;
temp.index = sortSet.begin()->index + 1;
temp.trackID = sortSet.begin()->trackID;
if(temp.index < metaData.tracks[temp.trackID].parts.size() ){//only insert when there are parts left
temp.time = sortSet.begin()->endTime;//timeplace of frame
temp.endTime = sortSet.begin()->endTime + metaData.tracks[temp.trackID].parts[temp.index].getDuration();
temp.size = metaData.tracks[temp.trackID].parts[temp.index].getSize();//bytesize of frame
sortSet.insert(temp);
}
//remove highest keyPart
sortSet.erase(sortSet.begin());
}
mdatSize = totalByteOffset+8;
header << std::string(moovBox.asBox(),moovBox.boxedSize());
header << (char)((mdatSize>>24) & 0xFF) << (char)((mdatSize>>16) & 0xFF) << (char)((mdatSize>>8) & 0xFF) << (char)(mdatSize & 0xFF) << "mdat";
//end of header
size += header.str().size();
return header.str();
}
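//Illustrative usage sketch (not part of the original source), mirroring the call made in progressiveConnector
//further down; tracks and Strm are assumed to be the selected track IDs and the incoming stream buffer there.
// long long totalSize = 0;
// std::string mp4Header = DTSCMeta2MP4Header(Strm.metadata, tracks, totalSize);
// //totalSize now holds the header length plus all interleaved sample bytes, i.e. the full progressive MP4 size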
/// Calculate a seekPoint, based on byteStart, metadata, tracks and headerSize.
/// The seekPoint will be set to the timestamp of the first packet to send.
void findSeekPoint(long long byteStart, long long & seekPoint, DTSC::Meta & metadata, std::set<int> & tracks, unsigned int headerSize){
seekPoint = 0;
//if we're starting in the header, seekPoint is always zero.
if (byteStart <= headerSize){return;}
//okay, we're past the header. Substract the headersize from the starting postion.
byteStart -= headerSize;
//initialize a list of sorted parts that this file contains
std::set <keyPart> sortSet;
for (std::set<int>::iterator subIt = tracks.begin(); subIt != tracks.end(); subIt++) {
keyPart temp;
temp.trackID = *subIt;
temp.time = metadata.tracks[*subIt].firstms;//timeplace of frame
temp.endTime = metadata.tracks[*subIt].firstms + metadata.tracks[*subIt].parts[0].getDuration();
temp.size = metadata.tracks[*subIt].parts[0].getSize();//bytesize of frame (alle parts all together)
temp.index = 0;
sortSet.insert(temp);
}
//forward through the file by headers, until we reach the point where we need to be
while (!sortSet.empty()){
//substract the size of this fragment from byteStart
byteStart -= sortSet.begin()->size;
//if that put us past the point where we wanted to be, return right now
if (byteStart < 0){return;}
//otherwise, set seekPoint to where we are now
seekPoint = sortSet.begin()->time;
//then find the next part
keyPart temp;
temp.index = sortSet.begin()->index + 1;
temp.trackID = sortSet.begin()->trackID;
if(temp.index < metadata.tracks[temp.trackID].parts.size() ){//only insert when there are parts left
temp.time = sortSet.begin()->endTime;//timeplace of frame
temp.endTime = sortSet.begin()->endTime + metadata.tracks[temp.trackID].parts[temp.index].getDuration();
temp.size = metadata.tracks[temp.trackID].parts[temp.index].getSize();//bytesize of frame
sortSet.insert(temp);
}
//remove highest keyPart
sortSet.erase(sortSet.begin());
}
//If we're here, we're in the last fragment.
//That's technically legal, of course.
}
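//(worked example, not in the original source: with a 4000-byte header and successive interleaved parts of
// 1000 bytes each, byteStart = 6500 first drops the header (2500 bytes left), then consumes two whole parts;
// the loop exits inside the third part, leaving seekPoint at the timestamp of the second part)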
/// Parses a "Range: " header, setting byteStart, byteEnd and seekPoint using data from metadata and tracks to do
/// the calculations.
/// On error, byteEnd is set to zero.
void parseRange(std::string header, long long & byteStart, long long & byteEnd, long long & seekPoint, DTSC::Meta & metadata, std::set<int> & tracks, unsigned int headerSize){
if (header.size() < 6 || header.substr(0, 6) != "bytes="){
byteEnd = 0;
DEBUG_MSG(DLVL_WARN, "Invalid range header: %s", header.c_str());
return;
}
header.erase(0, 6);
if (header.size() && header[0] == '-'){
//negative range = count from end
byteStart = 0;
for (unsigned int i = 1; i < header.size(); ++i){
if (header[i] >= '0' && header[i] <= '9'){
byteStart *= 10;
byteStart += header[i] - '0';
continue;
}
break;
}
if (byteStart > byteEnd){
//entire file if starting before byte zero
byteStart = 0;
DEBUG_MSG(DLVL_DEVEL, "Full negative range: %lli-%lli", byteStart, byteEnd);
findSeekPoint(byteStart, seekPoint, metadata, tracks, headerSize);
return;
}else{
//start byteStart bytes before byteEnd
byteStart = byteEnd - byteStart;
DEBUG_MSG(DLVL_DEVEL, "Partial negative range: %lli-%lli", byteStart, byteEnd);
findSeekPoint(byteStart, seekPoint, metadata, tracks, headerSize);
return;
}
}else{
long long size = byteEnd;
byteEnd = 0;
byteStart = 0;
unsigned int i = 0;
for ( ; i < header.size(); ++i){
if (header[i] >= '0' && header[i] <= '9'){
byteStart *= 10;
byteStart += header[i] - '0';
continue;
}
break;
}
if (header[i] != '-'){
DEBUG_MSG(DLVL_WARN, "Invalid range header: %s", header.c_str());
byteEnd = 0;
return;
}
++i;
if (i < header.size()){
for ( ; i < header.size(); ++i){
if (header[i] >= '0' && header[i] <= '9'){
byteEnd *= 10;
byteEnd += header[i] - '0';
continue;
}
break;
}
if (byteEnd > size-1){byteEnd = size;}
}else{
byteEnd = size;
}
DEBUG_MSG(DLVL_DEVEL, "Range request: %lli-%lli (%s)", byteStart, byteEnd, header.c_str());
findSeekPoint(byteStart, seekPoint, metadata, tracks, headerSize);
return;
}
}//parseRange
///\brief Main function for the HTTP Progressive MP4 Connector
///\param conn A socket describing the connection to the client.
///\return The exit code of the connector.
int progressiveConnector(Socket::Connection & conn){
DTSC::Stream Strm; //Incoming stream buffer.
HTTP::Parser HTTP_R, HTTP_S;//HTTP Receiver and HTTP Sender.
long long byteStart = 0;
long long leftOver = 0;
long long currPos = 0;
bool inited = false;//Whether the stream is initialized
Socket::Connection ss( -1);//The Stream Socket, used to connect to the desired stream.
std::string streamname;//Will contain the name of the stream.
std::set <keyPart> sortSet;//filling sortset for interleaving parts
unsigned int lastStats = 0;//Indicates the last time that we have sent stats to the server socket.
while (conn.connected()){
//Only attempt to parse input when not yet init'ed.
if ( !inited){
if (conn.spool() && HTTP_R.Read(conn)){
DEBUG_MSG(DLVL_DEVEL, "Received request: %s", HTTP_R.getUrl().c_str());
conn.setHost(HTTP_R.GetHeader("X-Origin"));
streamname = HTTP_R.GetHeader("X-Stream");
if (!ss){
ss = Util::Stream::getStream(streamname);
if (ss){
Strm.waitForMeta(ss);
}
if (!ss){
DEBUG_MSG(DLVL_FAIL, "Could not connect to stream %s!", streamname.c_str());
ss.close();
HTTP_S.Clean();
HTTP_R.Clean();
HTTP_S.SetBody("No such stream is available on the system. Please try again.\n");
HTTP_S.SendResponse("404", "Not found", conn);
continue;
}
}
int videoID = -1;
int audioID = -1;
if (HTTP_R.GetVar("audio") != ""){
audioID = JSON::Value(HTTP_R.GetVar("audio")).asInt();
}
if (HTTP_R.GetVar("video") != ""){
videoID = JSON::Value(HTTP_R.GetVar("video")).asInt();
}
for (std::map<int,DTSC::Track>::iterator it = Strm.metadata.tracks.begin(); it != Strm.metadata.tracks.end(); it++){
if (videoID == -1 && it->second.type == "video" && it->second.codec == "H264"){
videoID = it->first;
}
if (audioID == -1 && it->second.type == "audio" && it->second.codec == "AAC"){
audioID = it->first;
}
}
std::set<int> tracks;
if (videoID > 0){tracks.insert(videoID);}
if (audioID > 0){tracks.insert(audioID);}
HTTP_S.Clean(); //make sure no parts of old requests are left in any buffers
HTTP_S.SetHeader("Content-Type", "video/MP4"); //Send the correct content-type for MP4 files
HTTP_S.SetHeader("Accept-Ranges", "bytes, parsec");
long long size = 0;
std::string headerData = DTSCMeta2MP4Header(Strm.metadata, tracks, size);
byteStart = 0;
long long byteEnd = size-1;
long long seekPoint = 0;
if (HTTP_R.GetHeader("Range") != ""){
parseRange(HTTP_R.GetHeader("Range"), byteStart, byteEnd, seekPoint, Strm.metadata, tracks, headerData.size());
if (!byteEnd){
if (HTTP_R.GetHeader("Range")[0] == 'p'){
HTTP_S.SetBody("Starsystem not in communications range");
HTTP_S.SendResponse("416", "Starsystem not in communications range", conn);
HTTP_R.Clean(); //clean for any possible next requests
continue;
}else{
HTTP_S.SetBody("Requested Range Not Satisfiable");
HTTP_S.SendResponse("416", "Requested Range Not Satisfiable", conn);
HTTP_R.Clean(); //clean for any possible next requests
continue;
}
}else{
std::stringstream rangeReply;
rangeReply << "bytes " << byteStart << "-" << byteEnd << "/" << size;
HTTP_S.SetHeader("Content-Length", byteEnd - byteStart + 1);
//do not multiplex requests that are > 1MiB
if (byteEnd - byteStart + 1 > 1024*1024){
HTTP_S.SetHeader("MistMultiplex", "No");
}
HTTP_S.SetHeader("Content-Range", rangeReply.str());
/// \todo Switch to chunked?
HTTP_S.SendResponse("206", "Partial content", conn);
//HTTP_S.StartResponse("206", "Partial content", HTTP_R, conn);
}
}else{
HTTP_S.SetHeader("Content-Length", byteEnd - byteStart + 1);
//do not multiplex requests that aren't ranged
HTTP_S.SetHeader("MistMultiplex", "No");
/// \todo Switch to chunked?
HTTP_S.SendResponse("200", "OK", conn);
//HTTP_S.StartResponse(HTTP_R, conn);
}
leftOver = byteEnd - byteStart + 1;//add one byte, because range "0-0" = 1 byte of data
currPos = 0;
if (byteStart < (long long)headerData.size()){
/// \todo Switch to chunked?
//HTTP_S.Chunkify(headerData.data()+byteStart, std::min((long long)headerData.size(), byteEnd) - byteStart, conn);//send MP4 header
conn.SendNow(headerData.data()+byteStart, std::min((long long)headerData.size(), byteEnd) - byteStart);//send MP4 header
leftOver -= std::min((long long)headerData.size(), byteEnd) - byteStart;
}
currPos = headerData.size();//we're now guaranteed to be past the header point, no matter what
HTTP_R.Clean(); //clean for any possible next requests
{//scope block, so cmd is destroyed right after it has been sent
std::stringstream cmd;
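//Build the play command for the buffer: 't' selects the tracks, 's' seeks to the given timestamp in milliseconds, 'p' starts playback.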
cmd << "t";
for (std::set<int>::iterator it = tracks.begin(); it != tracks.end(); it++) {
cmd << " " << *it;
}
cmd << "\ns " << seekPoint << "\np\n";
ss.SendNow(cmd.str());
}
sortSet.clear();
for (std::set<int>::iterator subIt = tracks.begin(); subIt != tracks.end(); subIt++) {
keyPart temp;
temp.trackID = *subIt;
temp.time = Strm.metadata.tracks[*subIt].firstms;//start time of this part
temp.endTime = Strm.metadata.tracks[*subIt].firstms + Strm.metadata.tracks[*subIt].parts[0].getDuration();
temp.size = Strm.metadata.tracks[*subIt].parts[0].getSize();//byte size of this part
temp.index = 0;
sortSet.insert(temp);
}
inited = true;
}
}else{
unsigned int now = Util::epoch();
if (now != lastStats){
lastStats = now;
ss.SendNow(conn.getStats("HTTP_Progressive_MP4").c_str());
}
if (ss.spool()){
while (Strm.parsePacket(ss.Received())){
if (Strm.lastType() == DTSC::PAUSEMARK){
conn.close();
}else if(Strm.lastType() == DTSC::AUDIO || Strm.lastType() == DTSC::VIDEO){
//keep track of where we are - fast-forward through the sorted parts until we reach the packet we just received
while (!sortSet.empty() && ((long long)sortSet.begin()->trackID != Strm.getPacket()["trackid"].asInt() || (long long)sortSet.begin()->time != Strm.getPacket()["time"].asInt())){
keyPart temp;
temp.index = sortSet.begin()->index + 1;
temp.trackID = sortSet.begin()->trackID;
if(temp.index < Strm.metadata.tracks[temp.trackID].parts.size() ){//only insert when there are parts left
temp.time = sortSet.begin()->endTime;//start time of this part
temp.endTime = sortSet.begin()->endTime + Strm.metadata.tracks[temp.trackID].parts[temp.index].getDuration();
temp.size = Strm.metadata.tracks[temp.trackID].parts[temp.index].getSize();//byte size of this part
sortSet.insert(temp);
}
currPos += sortSet.begin()->size;
//remove the part we just processed from the set
sortSet.erase(sortSet.begin());
}
if (currPos >= byteStart){
sortSet.clear();//we don't need you anymore!
if (leftOver < (long long)Strm.lastData().size()){
conn.SendNow(Strm.lastData().data(), leftOver);
}else{
conn.SendNow(Strm.lastData());
}
//HTTP_S.Chunkify(Strm.lastData().data(), Strm.lastData().size(), conn);
leftOver -= Strm.lastData().size();
}else{
if (currPos + (long long)Strm.lastData().size() > byteStart){
conn.SendNow(Strm.lastData().data()+(byteStart-currPos), Strm.lastData().size()-(byteStart-currPos));
leftOver -= Strm.lastData().size()-(byteStart-currPos);
currPos = byteStart;
sortSet.clear();//we don't need you anymore!
}
}
if (leftOver < 1){
ss.SendNow("q\n");//stop playback
Strm.waitForPause(ss);//sync the stream
inited = false;
}
}
if (Strm.lastType() == DTSC::INVALID){
DEBUG_MSG(DLVL_FAIL, "Invalid packet received - closing connection");
conn.close();
}
}
}else{
Util::sleep(10);
}
if ( !ss.connected()){
break;
}
}
}
conn.close();
ss.SendNow(conn.getStats("HTTP_Progressive_MP4").c_str());
ss.close();
return 0;
} //Progressive_Connector main function
} //Connector_HTTP namespace
///\brief The standard process-spawning main function.
int main(int argc, char ** argv){
Util::Config conf(argv[0], PACKAGE_VERSION);
JSON::Value capa;
capa["desc"] = "Enables HTTP protocol progressive streaming.";
capa["deps"] = "HTTP";
capa["url_rel"] = "/$.mp4";
capa["url_match"] = "/$.mp4";
capa["codecs"][0u][0u].append("H264");
capa["codecs"][0u][1u].append("AAC");
capa["methods"][0u]["handler"] = "http";
capa["methods"][0u]["type"] = "html5/video/mp4";
capa["methods"][0u]["priority"] = 8ll;
capa["methods"][0u]["nolive"] = 1;
capa["socket"] = "http_progressive_mp4";
conf.addBasicConnectorOptions(capa);
conf.parseArgs(argc, argv);
if (conf.getBool("json")){
std::cout << capa.toString() << std::endl;
return -1;
}
return conf.serveForkedSocket(Connector_HTTP::progressiveConnector);
} //main

View file

@ -1,186 +0,0 @@
///\file conn_http_progressive_ogg.cpp
///\brief Contains the main code for the HTTP Progressive OGG Connector
#include <iostream>
#include <queue>
#include <sstream>
#include <cstdlib>
#include <cstdio>
#include <cmath>
#include <unistd.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <mist/socket.h>
#include <mist/http_parser.h>
#include <mist/dtsc.h>
#include <mist/ogg.h>
#include <mist/amf.h>
#include <mist/config.h>
#include <mist/stream.h>
#include <mist/timing.h>
#include "../converters/oggconv.h"
///\brief Holds everything unique to HTTP Connectors.
namespace Connector_HTTP {
///\brief Main function for the HTTP Progressive OGG Connector
///\param conn A socket describing the connection to the client.
///\return The exit code of the connector.
int progressiveConnector(Socket::Connection & conn){
bool progressive_has_sent_header = false;//Indicates whether we have sent a header.
bool ready4data = false; //Set to true when streaming is to begin.
DTSC::Stream Strm; //Incoming stream buffer.
HTTP::Parser HTTP_R, HTTP_S;//HTTP Receiver and HTTP Sender.
bool inited = false;//Whether the stream is initialized
Socket::Connection ss( -1);//The Stream Socket, used to connect to the desired stream.
std::string streamname;//Will contain the name of the stream.
//OGG specific variables
//OGG::headerPages oggMeta;
//OGG::Page curOggPage;
OGG::converter oggConv;
std::map <long long unsigned int, std::vector<JSON::Value> > DTSCBuffer;
//std::map <long long unsigned int, long long unsigned int> prevGran;
std::vector<unsigned int> curSegTable;
std::string sendBuffer;
unsigned int lastStats = 0;//Indicates the last time that we have sent stats to the server socket.
int videoID = -1;
int audioID = -1;
while (conn.connected()){
//Only attempt to parse input when not yet init'ed.
if ( !inited){
if (conn.spool() && HTTP_R.Read(conn)){
#if DEBUG >= 5
std::cout << "Received request: " << HTTP_R.getUrl() << std::endl;
#endif
conn.setHost(HTTP_R.GetHeader("X-Origin"));
streamname = HTTP_R.GetHeader("X-Stream");
ready4data = true;
HTTP_R.Clean(); //clean for any possible next requests
}
}
if (ready4data){
if ( !inited){
//we are ready, connect the socket!
ss = Util::Stream::getStream(streamname);
if ( !ss.connected()){
#if DEBUG >= 1
fprintf(stderr, "Could not connect to server for %s!\n", streamname.c_str());
#endif
ss.close();
HTTP_S.Clean();
HTTP_S.SetBody("No such stream is available on the system. Please try again.\n");
conn.SendNow(HTTP_S.BuildResponse("404", "Not found"));
ready4data = false;
continue;
}
Strm.waitForMeta(ss);
for (std::map<int,DTSC::Track>::iterator it = Strm.metadata.tracks.begin(); it != Strm.metadata.tracks.end(); it++){
if (videoID == -1 && it->second.codec == "theora"){
videoID = it->second.trackID;
}
if (audioID == -1 && it->second.codec == "vorbis"){
audioID = it->second.trackID;
}
}
if (videoID == -1 && audioID == -1){
HTTP_S.Clean(); //make sure no parts of old requests are left in any buffers
HTTP_S.SetBody("This stream contains no OGG compatible codecs");
HTTP_S.SendResponse("406", "Not acceptable",conn);
HTTP_R.Clean();
continue;
}
std::stringstream cmd;
cmd << "t";
if (videoID != -1){
cmd << " " << videoID;
}
if (audioID != -1){
cmd << " " << audioID;
}
cmd << "\np\n";
ss.SendNow(cmd.str().c_str(), cmd.str().size());
inited = true;
}
unsigned int now = Util::epoch();
if (now != lastStats){
lastStats = now;
ss.SendNow(conn.getStats("HTTP_Progressive_Ogg").c_str());
}
if (ss.spool()){
while (Strm.parsePacket(ss.Received())){
if ( !progressive_has_sent_header){
HTTP_S.Clean(); //make sure no parts of old requests are left in any buffers
HTTP_S.SetHeader("Content-Type", "video/ogg"); //Send the correct content-type for FLV files
HTTP_S.protocol = "HTTP/1.0";
conn.SendNow(HTTP_S.BuildResponse("200", "OK")); //no SetBody = unknown length - this is intentional, we will stream the entire file
//Fill in ogg header here
oggConv.readDTSCHeader(Strm.metadata);
conn.SendNow((char*)oggConv.parsedPages.c_str(), oggConv.parsedPages.size());
progressive_has_sent_header = true;
}
//parse DTSC to Ogg here
if (Strm.lastType() == DTSC::AUDIO || Strm.lastType() == DTSC::VIDEO){
std::string tmpString;
oggConv.readDTSCVector(Strm.getPacket(), tmpString);
conn.SendNow(tmpString);
}
if (Strm.lastType() == DTSC::PAUSEMARK){
conn.close();
ss.close();
//last page output
}
if (Strm.lastType() == DTSC::INVALID){
#if DEBUG >= 3
fprintf(stderr, "Invalid packet received - closing connection.\n");
#endif
conn.close();
}
}
}else{
Util::sleep(100);
}
if ( !ss.connected()){
break;
}
}
}
conn.close();
ss.SendNow(conn.getStats("HTTP_Dynamic").c_str());
ss.close();
return 0;
} //Progressive_Connector main function
} //Connector_HTTP namespace
///\brief The standard process-spawning main function.
int main(int argc, char ** argv){
Util::Config conf(argv[0], PACKAGE_VERSION);
JSON::Value capa;
capa["desc"] = "Enables HTTP protocol progressive streaming.";
capa["deps"] = "HTTP";
capa["url_rel"] = "/$.ogg";
capa["url_match"] = "/$.ogg";
capa["socket"] = "http_progressive_ogg";
capa["codecs"][0u][0u].append("theora");
capa["codecs"][0u][1u].append("vorbis");
capa["methods"][0u]["handler"] = "http";
capa["methods"][0u]["type"] = "html5/video/ogg";
capa["methods"][0u]["priority"] = 8ll;
capa["methods"][0u]["nolive"] = 1;
conf.addBasicConnectorOptions(capa);
conf.parseArgs(argc, argv);
if (conf.getBool("json")){
std::cout << capa.toString() << std::endl;
return -1;
}
return conf.serveForkedSocket(Connector_HTTP::progressiveConnector);
} //main

View file

@ -1,516 +0,0 @@
///\file conn_http_smooth.cpp
///\brief Contains the main code for the HTTP Smooth Connector
#include <iostream>
#include <iomanip>
#include <queue>
#include <sstream>
#include <cstdlib>
#include <cstdio>
#include <cmath>
#include <unistd.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <getopt.h>
#include <mist/socket.h>
#include <mist/http_parser.h>
#include <mist/json.h>
#include <mist/dtsc.h>
#include <mist/base64.h>
#include <mist/amf.h>
#include <mist/mp4.h>
#include <mist/mp4_ms.h>
#include <mist/mp4_generic.h>
#include <mist/config.h>
#include <mist/stream.h>
#include <mist/timing.h>
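///\brief Reads the first 8 bytes of a binary string as a big-endian unsigned integer.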
long long unsigned int binToInt(std::string & binary){
long long int result = 0;
for ( int i = 0; i < 8; i++){
result <<= 8;
result += binary[i];
}
return result;
}
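///\brief Writes an unsigned integer as an 8-byte big-endian binary string.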
std::string intToBin(long long unsigned int number){
std::string result;
result.resize(8);
for( int i = 7; i >= 0; i--){
result[i] = number & 0xFF;
number >>= 8;
}
return result;
}
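///\brief Converts a string to UTF-16LE by prefixing a byte order mark and widening every byte with a zero byte (only correct for ASCII input).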
std::string toUTF16(std::string original){
std::string result;
result += (char)0xFF;
result += (char)0xFE;
for (std::string::iterator it = original.begin(); it != original.end(); it++){
result += (*it);
result += (char)0x00;
}
return result;
}
///\brief Holds everything unique to HTTP Connectors.
namespace Connector_HTTP {
///\brief Builds an index file for HTTP Smooth streaming.
///\param metadata The current metadata, used to generate the index.
///\return The index file for HTTP Smooth Streaming.
std::string smoothIndex(DTSC::Meta & metadata){
std::stringstream Result;
Result << "<?xml version=\"1.0\" encoding=\"utf-16\"?>\n";
Result << "<SmoothStreamingMedia "
"MajorVersion=\"2\" "
"MinorVersion=\"0\" "
"TimeScale=\"10000000\" ";
std::deque<std::map<int,DTSC::Track>::iterator> audioIters;
std::deque<std::map<int,DTSC::Track>::iterator> videoIters;
long long int maxWidth = 0;
long long int maxHeight = 0;
long long int minWidth = 99999999;
long long int minHeight = 99999999;
for (std::map<int,DTSC::Track>::iterator it = metadata.tracks.begin(); it != metadata.tracks.end(); it++){
if (it->second.codec == "AAC"){
audioIters.push_back(it);
}
if (it->second.type == "video" && it->second.codec == "H264"){
videoIters.push_back(it);
if (it->second.width > maxWidth){maxWidth = it->second.width;}
if (it->second.width < minWidth){minWidth = it->second.width;}
if (it->second.height > maxHeight){maxHeight = it->second.height;}
if (it->second.height < minHeight){minHeight = it->second.height;}
}
}
if (metadata.vod){
Result << "Duration=\"" << (*videoIters.begin())->second.lastms << "0000\"";
}else{
Result << "Duration=\"0\" "
"IsLive=\"TRUE\" "
"LookAheadFragmentCount=\"2\" "
"DVRWindowLength=\"" << metadata.bufferWindow << "0000\" "
"CanSeek=\"TRUE\" "
"CanPause=\"TRUE\" ";
}
Result << ">\n";
//Add audio entries
if (audioIters.size()){
Result << "<StreamIndex "
"Type=\"audio\" "
"QualityLevels=\"" << audioIters.size() << "\" "
"Name=\"audio\" "
"Chunks=\"" << (*audioIters.begin())->second.keys.size() << "\" "
"Url=\"Q({bitrate},{CustomAttributes})/A({start time})\">\n";
int index = 0;
for (std::deque<std::map<int,DTSC::Track>::iterator>::iterator it = audioIters.begin(); it != audioIters.end(); it++){
Result << "<QualityLevel "
"Index=\"" << index << "\" "
"Bitrate=\"" << (*it)->second.bps * 8 << "\" "
"CodecPrivateData=\"" << std::hex;
for (unsigned int i = 0; i < (*it)->second.init.size(); i++){
Result << std::setfill('0') << std::setw(2) << std::right << (int)(*it)->second.init[i];
}
Result << std::dec << "\" "
"SamplingRate=\"" << (*it)->second.rate << "\" "
"Channels=\"2\" "
"BitsPerSample=\"16\" "
"PacketSize=\"4\" "
"AudioTag=\"255\" "
"FourCC=\"AACL\" >\n";
Result << "<CustomAttributes>\n"
"<Attribute Name = \"TrackID\" Value = \"" << (*it)->first << "\" />"
"</CustomAttributes>";
Result << "</QualityLevel>\n";
index++;
}
if ((*audioIters.begin())->second.keys.size()){
for (std::deque<DTSC::Key>::iterator it = (*audioIters.begin())->second.keys.begin(); it != (((*audioIters.begin())->second.keys.end()) - 1); it++){
Result << "<c ";
if (it == (*audioIters.begin())->second.keys.begin()){
Result << "t=\"" << it->getTime() * 10000 << "\" ";
}
Result << "d=\"" << it->getLength() * 10000 << "\" />\n";
}
}
Result << "</StreamIndex>\n";
}
//Add video entries
if (videoIters.size()){
Result << "<StreamIndex "
"Type=\"video\" "
"QualityLevels=\"" << videoIters.size() << "\" "
"Name=\"video\" "
"Chunks=\"" << (*videoIters.begin())->second.keys.size() << "\" "
"Url=\"Q({bitrate},{CustomAttributes})/V({start time})\" "
"MaxWidth=\"" << maxWidth << "\" "
"MaxHeight=\"" << maxHeight << "\" "
"DisplayWidth=\"" << maxWidth << "\" "
"DisplayHeight=\"" << maxHeight << "\">\n";
int index = 0;
for (std::deque<std::map<int,DTSC::Track>::iterator>::iterator it = videoIters.begin(); it != videoIters.end(); it++){
//Add video qualities
Result << "<QualityLevel "
"Index=\"" << index << "\" "
"Bitrate=\"" << (*it)->second.bps * 8 << "\" "
"CodecPrivateData=\"" << std::hex;
MP4::AVCC avccbox;
avccbox.setPayload((*it)->second.init);
std::string tmpString = avccbox.asAnnexB();
for (unsigned int i = 0; i < tmpString.size(); i++){
Result << std::setfill('0') << std::setw(2) << std::right << (int)tmpString[i];
}
Result << std::dec << "\" "
"MaxWidth=\"" << (*it)->second.width << "\" "
"MaxHeight=\"" << (*it)->second.height << "\" "
"FourCC=\"AVC1\" >\n";
Result << "<CustomAttributes>\n"
"<Attribute Name = \"TrackID\" Value = \"" << (*it)->first << "\" />"
"</CustomAttributes>";
Result << "</QualityLevel>\n";
index++;
}
if ((*videoIters.begin())->second.keys.size()){
for (std::deque<DTSC::Key>::iterator it = (*videoIters.begin())->second.keys.begin(); it != (((*videoIters.begin())->second.keys.end()) - 1); it++){
Result << "<c ";
if (it == (*videoIters.begin())->second.keys.begin()){
Result << "t=\"" << it->getTime() * 10000 << "\" ";
}
Result << "d=\"" << it->getLength() * 10000 << "\" />\n";
}
}
Result << "</StreamIndex>\n";
}
Result << "</SmoothStreamingMedia>\n";
#if DEBUG >= 8
std::cerr << "Sending this manifest:" << std::endl << Result << std::endl;
#endif
return toUTF16(Result.str());
} //smoothIndex
///\brief Main function for the HTTP Smooth Connector
///\param conn A socket describing the connection to the client.
///\return The exit code of the connector.
int smoothConnector(Socket::Connection & conn){
std::deque<std::string> dataBuffer;//A buffer for the data that needs to be sent to the client.
DTSC::Stream Strm;//Incoming stream buffer.
HTTP::Parser HTTP_R;//HTTP Receiver
HTTP::Parser HTTP_S;//HTTP Sender.
bool ready4data = false;//Set to true when streaming is to begin.
Socket::Connection ss( -1);//The Stream Socket, used to connect to the desired stream.
std::string streamname;//Will contain the name of the stream.
bool handlingRequest = false;
std::string Quality;//Indicates the requested quality of the movie.
long long int requestedTime = -1;//Indicates the fragment requested.
std::string parseString;//A string used for parsing different aspects of the request.
unsigned int lastStats = 0;//Indicates the last time that we have sent stats to the server socket.
conn.setBlocking(false);//Set the client socket to non-blocking
while (conn.connected()){
if ( !handlingRequest){
if (conn.spool() && HTTP_R.Read(conn)){
#if DEBUG >= 5
std::cout << "Received request: " << HTTP_R.getUrl() << std::endl;
#endif
//Get data set by the proxy.
conn.setHost(HTTP_R.GetHeader("X-Origin"));
streamname = HTTP_R.GetHeader("X-Stream");
if ( !ss){
//initiate Stream Socket
ss = Util::Stream::getStream(streamname);
if ( !ss.connected()){
#if DEBUG >= 1
fprintf(stderr, "Could not connect to server!\n");
#endif
HTTP_S.Clean();
HTTP_S.SetBody("No such stream is available on the system. Please try again.\n");
conn.SendNow(HTTP_S.BuildResponse("404", "Not found"));
ready4data = false;
continue;
}
ss.setBlocking(false);
Strm.waitForMeta(ss);
}
if (HTTP_R.url.find(".xap") != std::string::npos){
#include "xap.h"
HTTP_S.Clean();
HTTP_S.SetHeader("Content-Type", "application/siverlight");
HTTP_S.SetHeader("Cache-Control", "cache");
HTTP_S.SetBody("");
HTTP_S.SetHeader("Content-Length", xap_len);
HTTP_S.SendResponse("200", "OK", conn);
conn.SendNow((const char *)xap_data, xap_len);
}else{
if (HTTP_R.url.find("Manifest") == std::string::npos){
//We have a non-manifest request, parse it.
Quality = HTTP_R.url.substr(HTTP_R.url.find("TrackID=", 8) + 8);
Quality = Quality.substr(0, Quality.find(")"));
parseString = HTTP_R.url.substr(HTTP_R.url.find(")/") + 2);
parseString = parseString.substr(parseString.find("(") + 1);
requestedTime = atoll(parseString.substr(0, parseString.find(")")).c_str());
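//requestedTime is expressed in the manifest timescale of 10 MHz (100-nanosecond ticks); dividing by 10000 below converts it to milliseconds.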
long long int selectedQuality = atoll(Quality.c_str());
DTSC::Track & myRef = Strm.metadata.tracks[selectedQuality];
if (Strm.metadata.live){
int seekable = Strm.canSeekms(requestedTime / 10000);
if (seekable == 0){
// if the fragment in question is available, check whether the next one is available too
for (std::deque<DTSC::Key>::iterator it = myRef.keys.begin(); it != myRef.keys.end(); it++){
if (it->getTime() >= (requestedTime / 10000)){
if ((it + 1) == myRef.keys.end()){
seekable = 1;
}
break;
}
}
}
if (seekable < 0){
HTTP_S.Clean();
HTTP_S.SetBody("The requested fragment is no longer kept in memory on the server and cannot be served.\n");
conn.SendNow(HTTP_S.BuildResponse("412", "Fragment out of range"));
HTTP_R.Clean(); //clean for any possible next requests
std::cout << "Fragment @ " << requestedTime / 10000 << "ms too old (" << myRef.keys.begin()->getTime() << " - " << myRef.keys.rbegin()->getTime() << " ms)" << std::endl;
continue;
}
if (seekable > 0){
HTTP_S.Clean();
HTTP_S.SetBody("Proxy, re-request this in a second or two.\n");
conn.SendNow(HTTP_S.BuildResponse("208", "Ask again later"));
HTTP_R.Clean(); //clean for any possible next requests
std::cout << "Fragment @ " << requestedTime / 10000 << "ms not available yet (" << myRef.keys.begin()->getTime() << " - " << myRef.keys.rbegin()->getTime() << " ms)" << std::endl;
continue;
}
}
//Seek to the right place and send a play-once for a single fragment.
std::stringstream sstream;
long long mstime = 0;
int partOffset = 0;
int keyDur = 0;
DTSC::Key keyObj;
for (std::deque<DTSC::Key>::iterator it = myRef.keys.begin(); it != myRef.keys.end(); it++){
if (it->getTime() >= (requestedTime / 10000)){
mstime = it->getTime();
keyObj = (*it);
std::deque<DTSC::Key>::iterator nextIt = it;
nextIt++;
if (nextIt != myRef.keys.end()){
keyDur = nextIt->getTime() - it->getTime();
}else{
keyDur = -1;
if (Strm.metadata.live){
HTTP_S.Clean();
HTTP_S.SetBody("Proxy, re-request this in a second or two.\n");
conn.SendNow(HTTP_S.BuildResponse("208", "Ask again later"));
HTTP_R.Clean(); //clean for any possible next requests
std::cout << "Fragment after fragment @ " << (requestedTime / 10000) << " not available yet" << std::endl;
}
}
break;
}
partOffset += it->getParts();
}
if (HTTP_R.url == "/"){continue;}//Don't continue, but continue instead.
if (Strm.metadata.live){
if (mstime == 0 && (requestedTime / 10000) > 1){
HTTP_S.Clean();
HTTP_S.SetBody("The requested fragment is no longer kept in memory on the server and cannot be served.\n");
conn.SendNow(HTTP_S.BuildResponse("412", "Fragment out of range"));
HTTP_R.Clean(); //clean for any possible next requests
std::cout << "Fragment @ " << (requestedTime / 10000) << " too old" << std::endl;
continue;
}
}
sstream << "t " << myRef.trackID << "\n";
sstream << "s " << keyObj.getTime() << "\n";
if (keyDur != -1){
sstream << "p " << keyObj.getTime() + keyDur << "\n";
}else{
sstream << "p\n";
}
ss.SendNow(sstream.str().c_str());
//Wrap everything in mp4 boxes
MP4::MFHD mfhd_box;
mfhd_box.setSequenceNumber(((keyObj.getNumber() - 1) * 2) + myRef.trackID);
MP4::TFHD tfhd_box;
tfhd_box.setFlags(MP4::tfhdSampleFlag);
tfhd_box.setTrackID(myRef.trackID);
if (myRef.type == "video"){
tfhd_box.setDefaultSampleFlags(0x00004001);
}else{
tfhd_box.setDefaultSampleFlags(0x00008002);
}
MP4::TRUN trun_box;
trun_box.setDataOffset(42); //placeholder; the real offset is set below, once the size of the moof box is known
unsigned int keySize = 0;
if (myRef.type == "video"){
trun_box.setFlags(MP4::trundataOffset | MP4::trunfirstSampleFlags | MP4::trunsampleDuration | MP4::trunsampleSize | MP4::trunsampleOffsets);
}else{
trun_box.setFlags(MP4::trundataOffset | MP4::trunsampleDuration | MP4::trunsampleSize);
}
trun_box.setFirstSampleFlags(0x00004002);
for (int i = 0; i < keyObj.getParts(); i++){
MP4::trunSampleInformation trunSample;
trunSample.sampleSize = Strm.metadata.tracks[myRef.trackID].parts[i + partOffset].getSize();
keySize += Strm.metadata.tracks[myRef.trackID].parts[i + partOffset].getSize();
trunSample.sampleDuration = Strm.metadata.tracks[myRef.trackID].parts[i + partOffset].getDuration() * 10000;
if (myRef.type == "video"){
trunSample.sampleOffset = Strm.metadata.tracks[myRef.trackID].parts[i + partOffset].getOffset() * 10000;
}
trun_box.setSampleInformation(trunSample, i);
}
MP4::SDTP sdtp_box;
sdtp_box.setVersion(0);
if (myRef.type == "video"){
sdtp_box.setValue(36, 4);
for (int i = 1; i < keyObj.getParts(); i++){
sdtp_box.setValue(20, 4 + i);
}
}else{
sdtp_box.setValue(40, 4);
for (int i = 1; i < keyObj.getParts(); i++){
sdtp_box.setValue(40, 4 + i);
}
}
MP4::TRAF traf_box;
traf_box.setContent(tfhd_box, 0);
traf_box.setContent(trun_box, 1);
traf_box.setContent(sdtp_box, 2);
//If the stream is live, we want to have a fragref box if possible
if (Strm.metadata.live){
MP4::UUID_TrackFragmentReference fragref_box;
fragref_box.setVersion(1);
fragref_box.setFragmentCount(0);
int fragCount = 0;
for (unsigned int i = 0; fragCount < 2 && i < myRef.keys.size() - 1; i++){
if (myRef.keys[i].getTime() > (requestedTime / 10000)){
fragref_box.setTime(fragCount, myRef.keys[i].getTime() * 10000);
fragref_box.setDuration(fragCount, myRef.keys[i].getLength() * 10000);
fragref_box.setFragmentCount(++fragCount);
}
}
traf_box.setContent(fragref_box, 3);
}
MP4::MOOF moof_box;
moof_box.setContent(mfhd_box, 0);
//Setting the correct offsets.
moof_box.setContent(traf_box, 1);
trun_box.setDataOffset(moof_box.boxedSize() + 8);
traf_box.setContent(trun_box, 1);
moof_box.setContent(traf_box, 1);
HTTP_S.Clean();
HTTP_S.SetHeader("Content-Type", "video/mp4");
HTTP_S.StartResponse(HTTP_R, conn);
HTTP_S.Chunkify(moof_box.asBox(), moof_box.boxedSize(), conn);
int size = htonl(keySize + 8);
HTTP_S.Chunkify((char*)&size, 4, conn);
HTTP_S.Chunkify("mdat", 4, conn);
handlingRequest = true;
}else{
//We have a request for a Manifest, generate and send it.
HTTP_S.Clean();
HTTP_S.SetHeader("Content-Type", "text/xml");
HTTP_S.SetHeader("Cache-Control", "no-cache");
std::string manifest = smoothIndex(Strm.metadata);
HTTP_S.SetBody(manifest);
HTTP_S.SendResponse("200", "OK", conn);
}
}
ready4data = true;
//Clean for any possible next requests
HTTP_R.Clean();
}else{
//Wait 250ms before checking for new data.
Util::sleep(250);
}
}else{
if (!ready4data){
//Wait 250ms before checking for new data.
Util::sleep(250);
}
}
if (ready4data){
unsigned int now = Util::epoch();
if (now != lastStats){
//Send new stats.
lastStats = now;
ss.SendNow(conn.getStats("HTTP_Smooth"));
}
if (ss.spool()){
while (Strm.parsePacket(ss.Received())){
if (Strm.lastType() == DTSC::AUDIO || Strm.lastType() == DTSC::VIDEO){
HTTP_S.Chunkify(Strm.lastData(), conn);
}
if (Strm.lastType() == DTSC::PAUSEMARK){
HTTP_S.Chunkify("", 0, conn);
handlingRequest = false;
}
}
}else{
Util::sleep(10);
}
if ( !ss.connected()){
break;
}
}
}
conn.close();
ss.SendNow(conn.getStats("HTTP_Smooth").c_str());
ss.close();
return 0;
}//Smooth_Connector main function
}//Connector_HTTP namespace
///\brief The standard process-spawning main function.
int main(int argc, char ** argv){
Util::Config conf(argv[0], PACKAGE_VERSION);
JSON::Value capa;
capa["desc"] = "Enables HTTP protocol Microsoft-specific smooth streaming through silverlight (also known as HSS).";
capa["deps"] = "HTTP";
capa["url_rel"] = "/smooth/$.ism/Manifest";
capa["url_prefix"] = "/smooth/$.ism/";
capa["socket"] = "http_smooth";
capa["codecs"][0u][0u].append("H264");
capa["codecs"][0u][1u].append("AAC");
capa["methods"][0u]["handler"] = "http";
capa["methods"][0u]["type"] = "html5/application/vnd.ms-ss";
capa["methods"][0u]["priority"] = 9ll;
capa["methods"][0u]["nolive"] = 1;
capa["methods"][1u]["handler"] = "http";
capa["methods"][1u]["type"] = "silverlight";
capa["methods"][1u]["priority"] = 1ll;
capa["methods"][1u]["nolive"] = 1;
conf.addBasicConnectorOptions(capa);
conf.parseArgs(argc, argv);
if (conf.getBool("json")){
std::cout << capa.toString() << std::endl;
return -1;
}
return conf.serveForkedSocket(Connector_HTTP::smoothConnector);
} //main

View file

@ -1,223 +0,0 @@
///\file conn_http_srt.cpp
///\brief Contains the main code for the HTTP SRT Connector
#include <iostream>
#include <queue>
#include <sstream>
#include <iomanip>
#include <cstdlib>
#include <cstdio>
#include <cmath>
#include <unistd.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <getopt.h>
#include <mist/socket.h>
#include <mist/http_parser.h>
#include <mist/dtsc.h>
#include <mist/flv_tag.h>
#include <mist/amf.h>
#include <mist/config.h>
#include <mist/stream.h>
#include <mist/timing.h>
///\brief Holds everything unique to HTTP Connectors.
namespace Connector_HTTP {
///\brief Main function for the HTTP SRT Connector
///\param conn A socket describing the connection to the client.
///\return The exit code of the connector.
int SRTConnector(Socket::Connection & conn){
DTSC::Stream Strm; //Incoming stream buffer.
HTTP::Parser HTTP_R, HTTP_S;//HTTP Receiver and HTTP Sender.
bool inited = false;//Whether the stream is initialized
Socket::Connection ss( -1);//The Stream Socket, used to connect to the desired stream.
std::string streamname;//Will contain the name of the stream.
unsigned int lastStats = 0;//Indicates the last time that we have sent stats to the server socket.
unsigned int seek_time = 0;//Seek position in ms
int trackID = -1; // the track to be selected
int curIndex = 0; // SRT index
bool subtitleTrack = false; // check whether the requested track is a srt track
bool isWebVTT = false;
std::stringstream srtdata; // buffered subtitle output data
while (conn.connected()){
//Only attempt to parse input when not yet init'ed.
if ( !inited){
if (conn.spool() && HTTP_R.Read(conn)){
#if DEBUG >= 5
std::cout << "Received request: " << HTTP_R.getUrl() << std::endl;
#endif
conn.setHost(HTTP_R.GetHeader("X-Origin"));
streamname = HTTP_R.GetHeader("X-Stream");
int start = 0;
if ( !HTTP_R.GetVar("start").empty()){
start = atoi(HTTP_R.GetVar("start").c_str());
}
if ( !HTTP_R.GetVar("starttime").empty()){
start = atoi(HTTP_R.GetVar("starttime").c_str());
}
if ( !HTTP_R.GetVar("apstart").empty()){
start = atoi(HTTP_R.GetVar("apstart").c_str());
}
if ( !HTTP_R.GetVar("ec_seek").empty()){
start = atoi(HTTP_R.GetVar("ec_seek").c_str());
}
if ( !HTTP_R.GetVar("fs").empty()){
start = atoi(HTTP_R.GetVar("fs").c_str());
}
if ( !HTTP_R.GetVar("trackid").empty()){
trackID = atoi(HTTP_R.GetVar("trackid").c_str());
}
if ( !HTTP_R.GetVar("webvtt").empty()){
isWebVTT = true;
}else{
isWebVTT = false;
}
//the start value is interpreted as seconds and converted to milliseconds
if (start < 10800){
seek_time = start * 1000; //ms, not s
}else{
seek_time = start * 1000; //values of 3 hours or more are currently treated the same way
}
//we are ready, connect the socket!
if ( !ss.connected()){
ss = Util::Stream::getStream(streamname);
}
if ( !ss.connected()){
#if DEBUG >= 1
fprintf(stderr, "Could not connect to server for %s!\n", streamname.c_str());
#endif
ss.close();
HTTP_S.Clean();
HTTP_S.SetBody("No such stream is available on the system. Please try again.\n");
conn.SendNow(HTTP_S.BuildResponse("404", "Not found"));
inited = false;
continue;
}
Strm.waitForMeta(ss);
if(trackID == -1){
// no track was given. Fetch the first track that has SRT data
for (std::map<int,DTSC::Track>::iterator it = Strm.metadata.tracks.begin(); it != Strm.metadata.tracks.end(); it++){
if (it->second.codec == "srt"){
trackID = it->second.trackID;
subtitleTrack = true;
break;
}
}
}else{
// track *was* given, but we have to check whether it's an actual srt track
subtitleTrack = Strm.metadata.tracks[trackID].codec == "srt";
}
if(!subtitleTrack){
HTTP_S.Clean();
HTTP_S.SetBody("# This track doesn't contain subtitle data.\n");
conn.SendNow(HTTP_S.BuildResponse("404", "Not found"));
subtitleTrack = false;
HTTP_R.Clean();
continue;
}
std::stringstream cmd;
cmd << "t " << trackID;
int maxTime = Strm.metadata.tracks[trackID].lastms;
cmd << "\ns " << seek_time << "\np " << maxTime << "\n";
ss.SendNow(cmd.str().c_str(), cmd.str().size());
inited = true;
HTTP_R.Clean(); //clean for any possible next requests
srtdata.clear();
curIndex = 1; // SRT cue numbering starts at 1
}
}
unsigned int now = Util::epoch();
if (now != lastStats){
lastStats = now;
ss.SendNow(conn.getStats("HTTP_SRT").c_str());
}
if (inited){
if (ss.spool()){
while (Strm.parsePacket(ss.Received())){
if(Strm.lastType() == DTSC::META){
if(!isWebVTT)
{
srtdata << curIndex++ << std::endl;
}
long long unsigned int time = Strm.getPacket()["time"].asInt();
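//format the cue timing line as HH:MM:SS,mmm --> HH:MM:SS,mmm (SRT-style, comma-separated milliseconds)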
srtdata << std::setfill('0') << std::setw(2) << (time / 3600000) << ":";
srtdata << std::setfill('0') << std::setw(2) << ((time % 3600000) / 60000) << ":";
srtdata << std::setfill('0') << std::setw(2) << (((time % 3600000) % 60000) / 1000) << ",";
srtdata << std::setfill('0') << std::setw(3) << time % 1000 << " --> ";
time += Strm.getPacket()["duration"].asInt();
srtdata << std::setfill('0') << std::setw(2) << (time / 3600000) << ":";
srtdata << std::setfill('0') << std::setw(2) << ((time % 3600000) / 60000) << ":";
srtdata << std::setfill('0') << std::setw(2) << (((time % 3600000) % 60000) / 1000) << ",";
srtdata << std::setfill('0') << std::setw(3) << time % 1000 << std::endl;
srtdata << Strm.lastData() << std::endl;
}
if( Strm.lastType() == DTSC::PAUSEMARK){
HTTP_S.Clean(); //make sure no parts of old requests are left in any buffers
HTTP_S.SetHeader("Content-Type", "text/plain"); //Send the correct content-type for FLV files
HTTP_S.SetBody( (isWebVTT ? "WEBVTT\n\n" : "") + srtdata.str());
conn.SendNow(HTTP_S.BuildResponse("200", "OK")); //no SetBody = unknown length - this is intentional, we will stream the entire file
inited = false;
srtdata.str("");
srtdata.clear();
}
}
}else{
Util::sleep(200);
}
if ( !ss.connected()){
break;
}
}
}
conn.close();
ss.SendNow(conn.getStats("HTTP_SRT").c_str());
ss.close();
return 0;
} //SRT main function
} //Connector_HTTP namespace
///\brief The standard process-spawning main function.
int main(int argc, char ** argv){
Util::Config conf(argv[0], PACKAGE_VERSION);
JSON::Value capa;
capa["desc"] = "Enables HTTP protocol subtitle streaming.";
capa["deps"] = "HTTP";
capa["url_rel"] = "/$.srt";
capa["url_match"] = "/$.srt";
capa["url_handler"] = "http";
capa["url_type"] = "subtitle";
capa["socket"] = "http_srt";
conf.addBasicConnectorOptions(capa);
conf.parseArgs(argc, argv);
if (conf.getBool("json")){
std::cout << capa.toString() << std::endl;
return -1;
}
return conf.serveForkedSocket(Connector_HTTP::SRTConnector);
} //main

View file

@ -1,58 +0,0 @@
/// \file conn_raw.cpp
/// Contains the main code for the RAW connector.
#include <iostream>
#include <sstream>
#include <mist/config.h>
#include <mist/socket.h>
#include <mist/stream.h>
#include <mist/timing.h>
///\brief Contains the main code for the RAW connector.
///
///Expects a single commandline argument telling it which stream to connect to,
///then outputs the raw stream to stdout.
int main(int argc, char ** argv){
Util::Config conf(argv[0], PACKAGE_VERSION);
JSON::Value capa;
conf.addBasicConnectorOptions(capa);
conf.addOption("stream_name", JSON::fromString("{\"arg_num\":1, \"help\":\"Name of the stream to write to stdout.\"}"));
conf.parseArgs(argc, argv);
if (conf.getBool("json")){
std::cout << "null" << std::endl;
return -1;
}
//connect to the proper stream
Socket::Connection S = Util::Stream::getStream(conf.getString("stream_name"));
S.setBlocking(false);
if ( !S.connected()){
std::cout << "Could not open stream " << conf.getString("stream_name") << std::endl;
return 1;
}
long long int lastStats = 0;
long long int started = Util::epoch();
while (std::cout.good() && S.connected()){
if (S.spool()){
while (S.Received().size()){
std::cout.write(S.Received().get().c_str(), S.Received().get().size());
S.Received().get().clear();
}
}else{
Util::sleep(500); //sleep 500ms if no data
}
unsigned int now = Util::epoch();
if (now != lastStats){
lastStats = now;
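//send a stats line: host, connector name, seconds connected, bytes downloaded and bytes uploaded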
std::stringstream st;
st << "S localhost RAW " << (Util::epoch() - started) << " " << S.dataDown() << " " << S.dataUp() << "\n";
S.SendNow(st.str().c_str());
}
}
std::stringstream st;
st << "S localhost RAW " << (Util::epoch() - started) << " " << S.dataDown() << " " << S.dataUp() << "\n";
S.SendNow(st.str().c_str());
S.close();
return 0;
}

View file

@ -1,700 +0,0 @@
/// \file conn_rtmp.cpp
/// Contains the main code for the RTMP Connector
#include <iostream>
#include <sstream>
#include <cstdlib>
#include <cstdio>
#include <cmath>
#include <unistd.h>
#include <signal.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <getopt.h>
#include <mist/socket.h>
#include <mist/config.h>
#include <mist/flv_tag.h>
#include <mist/amf.h>
#include <mist/rtmpchunks.h>
#include <mist/stream.h>
#include <mist/timing.h>
///\brief Holds everything unique to the RTMP Connector
namespace Connector_RTMP {
//for connection to server
bool ready4data = false; ///< Indicates whether streaming can start.
bool inited = false; ///< Indicates whether we are ready to connect to the Buffer.
bool noStats = false; ///< Indicates when no stats should be sent anymore. Used in push mode.
bool stopParsing = false; ///< Indicates when to stop all parsing.
bool streamReset = false;
//for reply to play command
int playTransaction = -1;///<The transaction number of the reply.
int playStreamId = -1;///<The stream id of the reply.
int playMessageType = -1;///<The message type of the reply.
//generic state keeping
bool streamInited = false;///<Indicates whether init data for audio/video was sent.
int videoID = -1;
int audioID = -1;
Socket::Connection Socket; ///< A copy of the user socket to allow helper functions to directly send data.
Socket::Connection ss; ///< Socket connected to server.
std::string streamName; ///< Stream that will be opened.
std::string app_name; ///< Name of the application that was opened
///\brief Sends a RTMP command either in AMF or AMF3 mode.
///\param amfReply The data to be sent over RTMP.
///\param messageType The type of message.
///\param streamId The ID of the AMF stream.
void sendCommand(AMF::Object & amfReply, int messageType, int streamId){
#if DEBUG >= 8
std::cerr << amfReply.Print() << std::endl;
#endif
if (messageType == 17){
Socket.SendNow(RTMPStream::SendChunk(3, messageType, streamId, (char)0 + amfReply.Pack()));
}else{
Socket.SendNow(RTMPStream::SendChunk(3, messageType, streamId, amfReply.Pack()));
}
} //sendCommand
///\brief Parses a single AMF command message, and sends a direct response through sendCommand().
///\param amfData The received request.
///\param messageType The type of message.
///\param streamId The ID of the AMF stream.
void parseAMFCommand(AMF::Object & amfData, int messageType, int streamId){
#if DEBUG >= 5
fprintf(stderr, "Received command: %s\n", amfData.Print().c_str());
#endif
#if DEBUG >= 8
fprintf(stderr, "AMF0 command: %s\n", amfData.getContentP(0)->StrValue().c_str());
#endif
if (amfData.getContentP(0)->StrValue() == "connect"){
double objencoding = 0;
if (amfData.getContentP(2)->getContentP("objectEncoding")){
objencoding = amfData.getContentP(2)->getContentP("objectEncoding")->NumValue();
}
#if DEBUG >= 6
int tmpint;
if (amfData.getContentP(2)->getContentP("videoCodecs")){
tmpint = (int)amfData.getContentP(2)->getContentP("videoCodecs")->NumValue();
if (tmpint & 0x04){
fprintf(stderr, "Sorensen video support detected\n");
}
if (tmpint & 0x80){
fprintf(stderr, "H264 video support detected\n");
}
}
if (amfData.getContentP(2)->getContentP("audioCodecs")){
tmpint = (int)amfData.getContentP(2)->getContentP("audioCodecs")->NumValue();
if (tmpint & 0x04){
fprintf(stderr, "MP3 audio support detected\n");
}
if (tmpint & 0x400){
fprintf(stderr, "AAC audio support detected\n");
}
}
#endif
app_name = amfData.getContentP(2)->getContentP("tcUrl")->StrValue();
app_name = app_name.substr(app_name.find('/', 7) + 1);
RTMPStream::chunk_snd_max = 4096;
Socket.Send(RTMPStream::SendCTL(1, RTMPStream::chunk_snd_max)); //send chunk size max (msg 1)
Socket.Send(RTMPStream::SendCTL(5, RTMPStream::snd_window_size)); //send window acknowledgement size (msg 5)
Socket.Send(RTMPStream::SendCTL(6, RTMPStream::rec_window_size)); //send rec window acknowledgement size (msg 6)
Socket.Send(RTMPStream::SendUSR(0, 1)); //send UCM StreamBegin (0), stream 1
//send a _result reply
AMF::Object amfReply("container", AMF::AMF0_DDV_CONTAINER);
amfReply.addContent(AMF::Object("", "_result")); //result success
amfReply.addContent(amfData.getContent(1)); //same transaction ID
amfReply.addContent(AMF::Object("")); //server properties
amfReply.getContentP(2)->addContent(AMF::Object("fmsVer", "FMS/3,5,5,2004"));
amfReply.getContentP(2)->addContent(AMF::Object("capabilities", (double)31));
amfReply.getContentP(2)->addContent(AMF::Object("mode", (double)1));
amfReply.addContent(AMF::Object("")); //info
amfReply.getContentP(3)->addContent(AMF::Object("level", "status"));
amfReply.getContentP(3)->addContent(AMF::Object("code", "NetConnection.Connect.Success"));
amfReply.getContentP(3)->addContent(AMF::Object("description", "Connection succeeded."));
amfReply.getContentP(3)->addContent(AMF::Object("clientid", 1337));
amfReply.getContentP(3)->addContent(AMF::Object("objectEncoding", objencoding));
//amfReply.getContentP(3)->addContent(AMF::Object("data", AMF::AMF0_ECMA_ARRAY));
//amfReply.getContentP(3)->getContentP(4)->addContent(AMF::Object("version", "3,5,4,1004"));
sendCommand(amfReply, messageType, streamId);
//send onBWDone packet - no clue what it is, but real server sends it...
//amfReply = AMF::Object("container", AMF::AMF0_DDV_CONTAINER);
//amfReply.addContent(AMF::Object("", "onBWDone"));//result
//amfReply.addContent(amfData.getContent(1));//same transaction ID
//amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL));//null
//sendCommand(amfReply, messageType, streamId);
return;
} //connect
if (amfData.getContentP(0)->StrValue() == "createStream"){
//send a _result reply
AMF::Object amfReply("container", AMF::AMF0_DDV_CONTAINER);
amfReply.addContent(AMF::Object("", "_result")); //result success
amfReply.addContent(amfData.getContent(1)); //same transaction ID
amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
amfReply.addContent(AMF::Object("", (double)1)); //stream ID - we use 1
sendCommand(amfReply, messageType, streamId);
Socket.Send(RTMPStream::SendUSR(0, 1)); //send UCM StreamBegin (0), stream 1
return;
} //createStream
if ((amfData.getContentP(0)->StrValue() == "closeStream") || (amfData.getContentP(0)->StrValue() == "deleteStream")){
if (ss.connected()){
ss.close();
}
return;
}
if ((amfData.getContentP(0)->StrValue() == "FCUnpublish") || (amfData.getContentP(0)->StrValue() == "releaseStream")){
// ignored
return;
}
if ((amfData.getContentP(0)->StrValue() == "FCPublish")){
//send an onFCPublish reply
AMF::Object amfReply("container", AMF::AMF0_DDV_CONTAINER);
amfReply.addContent(AMF::Object("", "onFCPublish")); //status reply
amfReply.addContent(AMF::Object("", 0, AMF::AMF0_NUMBER)); //same transaction ID
amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
amfReply.addContent(AMF::Object("")); //info
amfReply.getContentP(3)->addContent(AMF::Object("code", "NetStream.Publish.Start"));
amfReply.getContentP(3)->addContent(AMF::Object("description", "Please followup with publish command..."));
sendCommand(amfReply, messageType, streamId);
return;
} //FCPublish
if (amfData.getContentP(0)->StrValue() == "releaseStream"){
//send a _result reply
AMF::Object amfReply("container", AMF::AMF0_DDV_CONTAINER);
amfReply.addContent(AMF::Object("", "_result")); //result success
amfReply.addContent(amfData.getContent(1)); //same transaction ID
amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
amfReply.addContent(AMF::Object("", AMF::AMF0_UNDEFINED)); //stream ID?
sendCommand(amfReply, messageType, streamId);
return;
}//releaseStream
if ((amfData.getContentP(0)->StrValue() == "getStreamLength") || (amfData.getContentP(0)->StrValue() == "getMovLen")){
//send a _result reply
AMF::Object amfReply("container", AMF::AMF0_DDV_CONTAINER);
amfReply.addContent(AMF::Object("", "_result")); //result success
amfReply.addContent(amfData.getContent(1)); //same transaction ID
amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
amfReply.addContent(AMF::Object("", (double)0)); //zero length
sendCommand(amfReply, messageType, streamId);
return;
} //getStreamLength
if ((amfData.getContentP(0)->StrValue() == "publish")){
if (amfData.getContentP(3)){
streamName = amfData.getContentP(3)->StrValue();
/// \todo implement push for MistPlayer or restrict and change to getLive
ss = Util::Stream::getStream(streamName);
if ( !ss.connected()){
#if DEBUG >= 1
fprintf(stderr, "Could not connect to server!\n");
#endif
Socket.close(); //disconnect user
return;
}
DTSC::Stream Strm;
Strm.waitForMeta(ss);
ss.Send("P ");
ss.Send(Socket.getHost().c_str());
ss.Send(" ");
ss.Send(app_name);
ss.SendNow("\n");
streamReset = true;
noStats = true;
}
//send a _result reply
AMF::Object amfReply("container", AMF::AMF0_DDV_CONTAINER);
amfReply.addContent(AMF::Object("", "_result")); //result success
amfReply.addContent(amfData.getContent(1)); //same transaction ID
amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
amfReply.addContent(AMF::Object("", 1, AMF::AMF0_BOOL)); //publish success?
sendCommand(amfReply, messageType, streamId);
Socket.Send(RTMPStream::SendUSR(0, 1)); //send UCM StreamBegin (0), stream 1
//send a status reply
amfReply = AMF::Object("container", AMF::AMF0_DDV_CONTAINER);
amfReply.addContent(AMF::Object("", "onStatus")); //status reply
amfReply.addContent(AMF::Object("", 0, AMF::AMF0_NUMBER)); //same transaction ID
amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
amfReply.addContent(AMF::Object("")); //info
amfReply.getContentP(3)->addContent(AMF::Object("level", "status"));
amfReply.getContentP(3)->addContent(AMF::Object("code", "NetStream.Publish.Start"));
amfReply.getContentP(3)->addContent(AMF::Object("description", "Stream is now published!"));
amfReply.getContentP(3)->addContent(AMF::Object("clientid", (double)1337));
sendCommand(amfReply, messageType, streamId);
return;
} //publish
if (amfData.getContentP(0)->StrValue() == "checkBandwidth"){
//send a _result reply
AMF::Object amfReply("container", AMF::AMF0_DDV_CONTAINER);
amfReply.addContent(AMF::Object("", "_result")); //result success
amfReply.addContent(amfData.getContent(1)); //same transaction ID
amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
sendCommand(amfReply, messageType, streamId);
return;
} //checkBandwidth
if ((amfData.getContentP(0)->StrValue() == "play") || (amfData.getContentP(0)->StrValue() == "play2")){
//set reply number and stream name, actual reply is sent up in the ss.spool() handler
playTransaction = amfData.getContentP(1)->NumValue();
playMessageType = messageType;
playStreamId = streamId;
streamName = amfData.getContentP(3)->StrValue();
Connector_RTMP::ready4data = true; //start sending video data!
return;
} //play
if ((amfData.getContentP(0)->StrValue() == "seek")){
//set reply number and stream name, actual reply is sent up in the ss.spool() handler
playTransaction = amfData.getContentP(1)->NumValue();
playMessageType = messageType;
playStreamId = streamId;
streamInited = false;
AMF::Object amfReply("container", AMF::AMF0_DDV_CONTAINER);
amfReply.addContent(AMF::Object("", "onStatus")); //status reply
amfReply.addContent(amfData.getContent(1)); //same transaction ID
amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
amfReply.addContent(AMF::Object("")); //info
amfReply.getContentP(3)->addContent(AMF::Object("level", "status"));
amfReply.getContentP(3)->addContent(AMF::Object("code", "NetStream.Seek.Notify"));
amfReply.getContentP(3)->addContent(AMF::Object("description", "Seeking to the specified time"));
amfReply.getContentP(3)->addContent(AMF::Object("details", "DDV"));
amfReply.getContentP(3)->addContent(AMF::Object("clientid", (double)1337));
sendCommand(amfReply, playMessageType, playStreamId);
ss.Send("s ");
ss.Send(JSON::Value((long long int)amfData.getContentP(3)->NumValue()).asString().c_str());
ss.SendNow("\n");
return;
} //seek
if ((amfData.getContentP(0)->StrValue() == "pauseRaw") || (amfData.getContentP(0)->StrValue() == "pause")){
if (amfData.getContentP(3)->NumValue()){
ss.Send("q\n"); //quit playing
//send a status reply
AMF::Object amfReply("container", AMF::AMF0_DDV_CONTAINER);
amfReply.addContent(AMF::Object("", "onStatus")); //status reply
amfReply.addContent(amfData.getContent(1)); //same transaction ID
amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
amfReply.addContent(AMF::Object("")); //info
amfReply.getContentP(3)->addContent(AMF::Object("level", "status"));
amfReply.getContentP(3)->addContent(AMF::Object("code", "NetStream.Pause.Notify"));
amfReply.getContentP(3)->addContent(AMF::Object("description", "Pausing playback"));
amfReply.getContentP(3)->addContent(AMF::Object("details", "DDV"));
amfReply.getContentP(3)->addContent(AMF::Object("clientid", (double)1337));
sendCommand(amfReply, playMessageType, playStreamId);
}else{
ss.SendNow("p\n"); //start playing
//send a status reply
AMF::Object amfReply("container", AMF::AMF0_DDV_CONTAINER);
amfReply.addContent(AMF::Object("", "onStatus")); //status reply
amfReply.addContent(amfData.getContent(1)); //same transaction ID
amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
amfReply.addContent(AMF::Object("")); //info
amfReply.getContentP(3)->addContent(AMF::Object("level", "status"));
amfReply.getContentP(3)->addContent(AMF::Object("code", "NetStream.Unpause.Notify"));
amfReply.getContentP(3)->addContent(AMF::Object("description", "Resuming playback"));
amfReply.getContentP(3)->addContent(AMF::Object("details", "DDV"));
amfReply.getContentP(3)->addContent(AMF::Object("clientid", (double)1337));
sendCommand(amfReply, playMessageType, playStreamId);
}
return;
} //pause
#if DEBUG >= 2
fprintf(stderr, "AMF0 command not processed!\n%s\n", amfData.Print().c_str());
#endif
} //parseAMFCommand
///\brief Gets and parses one RTMP chunk at a time.
///\param inputBuffer A buffer filled with chunk data.
void parseChunk(Socket::Buffer & inputBuffer){
//for DTSC conversion
static DTSC::Meta meta_out;
static std::stringstream prebuffer; // Temporary buffer before sending real data
static bool sending = false;
static unsigned int counter = 0;
//for chunk parsing
static RTMPStream::Chunk next;
static FLV::Tag F;
static AMF::Object amfdata("empty", AMF::AMF0_DDV_CONTAINER);
static AMF::Object amfelem("empty", AMF::AMF0_DDV_CONTAINER);
static AMF::Object3 amf3data("empty", AMF::AMF3_DDV_CONTAINER);
static AMF::Object3 amf3elem("empty", AMF::AMF3_DDV_CONTAINER);
while (next.Parse(inputBuffer)){
//send ACK if we received a whole window
if ((RTMPStream::rec_cnt - RTMPStream::rec_window_at > RTMPStream::rec_window_size)){
RTMPStream::rec_window_at = RTMPStream::rec_cnt;
Socket.Send(RTMPStream::SendCTL(3, RTMPStream::rec_cnt)); //send ack (msg 3)
}
switch (next.msg_type_id){
case 0: //does not exist
#if DEBUG >= 2
fprintf(stderr, "UNKN: Received a zero-type message. Possible data corruption? Aborting!\n");
#endif
while (inputBuffer.size()){
inputBuffer.get().clear();
}
ss.close();
Socket.close();
break; //happens when connection breaks unexpectedly
case 1: //set chunk size
RTMPStream::chunk_rec_max = ntohl(*(int*)next.data.c_str());
#if DEBUG >= 5
fprintf(stderr, "CTRL: Set chunk size: %i\n", RTMPStream::chunk_rec_max);
#endif
break;
case 2: //abort message - we ignore this one
#if DEBUG >= 5
fprintf(stderr, "CTRL: Abort message\n");
#endif
//4 bytes of stream id to drop
break;
case 3: //ack
#if DEBUG >= 8
fprintf(stderr, "CTRL: Acknowledgement\n");
#endif
RTMPStream::snd_window_at = ntohl(*(int*)next.data.c_str());
RTMPStream::snd_window_at = RTMPStream::snd_cnt;
break;
case 4: {
//2 bytes event type, rest = event data
//types:
//0 = stream begin, 4 bytes ID
//1 = stream EOF, 4 bytes ID
//2 = stream dry, 4 bytes ID
//3 = setbufferlen, 4 bytes ID, 4 bytes length
//4 = streamisrecorded, 4 bytes ID
//6 = pingrequest, 4 bytes data
//7 = pingresponse, 4 bytes data
//we don't need to process this
#if DEBUG >= 5
short int ucmtype = ntohs(*(short int*)next.data.c_str());
switch (ucmtype){
case 0:
fprintf(stderr, "CTRL: UCM StreamBegin %i\n", ntohl(*((int*)(next.data.c_str()+2))));
break;
case 1:
fprintf(stderr, "CTRL: UCM StreamEOF %i\n", ntohl(*((int*)(next.data.c_str()+2))));
break;
case 2:
fprintf(stderr, "CTRL: UCM StreamDry %i\n", ntohl(*((int*)(next.data.c_str()+2))));
break;
case 3:
fprintf(stderr, "CTRL: UCM SetBufferLength %i %i\n", ntohl(*((int*)(next.data.c_str()+2))), ntohl(*((int*)(next.data.c_str()+6))));
break;
case 4:
fprintf(stderr, "CTRL: UCM StreamIsRecorded %i\n", ntohl(*((int*)(next.data.c_str()+2))));
break;
case 6:
fprintf(stderr, "CTRL: UCM PingRequest %i\n", ntohl(*((int*)(next.data.c_str()+2))));
break;
case 7:
fprintf(stderr, "CTRL: UCM PingResponse %i\n", ntohl(*((int*)(next.data.c_str()+2))));
break;
default:
fprintf(stderr, "CTRL: UCM Unknown (%hi)\n", ucmtype);
break;
}
#endif
}
break;
case 5: //window size of other end
#if DEBUG >= 5
fprintf(stderr, "CTRL: Window size\n");
#endif
RTMPStream::rec_window_size = ntohl(*(int*)next.data.c_str());
RTMPStream::rec_window_at = RTMPStream::rec_cnt;
Socket.Send(RTMPStream::SendCTL(3, RTMPStream::rec_cnt)); //send ack (msg 3)
break;
case 6:
#if DEBUG >= 5
fprintf(stderr, "CTRL: Set peer bandwidth\n");
#endif
//4 bytes window size, 1 byte limit type (ignored)
RTMPStream::snd_window_size = ntohl(*(int*)next.data.c_str());
Socket.Send(RTMPStream::SendCTL(5, RTMPStream::snd_window_size)); //send window acknowledgement size (msg 5)
break;
case 8: //audio data
case 9: //video data
case 18: //meta data
if (ss.connected()){
if (streamReset){
//reset push data to empty, in case stream properties change
meta_out.reset();
prebuffer.str("");
sending = false;
counter = 0;
streamReset = false;
}
F.ChunkLoader(next);
JSON::Value pack_out = F.toJSON(meta_out);
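//convert the incoming FLV tag to a DTSC packet; packets are prebuffered until several tags have been parsed, so meta_out is filled in before it is sent to the buffer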
if ( !pack_out.isNull()){
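//the first few packets are held in a local prebuffer; only after enough data has arrived (counter > 8)
//is the metadata sent to the buffer, followed by the prebuffered packets - presumably so the stream
//properties are complete before any media is forwarded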
if ( !sending){
counter++;
if (counter > 8){
sending = true;
meta_out.send(ss);
ss.SendNow(prebuffer.str()); //write buffer
prebuffer.str(""); //clear buffer
pack_out.sendTo(ss);
}else{
prebuffer << pack_out.toNetPacked();
}
}else{
pack_out.sendTo(ss);
}
}
}else{
#if DEBUG >= 5
fprintf(stderr, "Received useless media data\n");
#endif
Socket.close();
}
break;
case 15:
#if DEBUG >= 5
fprintf(stderr, "Received AFM3 data message\n");
#endif
break;
case 16:
#if DEBUG >= 5
fprintf(stderr, "Received AFM3 shared object\n");
#endif
break;
case 17: {
#if DEBUG >= 5
fprintf(stderr, "Received AFM3 command message\n");
#endif
if (next.data[0] != 0){
next.data = next.data.substr(1);
amf3data = AMF::parse3(next.data);
#if DEBUG >= 5
amf3data.Print();
#endif
}else{
#if DEBUG >= 5
fprintf(stderr, "Received AFM3-0 command message\n");
#endif
next.data = next.data.substr(1);
amfdata = AMF::parse(next.data);
parseAMFCommand(amfdata, 17, next.msg_stream_id);
} //parsing AMF0-style
}
break;
case 19:
#if DEBUG >= 5
fprintf(stderr, "Received AFM0 shared object\n");
#endif
break;
case 20: { //AMF0 command message
amfdata = AMF::parse(next.data);
parseAMFCommand(amfdata, 20, next.msg_stream_id);
}
break;
case 22:
#if DEBUG >= 5
fprintf(stderr, "Received aggregate message\n");
#endif
break;
default:
#if DEBUG >= 1
fprintf(stderr, "Unknown chunk received! Probably protocol corruption, stopping parsing of incoming data.\n");
#endif
stopParsing = true;
break;
}
}
} //parseChunk
///\brief Main function for the RTMP Connector
///\param conn A socket describing the connection to the client.
///\return The exit code of the connector.
int rtmpConnector(Socket::Connection & conn){
Socket = conn;
Socket.setBlocking(false);
FLV::Tag tag, init_tag;
DTSC::Stream Strm;
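//RTMP handshake: the client first sends C0 (1-byte version) plus C1 (1536 bytes), 1537 bytes in total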
while ( !Socket.Received().available(1537) && Socket.connected()){
Socket.spool();
Util::sleep(5);
}
RTMPStream::handshake_in = Socket.Received().remove(1537);
RTMPStream::rec_cnt += 1537;
if (RTMPStream::doHandshake()){
Socket.SendNow(RTMPStream::handshake_out);
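//handshake_out contains our side of the handshake; the client should now send C2 (1536 bytes)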
while ( !Socket.Received().available(1536) && Socket.connected()){
Socket.spool();
Util::sleep(5);
}
Socket.Received().remove(1536);
RTMPStream::rec_cnt += 1536;
#if DEBUG >= 5
fprintf(stderr, "Handshake succcess!\n");
#endif
}else{
fprintf(stderr, "RTMP: Handshake fail!\n");
return 0;
}
unsigned int lastStats = 0;
bool firsttime = true;
while (Socket.connected()){
if (Socket.spool() || firsttime){
parseChunk(Socket.Received());
firsttime = false;
}else{
Util::sleep(1); //sleep 1ms to prevent high CPU usage
}
if (ready4data){
if ( !inited){
//we are ready, connect the socket!
ss = Util::Stream::getStream(streamName);
if ( !ss.connected()){
#if DEBUG >= 1
fprintf(stderr, "Could not connect to server!\n");
#endif
Socket.close(); //disconnect user
break;
}
ss.setBlocking(false);
Strm.waitForMeta(ss);
//find first audio and video tracks
for (std::map<int,DTSC::Track>::iterator it = Strm.metadata.tracks.begin(); it != Strm.metadata.tracks.end(); it++){
if (videoID == -1 && (it->second.codec == "H264" || it->second.codec == "H263" || it->second.codec == "VP6")){
videoID = it->second.trackID;
}
if (audioID == -1 && (it->second.codec == "AAC" || it->second.codec == "MP3")){
audioID = it->second.trackID;
}
}
//select the tracks and play
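//commands to the buffer are newline-separated: "t <id> <id> ..." selects tracks, "p" starts playback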
std::stringstream cmd;
cmd << "t";
if (videoID != -1){
cmd << " " << videoID;
}
if (audioID != -1){
cmd << " " << audioID;
}
cmd << "\np\n";
ss.SendNow(cmd.str().c_str());
inited = true;
}
if (inited && !noStats){
long long int now = Util::epoch();
if (now != lastStats){
lastStats = now;
ss.SendNow(Socket.getStats("RTMP"));
}
}
if (ss.spool()){
while (Strm.parsePacket(ss.Received())){
if (playTransaction != -1){
//send a status reply
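//an AMF0 status reply has a fixed layout: command name, transaction ID, command object (null here), and an info object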
AMF::Object amfreply("container", AMF::AMF0_DDV_CONTAINER);
amfreply.addContent(AMF::Object("", "onStatus")); //status reply
amfreply.addContent(AMF::Object("", (double)playTransaction)); //same transaction ID
amfreply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
amfreply.addContent(AMF::Object("")); //info
amfreply.getContentP(3)->addContent(AMF::Object("level", "status"));
amfreply.getContentP(3)->addContent(AMF::Object("code", "NetStream.Play.Reset"));
amfreply.getContentP(3)->addContent(AMF::Object("description", "Playing and resetting..."));
amfreply.getContentP(3)->addContent(AMF::Object("details", "DDV"));
amfreply.getContentP(3)->addContent(AMF::Object("clientid", (double)1337));
sendCommand(amfreply, playMessageType, playStreamId);
//send streamisrecorded if stream, well, is recorded.
if (Strm.metadata.vod){ //only for recorded (VoD) streams
Socket.Send(RTMPStream::SendUSR(4, 1)); //send UCM StreamIsRecorded (4), stream 1
}
//send streambegin
Socket.Send(RTMPStream::SendUSR(0, 1)); //send UCM StreamBegin (0), stream 1
//and more reply
amfreply = AMF::Object("container", AMF::AMF0_DDV_CONTAINER);
amfreply.addContent(AMF::Object("", "onStatus")); //status reply
amfreply.addContent(AMF::Object("", (double)playTransaction)); //same transaction ID
amfreply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
amfreply.addContent(AMF::Object("")); //info
amfreply.getContentP(3)->addContent(AMF::Object("level", "status"));
amfreply.getContentP(3)->addContent(AMF::Object("code", "NetStream.Play.Start"));
amfreply.getContentP(3)->addContent(AMF::Object("description", "Playing!"));
amfreply.getContentP(3)->addContent(AMF::Object("details", "DDV"));
amfreply.getContentP(3)->addContent(AMF::Object("clientid", (double)1337));
sendCommand(amfreply, playMessageType, playStreamId);
RTMPStream::chunk_snd_max = 102400; //100KiB
Socket.Send(RTMPStream::SendCTL(1, RTMPStream::chunk_snd_max)); //send chunk size max (msg 1)
//send an additional user control message of type 32 (apparently not in the published RTMP spec; purpose unclear)
Socket.Send(RTMPStream::SendUSR(32, 1)); //send UCM type 32, stream 1
playTransaction = -1;
}
//send init data if needed
if ( !streamInited){
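//FLV/RTMP needs codec configuration before any media: a metadata tag plus the AAC/AVC sequence headers for the selected tracks
//note: if a track ID is still -1, the map lookup below default-constructs an empty track, which the init helpers are assumed to tolerate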
init_tag.DTSCMetaInit(Strm, Strm.metadata.tracks[videoID], Strm.metadata.tracks[audioID]);
if (init_tag.len){
Socket.SendNow(RTMPStream::SendMedia(init_tag));
}
if (audioID != -1){
init_tag.DTSCAudioInit(Strm.metadata.tracks[audioID]);
if (init_tag.len){
Socket.SendNow(RTMPStream::SendMedia(init_tag));
}
}
if (videoID != -1){
init_tag.DTSCVideoInit(Strm.metadata.tracks[videoID]);
if (init_tag.len){
Socket.SendNow(RTMPStream::SendMedia(init_tag));
}
}
streamInited = true;
}
//send a tag
if (tag.DTSCLoader(Strm)){
if (tag.len){
Socket.SendNow(RTMPStream::SendMedia(tag));
#if DEBUG >= 8
fprintf(stderr, "Sent tag to %i: [%u] %s\n", Socket.getSocket(), tag.tagTime(), tag.tagType().c_str());
#endif
}
}
}
}
}
}
Socket.close();
ss.SendNow(Socket.getStats("RTMP").c_str());
ss.close();
return 0;
} //rtmpConnector
} //namespace Connector_RTMP
///\brief The standard process-spawning main function.
int main(int argc, char ** argv){
Util::Config conf(argv[0], PACKAGE_VERSION);
JSON::Value capa;
capa["desc"] = "Enables the RTMP protocol which is used by Adobe Flash Player.";
capa["deps"] = "";
capa["url_rel"] = "/play/$";
capa["codecs"][0u][0u].append("H264");
capa["codecs"][0u][0u].append("H263");
capa["codecs"][0u][0u].append("VP6");
capa["codecs"][0u][1u].append("AAC");
capa["codecs"][0u][1u].append("MP3");
capa["methods"][0u]["handler"] = "rtmp";
capa["methods"][0u]["type"] = "flash/10";
capa["methods"][0u]["priority"] = 6ll;
conf.addConnectorOptions(1935, capa);
conf.parseArgs(argc, argv);
if (conf.getBool("json")){
std::cout << capa.toString() << std::endl;
return -1;
}
return conf.serveForkedSocket(Connector_RTMP::rtmpConnector);
} //main

View file

@@ -1,215 +0,0 @@
/// \file conn_ts.cpp
/// Contains the main code for the TS Connector
#include <queue>
#include <string>
#include <iostream>
#include <cmath>
#include <ctime>
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <unistd.h>
#include <getopt.h>
#include <sys/time.h>
#include <sys/wait.h>
#include <sys/types.h>
#include <mist/socket.h>
#include <mist/config.h>
#include <mist/stream.h>
#include <mist/ts_packet.h> //TS support
#include <mist/dtsc.h> //DTSC support
#include <mist/mp4.h> //For initdata conversion
#include <mist/mp4_generic.h>
///\brief Holds everything unique to the TS Connector
namespace Connector_TS {
std::string streamName;
std::string trackIDs;
///\brief Main function for the TS Connector
///\param conn A socket describing the connection to the client.
///\return The exit code of the connector.
int tsConnector(Socket::Connection & conn){
std::string ToPack;
TS::Packet PackData;
std::string DTMIData;
int PacketNumber = 0;
long long unsigned int TimeStamp = 0;
unsigned int ThisNaluSize;
char VideoCounter = 0;
char AudioCounter = 0;
bool IsKeyFrame = false;
MP4::AVCC avccbox;
bool haveAvcc = false;
DTSC::Stream Strm;
bool inited = false;
Socket::Connection ss;
while (conn.connected()){
if ( !inited){
ss = Util::Stream::getStream(streamName);
if ( !ss.connected()){
#if DEBUG >= 1
fprintf(stderr, "Could not connect to server!\n");
#endif
conn.close();
break;
}
if(trackIDs == ""){
std::stringstream tmpTracks;
// no track ids given? Find the first video and first audio track (if available) and use those!
int videoID = -1;
int audioID = -1;
Strm.waitForMeta(ss);
for (std::map<int,DTSC::Track>::iterator it = Strm.metadata.tracks.begin(); it != Strm.metadata.tracks.end(); it++){
if (audioID == -1 && it->second.codec == "AAC"){
audioID = it->first;
tmpTracks << " " << it->first;
}
if (videoID == -1 && it->second.codec == "H264"){
videoID = it->first;
tmpTracks << " " << it->first;
}
} // for iterator
trackIDs += tmpTracks.str();
} // if trackIDs == ""
std::string cmd = "t " + trackIDs + "\ns 0\np\n";
ss.SendNow( cmd );
inited = true;
}
if (ss.spool()){
while (Strm.parsePacket(ss.Received())){
std::stringstream TSBuf;
Socket::Buffer ToPack;
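//mux each DTSC packet into TS: PAT/PMT first, then the payload is wrapped in a PES header and split over 188-byte TS packets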
//write PAT and PMT TS packets
if (PacketNumber == 0){
PackData.DefaultPAT();
TSBuf.write(PackData.ToString(), 188);
PackData.DefaultPMT();
TSBuf.write(PackData.ToString(), 188);
PacketNumber += 2;
}
int PIDno = 0;
char * ContCounter = 0;
if (Strm.lastType() == DTSC::VIDEO){
if ( !haveAvcc){
avccbox.setPayload(Strm.metadata.tracks[Strm.getPacket()["trackid"].asInt()].init);
haveAvcc = true;
}
IsKeyFrame = Strm.getPacket().isMember("keyframe");
if (IsKeyFrame){
TimeStamp = (Strm.getPacket()["time"].asInt() * 27000);
}
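//MPEG-TS uses two clocks: the PCR runs at 27 MHz (ms * 27000 above), PES timestamps at 90 kHz (ms * 90 below)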
ToPack.append(avccbox.asAnnexB());
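//the DTSC payload carries H.264 NAL units with 4-byte big-endian length prefixes; TS expects Annex B,
//so each length prefix is replaced with a 0x00000001 start code (SPS/PPS from avccbox.asAnnexB() lead the payload)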
while (Strm.lastData().size() > 4){
ThisNaluSize = (Strm.lastData()[0] << 24) + (Strm.lastData()[1] << 16) + (Strm.lastData()[2] << 8) + Strm.lastData()[3];
Strm.lastData().replace(0, 4, "\000\000\000\001", 4);
if (ThisNaluSize + 4 == Strm.lastData().size()){
ToPack.append(Strm.lastData());
break;
}else{
ToPack.append(Strm.lastData().c_str(), ThisNaluSize + 4);
Strm.lastData().erase(0, ThisNaluSize + 4);
}
}
ToPack.prepend(TS::Packet::getPESVideoLeadIn(0ul, Strm.getPacket()["time"].asInt() * 90));
PIDno = 0x100 - 1 + Strm.getPacket()["trackid"].asInt();
ContCounter = &VideoCounter;
}else if (Strm.lastType() == DTSC::AUDIO){
ToPack.append(TS::GetAudioHeader(Strm.lastData().size(), Strm.metadata.tracks[Strm.getPacket()["trackid"].asInt()].init));
ToPack.append(Strm.lastData());
ToPack.prepend(TS::Packet::getPESAudioLeadIn(ToPack.bytes(1073741824ul), Strm.getPacket()["time"].asInt() * 90));
PIDno = 0x100 - 1 + Strm.getPacket()["trackid"].asInt();
ContCounter = &AudioCounter;
IsKeyFrame = false;
}
//initial packet
PackData.Clear();
PackData.PID(PIDno);
PackData.ContinuityCounter(( *ContCounter)++);
PackData.UnitStart(1);
if (IsKeyFrame){
PackData.RandomAccess(1);
PackData.PCR(TimeStamp);
}
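//a TS packet is always 188 bytes: a 4-byte header plus at most 184 payload bytes, with stuffing filling any remainder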
unsigned int toSend = PackData.AddStuffing(ToPack.bytes(184));
std::string gonnaSend = ToPack.remove(toSend);
PackData.FillFree(gonnaSend);
TSBuf.write(PackData.ToString(), 188);
PacketNumber++;
//rest of packets
while (ToPack.size()){
PackData.Clear();
PackData.PID(PIDno);
PackData.ContinuityCounter(( *ContCounter)++);
toSend = PackData.AddStuffing(ToPack.bytes(184));
gonnaSend = ToPack.remove(toSend);
PackData.FillFree(gonnaSend);
TSBuf.write(PackData.ToString(), 188);
PacketNumber++;
}
TSBuf.flush();
if (TSBuf.str().size()){
conn.SendNow(TSBuf.str().c_str(), TSBuf.str().size());
TSBuf.str("");
}
TSBuf.str("");
PacketNumber = 0;
}
}else{
Util::sleep(1000);
conn.spool();
}
}
return 0;
} //tsConnector
} //namespace Connector_TS
int main(int argc, char ** argv){
Util::Config conf(argv[0], PACKAGE_VERSION);
JSON::Value capa;
capa["desc"] = "Enables the raw MPEG Transport Stream protocol over TCP.";
capa["deps"] = "";
capa["required"]["streamname"]["name"] = "Stream";
capa["required"]["streamname"]["help"] = "What streamname to serve. For multiple streams, add this protocol multiple times using different ports.";
capa["required"]["streamname"]["type"] = "str";
capa["required"]["streamname"]["option"] = "--stream";
capa["optional"]["tracks"]["name"] = "Tracks";
capa["optional"]["tracks"]["help"] = "The track IDs of the stream that this connector will transmit separated by spaces";
capa["optional"]["tracks"]["type"] = "str";
capa["optional"]["tracks"]["option"] = "--tracks";
conf.addOption("streamname",
JSON::fromString("{\"arg\":\"string\",\"short\":\"s\",\"long\":\"stream\",\"help\":\"The name of the stream that this connector will transmit.\"}"));
conf.addOption("tracks",
JSON::fromString("{\"arg\":\"string\",\"value\":[\"\"],\"short\": \"t\",\"long\":\"tracks\",\"help\":\"The track IDs of the stream that this connector will transmit separated by spaces.\"}"));
conf.addConnectorOptions(8888, capa);
bool ret = conf.parseArgs(argc, argv);
if (conf.getBool("json")){
std::cout << capa.toString() << std::endl;
return -1;
}
if (!ret){
std::cerr << "Usage error: missing argument(s)." << std::endl;
conf.printHelp(std::cout);
return 1;
}
Connector_TS::streamName = conf.getString("streamname");
Connector_TS::trackIDs = conf.getString("tracks");
return conf.serveForkedSocket(Connector_TS::tsConnector);
} //main