LTS Commits

Thulinma 2015-04-05 21:38:36 +02:00
parent f24d97b510
commit 4bdbd82f66
72 changed files with 8245 additions and 105 deletions

View file

@ -1,6 +1,14 @@
#include OUTPUTTYPE
#include <mist/config.h>
#include <mist/socket.h>
#include <mist/defines.h>
/*LTS-START*/
#ifdef GEOIP
#define GEOIPV4 "GeoIP.dat"
#define GEOIPV6 "GeoIPv6.dat"
#endif
/*LTS-END*/
int spawnForked(Socket::Connection & S){
mistOut tmp(S);
@ -10,6 +18,21 @@ int spawnForked(Socket::Connection & S){
int main(int argc, char * argv[]) {
Util::Config conf(argv[0], PACKAGE_VERSION);
mistOut::init(&conf);
/*LTS-START*/
#ifdef GEOIP
mistOut::geoIP4 = GeoIP_open("/usr/share/GeoIP/" GEOIPV4, GEOIP_STANDARD | GEOIP_CHECK_CACHE);
if (!mistOut::geoIP4){
mistOut::geoIP4 = GeoIP_open(GEOIPV4, GEOIP_STANDARD | GEOIP_CHECK_CACHE);
}
mistOut::geoIP6 = GeoIP_open("/usr/share/GeoIP/" GEOIPV6, GEOIP_STANDARD | GEOIP_CHECK_CACHE);
if (!mistOut::geoIP6){
mistOut::geoIP6 = GeoIP_open(GEOIPV6, GEOIP_STANDARD | GEOIP_CHECK_CACHE);
}
if (!mistOut::geoIP4 || !mistOut::geoIP6){
DEBUG_MSG(DLVL_FAIL, "Could not load all GeoIP databases. %s: %s, %s: %s", GEOIPV4, mistOut::geoIP4?"success":"fail", GEOIPV6, mistOut::geoIP6?"success":"fail");
}
#endif
/*LTS-END*/
if (conf.parseArgs(argc, argv)) {
if (conf.getBool("json")) {
std::cout << mistOut::capa.toString() << std::endl;
@ -23,5 +46,11 @@ int main(int argc, char * argv[]) {
return tmp.run();
}
}
/*LTS-START*/
#ifdef GEOIP
GeoIP_delete(mistOut::geoIP4);
GeoIP_delete(mistOut::geoIP6);
#endif
/*LTS-END*/
return 0;
}

View file

@ -12,6 +12,12 @@
#include <mist/timing.h>
#include "output.h"
/*LTS-START*/
#include <arpa/inet.h>
#include <sys/socket.h>
#include <netdb.h>
/*LTS-END*/
namespace Mist {
JSON::Value Output::capa = JSON::Value();
@ -29,6 +35,11 @@ namespace Mist {
capa["optional"]["debug"]["help"] = "The debug level at which messages need to be printed.";
capa["optional"]["debug"]["option"] = "--debug";
capa["optional"]["debug"]["type"] = "debug";
capa["optional"]["startpos"]["name"] = "Starting position in live buffer";
capa["optional"]["startpos"]["help"] = "For live, where in the buffer the stream starts playback by default. 0 = beginning, 1000 = end";
capa["optional"]["startpos"]["option"] = "--startPos";
capa["optional"]["startpos"]["type"] = "uint";
cfg->addOption("startpos", JSON::fromString("{\"arg\":\"uint\",\"default\":500,\"short\":\"P\",\"long\":\"startPos\",\"help\":\"For live, where in the buffer the stream starts playback by default. 0 = beginning, 1000 = end\"}"));
}
Output::Output(Socket::Connection & conn) : myConn(conn) {
@ -377,6 +388,273 @@ namespace Mist {
return false;
}
}
/*LTS-START*/
bool Output::onList(std::string ip, std::string list){
if (list == ""){
return false;
}
std::string entry;
std::string lowerIpv6;//lower-case
std::string upperIpv6;//full-caps
do{
entry = list.substr(0,list.find(" "));//make sure we have a single entry
lowerIpv6 = "::ffff:" + entry;
upperIpv6 = "::FFFF:" + entry;
if (entry == ip || lowerIpv6 == ip || upperIpv6 == ip){
return true;
}
long long unsigned int starPos = entry.find("*");
if (starPos == std::string::npos){
if (ip == entry){
return true;
}
}else{
if (starPos == 0){//beginning of the filter
if (ip.substr(ip.length() - entry.size() - 1) == entry.substr(1)){
return true;
}
}else{
if (starPos == entry.size() - 1){//end of the filter
if (ip.find(entry.substr(0, entry.size() - 1)) == 0 ){
return true;
}
if (ip.find(entry.substr(0, lowerIpv6.size() - 1)) == 0 ){
return true;
}
if (ip.find(entry.substr(0, upperIpv6.size() - 1)) == 0 ){
return true;
}
}else{
Log("CONF","Invalid list entry detected: " + entry);
}
}
}
list.erase(0, entry.size() + 1);
}while (list != "");
return false;
}
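// A minimal usage sketch of the matching above, with hypothetical addresses;
// entries are space-separated, may end in a '*' wildcard, and the "::ffff:" /
// "::FFFF:" prefixes let IPv4-mapped IPv6 addresses match plain IPv4 entries:
//   onList("192.168.1.5",        "10.0.0.1 192.168.1.5")  -> true  (exact match)
//   onList("::ffff:192.168.1.5", "192.168.1.5")           -> true  (mapped match)
//   onList("192.168.1.5",        "192.168.*")             -> true  (trailing wildcard)
//   onList("10.0.0.1",           "192.168.*")             -> false (no entry matches)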
void Output::Log(std::string type, std::string message){
/// \todo These logs need to show up in the controller.
/// \todo Additionally, the triggering and untriggering of limits should be recorded in the controller as well.
if (type == "HLIM"){
DEBUG_MSG(DLVL_HIGH, "HardLimit Triggered: %s", message.c_str());
}
if (type == "SLIM"){
DEBUG_MSG(DLVL_HIGH, "SoftLimit Triggered: %s", message.c_str());
}
}
std::string Output::hostLookup(std::string ip){
struct sockaddr_in6 sa;
char hostName[1024];
char service[20];
if (inet_pton(AF_INET6, ip.c_str(), &(sa.sin6_addr)) != 1){
return "\n";
}
sa.sin6_family = AF_INET6;
sa.sin6_port = 0;
sa.sin6_flowinfo = 0;
sa.sin6_scope_id = 0;
int tmpRet = getnameinfo((struct sockaddr*)&sa, sizeof sa, hostName, sizeof hostName, service, sizeof service, NI_NAMEREQD );
if ( tmpRet == 0){
return hostName;
}
return "";
}
bool Output::isBlacklisted(std::string host, std::string streamName, int timeConnected){
return false;//blacklisting temporarily disabled for performance reasons
JSON::Value Storage = JSON::fromFile(Util::getTmpFolder() + "streamlist");
std::string myHostName = hostLookup(host);
if (myHostName == "\n"){
return false;
}
std::string myCountryName = getCountry(host);
JSON::ArrIter limitIt;
bool hasWhitelist = false;
bool hostOnWhitelist = false;
if (Storage["streams"].isMember(streamName)){
if (Storage["streams"][streamName].isMember("limits") && Storage["streams"][streamName]["limits"].size()){
for (limitIt = Storage["streams"][streamName]["limits"].ArrBegin(); limitIt != Storage["streams"][streamName]["limits"].ArrEnd(); limitIt++){
if ((*limitIt)["name"].asString() == "host"){
if ((*limitIt)["value"].asString()[0] == '+'){
if (!onList(host, (*limitIt)["value"].asString().substr(1))){
if (myHostName == ""){
if (timeConnected > Storage["config"]["limit_timeout"].asInt()){
return true;
}
}else{
if ( !onList(myHostName, (*limitIt)["value"].asStringRef().substr(1))){
if ((*limitIt)["type"].asStringRef() == "hard"){
Log("HLIM", "Host " + host + " not whitelisted for stream " + streamName);
return true;
}else{
Log("SLIM", "Host " + host + " not whitelisted for stream " + streamName);
}
}
}
}
}else{
if ((*limitIt)["value"].asStringRef().size() > 1 && (*limitIt)["value"].asStringRef()[0] == '-'){
if (onList(host, (*limitIt)["value"].asStringRef().substr(1))){
if ((*limitIt)["type"].asStringRef() == "hard"){
Log("HLIM", "Host " + host + " blacklisted for stream " + streamName);
return true;
}else{
Log("SLIM", "Host " + host + " blacklisted for stream " + streamName);
}
}
if (myHostName != "" && onList(myHostName, (*limitIt)["value"].asString().substr(1))){
if ((*limitIt)["type"].asStringRef() == "hard"){
Log("HLIM", "Host " + myHostName + " blacklisted for stream " + streamName);
return true;
}else{
Log("SLIM", "Host " + myHostName + " blacklisted for stream " + streamName);
}
}
}
}
}
if ((*limitIt)["name"].asString() == "geo"){
if ((*limitIt)["value"].asString()[0] == '+'){
if (myCountryName == ""){
if ((*limitIt)["type"].asString() == "hard"){
Log("HLIM", "Host " + host + " with unknown location blacklisted for stream " + streamName);
return true;
}else{
Log("SLIM", "Host " + host + " with unknown location blacklisted for stream " + streamName);
}
}
if (!onList(myCountryName, (*limitIt)["value"].asString().substr(1))){
if ((*limitIt)["type"].asString() == "hard"){
Log("HLIM", "Host " + host + " with location " + myCountryName + " not whitelisted for stream " + streamName);
return true;
}else{
Log("SLIM", "Host " + host + " with location " + myCountryName + " not whitelisted for stream " + streamName);
}
}
}else{
if ((*limitIt)["value"].asString()[0] == '-'){
if (onList(myCountryName, (*limitIt)["value"].asString().substr(1))){
if ((*limitIt)["type"].asString() == "hard"){
Log("HLIM", "Host " + host + " with location " + myCountryName + " blacklisted for stream " + streamName);
return true;
}else{
Log("SLIM", "Host " + host + " with location " + myCountryName + " blacklisted for stream " + streamName);
}
}
}
}
}
}
}
}
if (Storage["config"]["limits"].size()){
for (limitIt = Storage["config"]["limits"].ArrBegin(); limitIt != Storage["config"]["limits"].ArrEnd(); limitIt++){
if ((*limitIt)["name"].asString() == "host"){
if ((*limitIt)["value"].asString()[0] == '+'){
if (!onList(host, (*limitIt)["value"].asString().substr(1))){
if (myHostName == ""){
if (timeConnected > Storage["config"]["limit_timeout"].asInt()){
return true;
}
}else{
if ( !onList(myHostName, (*limitIt)["value"].asString().substr(1))){
if ((*limitIt)["type"].asString() == "hard"){
Log("HLIM", "Host " + host + " not whitelisted for stream " + streamName);
return true;
}else{
Log("SLIM", "Host " + host + " not whitelisted for stream " + streamName);
}
}
}
}
}else{
if ((*limitIt)["value"].asString()[0] == '-'){
if (onList(host, (*limitIt)["value"].asString().substr(1))){
if ((*limitIt)["type"].asString() == "hard"){
Log("HLIM", "Host " + host + " blacklisted for stream " + streamName);
return true;
}else{
Log("SLIM", "Host " + host + " blacklisted for stream " + streamName);
}
}
if (myHostName != "" && onList(myHostName, (*limitIt)["value"].asString().substr(1))){
if ((*limitIt)["type"].asString() == "hard"){
Log("HLIM", "Host " + myHostName + " blacklisted for stream " + streamName);
return true;
}else{
Log("SLIM", "Host " + myHostName + " blacklisted for stream " + streamName);
}
}
}
}
}
if ((*limitIt)["name"].asString() == "geo"){
if ((*limitIt)["value"].asString()[0] == '+'){
if (myCountryName == ""){
if ((*limitIt)["type"].asString() == "hard"){
Log("HLIM", "Host " + host + " with unknown location blacklisted for stream " + streamName);
return true;
}else{
Log("SLIM", "Host " + host + " with unknown location blacklisted for stream " + streamName);
}
}
if (!onList(myCountryName, (*limitIt)["value"].asString().substr(1))){
if ((*limitIt)["type"].asString() == "hard"){
Log("HLIM", "Host " + host + " with location " + myCountryName + " not whitelisted for stream " + streamName);
return true;
}else{
Log("SLIM", "Host " + host + " with location " + myCountryName + " not whitelisted for stream " + streamName);
}
}
}else{
if ((*limitIt)["value"].asStringRef().size() > 1 && (*limitIt)["value"].asStringRef()[0] == '-'){
if (onList(myCountryName, (*limitIt)["value"].asStringRef().substr(1))){
if ((*limitIt)["type"].asString() == "hard"){
Log("HLIM", "Host " + host + " with location " + myCountryName + " blacklisted for stream " + streamName);
return true;
}else{
Log("SLIM", "Host " + host + " with location " + myCountryName + " blacklisted for stream " + streamName);
}
}
}
}
}
}
}
if (hasWhitelist){
if (hostOnWhitelist || myHostName == ""){
return false;
}else{
return true;
}
}
return false;
}
#ifdef GEOIP
GeoIP * Output::geoIP4 = 0;
GeoIP * Output::geoIP6 = 0;
#endif
std::string Output::getCountry(std::string ip){
char * code = NULL;
#ifdef GEOIP
if (geoIP4){
code = (char*)GeoIP_country_code_by_addr(geoIP4, ip.c_str());
}
if (!code && geoIP6){
code = (char*)GeoIP_country_code_by_addr_v6(geoIP6, ip.c_str());
}
#endif
if (!code){
return "";
}
return code;
}
/*LTS-END*/
void Output::requestHandler(){
static bool firstData = true;//only the first time, we call onRequest if there's data buffered already.
@ -586,6 +864,12 @@ namespace Mist {
if (statsPage.getData()){
unsigned long long int now = Util::epoch();
if (now != lastStats){
/*LTS-START*/
if (statsPage.getData()[-1] > 127){
myConn.close();
return;
}
/*LTS-END*/
lastStats = now;
IPC::statExchange tmpEx(statsPage.getData());
tmpEx.now(now);

View file

@ -9,15 +9,20 @@
#include <mist/dtsc.h>
#include <mist/socket.h>
#include <mist/shared_memory.h>
/*LTS-START*/
#ifdef GEOIP
#include <GeoIP.h>
#endif
/*LTS-END*/
#include "../io.h"
namespace Mist {
/// This struct keeps packet information sorted in playback order, so the
/// Mist::Output class knows when to buffer which packet.
struct sortedPageInfo{
struct sortedPageInfo {
bool operator < (const sortedPageInfo & rhs) const {
if (time < rhs.time){
if (time < rhs.time) {
return true;
}
return (time == rhs.time && tid < rhs.tid);
@ -41,6 +46,12 @@ namespace Mist {
//static members for initialization and capabilities
static void init(Util::Config * cfg);
static JSON::Value capa;
/*LTS-START*/
#ifdef GEOIP
static GeoIP * geoIP4;
static GeoIP * geoIP6;
#endif
/*LTS-END*/
//non-virtual generic functions
int run();
void stats();
@ -64,6 +75,14 @@ namespace Mist {
virtual void onFail();
virtual void requestHandler();
private://these *should* not be messed with in child classes.
/*LTS-START*/
void Log(std::string type, std::string message);
bool checkLimits();
bool isBlacklisted(std::string host, std::string streamName, int timeConnected);
std::string hostLookup(std::string ip);
bool onList(std::string ip, std::string list);
std::string getCountry(std::string ip);
/*LTS-END*/
std::map<unsigned long, unsigned int> currKeyOpen;
void loadPageForKey(long unsigned int trackId, long long int keyNum);
int pageNumForKey(long unsigned int trackId, long long int keyNum);
@ -82,7 +101,7 @@ namespace Mist {
unsigned int maxSkipAhead;///< Maximum ms that we will go ahead of the intended timestamps.
unsigned int minSkipAhead;///< Minimum ms that we will go ahead of the intended timestamps.
unsigned int realTime;///< Playback speed times 1000 (1000 == 1.0X). Zero is infinite.
//Read/write status variables
Socket::Connection & myConn;///< Connection to the client.
@ -97,3 +116,4 @@ namespace Mist {
};
}

View file

@ -0,0 +1,669 @@
#include "output_dash_mp4.h"
#include <mist/defines.h>
#include <mist/mp4.h>
#include <mist/mp4_generic.h>
#include <mist/mp4_dash.h>
#include <mist/checksum.h>
namespace Mist {
OutDashMP4::OutDashMP4(Socket::Connection & conn) : HTTPOutput(conn){realTime = 0;}
OutDashMP4::~OutDashMP4(){}
std::string OutDashMP4::makeTime(long long unsigned int time){
std::stringstream r;
r << "PT" << (((time / 1000) / 60) /60) << "H" << ((time / 1000) / 60) % 60 << "M" << (time / 1000) % 60 << "." << time % 1000 / 10 << "S";
return r.str();
}
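// A quick worked check of the duration format above (values are examples, not
// from the commit): the result is an ISO-8601 style duration with hundredths.
//   makeTime(83450)   -> "PT0H1M23.45S"   (83450 ms = 1 min 23.45 s)
//   makeTime(3600000) -> "PT1H0M0.0S"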
void OutDashMP4::buildFtyp(unsigned int tid){
H.Chunkify("\000\000\000", 3, myConn);
H.Chunkify("\040", 1, myConn);
H.Chunkify("ftypisom\000\000\000\000isom", 16, myConn);
if (myMeta.tracks[tid].type == "video"){
H.Chunkify("avc1", 4, myConn);
}else{
H.Chunkify("M4A ", 4, myConn);
}
H.Chunkify("mp42dash", 8, myConn);
}
void OutDashMP4::buildStyp(unsigned int tid){
H.Chunkify("\000\000\000\030stypmsdh\000\000\000\000msdhmsix", 24, myConn);
}
std::string OutDashMP4::buildMoov(unsigned int tid){
std::string trackType = myMeta.tracks[tid].type;
MP4::MOOV moovBox;
MP4::MVHD mvhdBox(0);
mvhdBox.setTrackID(2);
mvhdBox.setDuration(0xFFFFFFFF);
moovBox.setContent(mvhdBox, 0);
MP4::IODS iodsBox;
if (trackType == "video"){
iodsBox.setODVideoLevel(0xFE);
}else{
iodsBox.setODAudioLevel(0xFE);
}
moovBox.setContent(iodsBox, 1);
MP4::MVEX mvexBox;
MP4::MEHD mehdBox;
mehdBox.setFragmentDuration(0xFFFFFFFF);
mvexBox.setContent(mehdBox, 0);
MP4::TREX trexBox;
trexBox.setTrackID(1);
mvexBox.setContent(trexBox, 1);
moovBox.setContent(mvexBox, 2);
MP4::TRAK trakBox;
MP4::TKHD tkhdBox(1, 0, myMeta.tracks[tid].width, myMeta.tracks[tid].height);
tkhdBox.setFlags(3);
if (trackType == "audio"){
tkhdBox.setVolume(256);
tkhdBox.setWidth(0);
tkhdBox.setHeight(0);
}
tkhdBox.setDuration(0xFFFFFFFF);
trakBox.setContent(tkhdBox, 0);
MP4::MDIA mdiaBox;
MP4::MDHD mdhdBox(0);
mdhdBox.setLanguage(0x44);
mdhdBox.setDuration(myMeta.tracks[tid].lastms);
mdiaBox.setContent(mdhdBox, 0);
if (trackType == "video"){
MP4::HDLR hdlrBox(myMeta.tracks[tid].type,"VideoHandler");
mdiaBox.setContent(hdlrBox, 1);
}else{
MP4::HDLR hdlrBox(myMeta.tracks[tid].type,"SoundHandler");
mdiaBox.setContent(hdlrBox, 1);
}
MP4::MINF minfBox;
MP4::DINF dinfBox;
MP4::DREF drefBox;
dinfBox.setContent(drefBox, 0);
minfBox.setContent(dinfBox, 0);
MP4::STBL stblBox;
MP4::STSD stsdBox;
stsdBox.setVersion(0);
if (myMeta.tracks[tid].codec == "H264"){
MP4::AVC1 avc1Box;
avc1Box.setWidth(myMeta.tracks[tid].width);
avc1Box.setHeight(myMeta.tracks[tid].height);
MP4::AVCC avccBox;
avccBox.setPayload(myMeta.tracks[tid].init);
avc1Box.setCLAP(avccBox);
stsdBox.setEntry(avc1Box, 0);
}
if (myMeta.tracks[tid].codec == "HEVC"){
MP4::HEV1 hev1Box;
hev1Box.setWidth(myMeta.tracks[tid].width);
hev1Box.setHeight(myMeta.tracks[tid].height);
MP4::HVCC hvccBox;
hvccBox.setPayload(myMeta.tracks[tid].init);
hev1Box.setCLAP(hvccBox);
stsdBox.setEntry(hev1Box, 0);
}
if (myMeta.tracks[tid].codec == "AAC"){
MP4::AudioSampleEntry ase;
ase.setCodec("mp4a");
ase.setDataReferenceIndex(1);
ase.setSampleRate(myMeta.tracks[tid].rate);
ase.setChannelCount(myMeta.tracks[tid].channels);
ase.setSampleSize(myMeta.tracks[tid].size);
MP4::ESDS esdsBox(myMeta.tracks[tid].init);
ase.setCodecBox(esdsBox);
stsdBox.setEntry(ase,0);
}
if (myMeta.tracks[tid].codec == "AC3"){
///\todo Note: this code is copied, note for muxing separation
MP4::AudioSampleEntry ase;
ase.setCodec("ac-3");
ase.setDataReferenceIndex(1);
ase.setSampleRate(myMeta.tracks[tid].rate);
ase.setChannelCount(myMeta.tracks[tid].channels);
ase.setSampleSize(myMeta.tracks[tid].size);
MP4::DAC3 dac3Box;
switch (myMeta.tracks[tid].rate){
case 48000:
dac3Box.setSampleRateCode(0);
break;
case 44100:
dac3Box.setSampleRateCode(1);
break;
case 32000:
dac3Box.setSampleRateCode(2);
break;
default:
dac3Box.setSampleRateCode(3);
break;
}
/// \todo the next settings are set to generic values, we might want to make these flexible
dac3Box.setBitStreamIdentification(8);//check the docs, this is a weird property
dac3Box.setBitStreamMode(0);//set to main, mixed audio
dac3Box.setAudioConfigMode(2);///\todo find out if ACMode should be different
if (myMeta.tracks[tid].channels > 4){
dac3Box.setLowFrequencyEffectsChannelOn(1);
}else{
dac3Box.setLowFrequencyEffectsChannelOn(0);
}
dac3Box.setFrameSizeCode(20);//should be OK, but test this.
ase.setCodecBox(dac3Box);
}
stblBox.setContent(stsdBox, 0);
MP4::STTS sttsBox;
sttsBox.setVersion(0);
stblBox.setContent(sttsBox, 1);
MP4::STSC stscBox;
stscBox.setVersion(0);
stblBox.setContent(stscBox, 2);
MP4::STCO stcoBox;
stcoBox.setVersion(0);
stblBox.setContent(stcoBox, 3);
MP4::STSZ stszBox;
stszBox.setVersion(0);
stblBox.setContent(stszBox, 4);
minfBox.setContent(stblBox, 1);
if (trackType == "video"){
MP4::VMHD vmhdBox;
vmhdBox.setFlags(1);
minfBox.setContent(vmhdBox, 2);
}else{
MP4::SMHD smhdBox;
minfBox.setContent(smhdBox, 2);
}
mdiaBox.setContent(minfBox, 2);
trakBox.setContent(mdiaBox, 1);
moovBox.setContent(trakBox, 3);
return std::string(moovBox.asBox(),moovBox.boxedSize());
}
std::string OutDashMP4::buildSidx(unsigned int tid){
MP4::AVCC avccBox;
MP4::HVCC hvccBox;
if (myMeta.tracks[tid].codec == "H264"){
avccBox.setPayload(myMeta.tracks[tid].init);
}
if (myMeta.tracks[tid].codec == "HEVC"){
hvccBox.setPayload(myMeta.tracks[tid].init);
}
int curPart = 0;
MP4::SIDX sidxBox;
sidxBox.setReferenceID(1);
sidxBox.setTimescale(1000);
sidxBox.setEarliestPresentationTime(myMeta.tracks[tid].firstms);
sidxBox.setFirstOffset(0);
int j = 0;
for (std::deque<DTSC::Key>::iterator it = myMeta.tracks[tid].keys.begin(); it != myMeta.tracks[tid].keys.end(); it++){
MP4::sidxReference refItem;
refItem.referenceType = false;
refItem.referencedSize = 0;
for (int i = 0; i < it->getParts(); i++){
refItem.referencedSize += myMeta.tracks[tid].parts[curPart++].getSize();
}
if (myMeta.tracks[tid].codec == "H264"){
refItem.referencedSize += 14 + avccBox.getSPSLen() + avccBox.getPPSLen();
}
if (myMeta.tracks[tid].codec == "HEVC"){
std::deque<MP4::HVCCArrayEntry> content = hvccBox.getArrays();
for (std::deque<MP4::HVCCArrayEntry>::iterator it = content.begin(); it != content.end(); it++){
for (std::deque<std::string>::iterator it2 = it->nalUnits.begin(); it2 != it->nalUnits.end(); it2++){
refItem.referencedSize += 4 + (*it2).size();
}
}
}
fragmentSizes[tid][j] = refItem.referencedSize;
if (it->getLength()){
refItem.subSegmentDuration = it->getLength();
}else{
refItem.subSegmentDuration = myMeta.tracks[tid].lastms - it->getTime();
}
refItem.sapStart = false;
refItem.sapType = 0;
refItem.sapDeltaTime = 0;
sidxBox.setReference(refItem, j++);
}
return std::string(sidxBox.asBox(),sidxBox.boxedSize());
}
std::string OutDashMP4::buildSidx(unsigned int tid, unsigned int keyNum){
MP4::AVCC avccBox;
avccBox.setPayload(myMeta.tracks[tid].init);
int curPart = 0;
MP4::SIDX sidxBox;
sidxBox.setReferenceID(1);
sidxBox.setTimescale(1000);
sidxBox.setEarliestPresentationTime(myMeta.tracks[tid].keys[keyNum].getTime());
sidxBox.setFirstOffset(0);
for (int i = 0; i < keyNum; i++){
curPart += myMeta.tracks[tid].keys[i].getParts();
}
MP4::sidxReference refItem;
refItem.referenceType = false;
if (myMeta.tracks[tid].keys[keyNum].getLength()){
refItem.subSegmentDuration = myMeta.tracks[tid].keys[keyNum].getLength();
}else{
refItem.subSegmentDuration = myMeta.tracks[tid].lastms - myMeta.tracks[tid].keys[keyNum].getTime();
}
refItem.sapStart = false;
refItem.sapType = 0;
refItem.sapDeltaTime = 0;
sidxBox.setReference(refItem, 0);
return std::string(sidxBox.asBox(),sidxBox.boxedSize());
}
std::string OutDashMP4::buildMoof(unsigned int tid, unsigned int keyNum){
MP4::MOOF moofBox;
MP4::MFHD mfhdBox;
mfhdBox.setSequenceNumber(keyNum + 1);
moofBox.setContent(mfhdBox, 0);
MP4::TRAF trafBox;
MP4::TFHD tfhdBox;
if (myMeta.tracks[tid].codec == "H264" || myMeta.tracks[tid].codec == "HEVC"){
tfhdBox.setTrackID(1);
}
if (myMeta.tracks[tid].codec == "AAC"){
tfhdBox.setFlags(MP4::tfhdSampleFlag);
tfhdBox.setTrackID(1);
tfhdBox.setDefaultSampleFlags(MP4::isKeySample);
}
trafBox.setContent(tfhdBox, 0);
MP4::TFDT tfdtBox;
///\todo Determine index for live
tfdtBox.setBaseMediaDecodeTime(myMeta.tracks[tid].keys[keyNum].getTime());
trafBox.setContent(tfdtBox, 1);
int i = 0;
for (int j = 0; j < keyNum; j++){
i += myMeta.tracks[tid].keys[j].getParts();
}
MP4::TRUN trunBox;
if (myMeta.tracks[tid].codec == "H264"){
trunBox.setFlags(MP4::trundataOffset | MP4::trunsampleSize | MP4::trunsampleDuration | MP4::trunfirstSampleFlags | MP4::trunsampleOffsets);
trunBox.setFirstSampleFlags(MP4::isKeySample);
trunBox.setDataOffset(88 + (12 * myMeta.tracks[tid].keys[keyNum].getParts()) + 8);
MP4::AVCC avccBox;
avccBox.setPayload(myMeta.tracks[tid].init);
for (int j = 0; j < myMeta.tracks[tid].keys[keyNum].getParts(); j++){
MP4::trunSampleInformation trunEntry;
if (!j){
trunEntry.sampleSize = myMeta.tracks[tid].parts[i].getSize() + 14 + avccBox.getSPSLen() + avccBox.getPPSLen();
}else{
trunEntry.sampleSize = myMeta.tracks[tid].parts[i].getSize();
}
trunEntry.sampleDuration = myMeta.tracks[tid].parts[i].getDuration();
trunEntry.sampleOffset = myMeta.tracks[tid].parts[i].getOffset();
trunBox.setSampleInformation(trunEntry, j);
i++;
}
}
if (myMeta.tracks[tid].codec == "HEVC"){
trunBox.setFlags(MP4::trundataOffset | MP4::trunsampleSize | MP4::trunsampleDuration | MP4::trunfirstSampleFlags | MP4::trunsampleOffsets);
trunBox.setFirstSampleFlags(MP4::isKeySample);
trunBox.setDataOffset(88 + (12 * myMeta.tracks[tid].keys[keyNum].getParts()) + 8);
MP4::HVCC hvccBox;
hvccBox.setPayload(myMeta.tracks[tid].init);
std::deque<MP4::HVCCArrayEntry> content = hvccBox.getArrays();
for (int j = 0; j < myMeta.tracks[tid].keys[keyNum].getParts(); j++){
MP4::trunSampleInformation trunEntry;
trunEntry.sampleSize = myMeta.tracks[tid].parts[i].getSize();
if (!j){
for (std::deque<MP4::HVCCArrayEntry>::iterator it = content.begin(); it != content.end(); it++){
for (std::deque<std::string>::iterator it2 = it->nalUnits.begin(); it2 != it->nalUnits.end(); it2++){
trunEntry.sampleSize += 4 + (*it2).size();
}
}
}
trunEntry.sampleDuration = myMeta.tracks[tid].parts[i].getDuration();
trunEntry.sampleOffset = myMeta.tracks[tid].parts[i].getOffset();
trunBox.setSampleInformation(trunEntry, j);
i++;
}
}
if (myMeta.tracks[tid].codec == "AAC"){
trunBox.setFlags(MP4::trundataOffset | MP4::trunsampleSize | MP4::trunsampleDuration);
trunBox.setDataOffset(88 + (8 * myMeta.tracks[tid].keys[keyNum].getParts()) + 8);
for (int j = 0; j < myMeta.tracks[tid].keys[keyNum].getParts(); j++){
MP4::trunSampleInformation trunEntry;
trunEntry.sampleSize = myMeta.tracks[tid].parts[i].getSize();
trunEntry.sampleDuration = myMeta.tracks[tid].parts[i].getDuration();
trunBox.setSampleInformation(trunEntry, j);
i++;
}
}
trafBox.setContent(trunBox, 2);
moofBox.setContent(trafBox, 1);
return std::string(moofBox.asBox(), moofBox.boxedSize());
}
std::string OutDashMP4::buildNalUnit(unsigned int len, const char * data){
std::stringstream r;
r << (char)((len >> 24) & 0xFF);
r << (char)((len >> 16) & 0xFF);
r << (char)((len >> 8) & 0xFF);
r << (char)((len) & 0xFF);
r << std::string(data, len);
return r.str();
}
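// A worked example of the length prefix above (illustrative, not part of the commit):
//   buildNalUnit(2, "\011\340") -> bytes 00 00 00 02 09 E0
// i.e. the 2-byte H.264 access unit delimiter NAL (0x09 0xE0) preceded by its
// size in MP4-style 4-byte big-endian framing, as sent first by buildMdat below.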
void OutDashMP4::buildMdat(unsigned int tid, unsigned int keyNum){
MP4::AVCC avccBox;
avccBox.setPayload(myMeta.tracks[tid].init);
std::stringstream r;
int size = fragmentSizes[tid][keyNum] + 8;
r << (char)((size >> 24) & 0xFF);
r << (char)((size >> 16) & 0xFF);
r << (char)((size >> 8) & 0xFF);
r << (char)((size) & 0xFF);
r << "mdat";
H.Chunkify(r.str().data(), r.str().size(), myConn);
selectedTracks.clear();
selectedTracks.insert(tid);
seek(myMeta.tracks[tid].keys[keyNum].getTime());
std::string init;
char * data;
unsigned int dataLen;
int partNum = 0;
for (int i = 0; i < keyNum; i++){
partNum += myMeta.tracks[tid].keys[i].getParts();
}
if (myMeta.tracks[tid].codec == "H264"){
init = buildNalUnit(2, "\011\340");
H.Chunkify(init, myConn);//09E0
init = buildNalUnit(avccBox.getSPSLen(), avccBox.getSPS());
H.Chunkify(init, myConn);
init = buildNalUnit(avccBox.getPPSLen(), avccBox.getPPS());
H.Chunkify(init, myConn);
}
if (myMeta.tracks[tid].codec == "HEVC"){
MP4::HVCC hvccBox;
hvccBox.setPayload(myMeta.tracks[tid].init);
std::deque<MP4::HVCCArrayEntry> content = hvccBox.getArrays();
for (int j = 0; j < myMeta.tracks[tid].keys[keyNum].getParts(); j++){
for (std::deque<MP4::HVCCArrayEntry>::iterator it = content.begin(); it != content.end(); it++){
for (std::deque<std::string>::iterator it2 = it->nalUnits.begin(); it2 != it->nalUnits.end(); it2++){
init = buildNalUnit((*it2).size(), (*it2).c_str());
H.Chunkify(init, myConn);
}
}
}
}
for (int i = 0; i < myMeta.tracks[tid].keys[keyNum].getParts(); i++){
prepareNext();
thisPacket.getString("data", data, dataLen);
H.Chunkify(data, dataLen, myConn);
}
return;
}
std::string OutDashMP4::buildManifest(){
initialize();
int lastTime = 0;
int lastVidTime = 0;
int vidKeys = 0;
int vidInitTrack = 0;
int lastAudTime = 0;
int audKeys = 0;
int audInitTrack = 0;
for (std::map<unsigned int, DTSC::Track>::iterator it = myMeta.tracks.begin(); it != myMeta.tracks.end(); it ++){
if (it->second.lastms > lastTime){
lastTime = it->second.lastms;
}
if (it->second.codec == "H264" && it->second.lastms > lastVidTime){
lastVidTime = it->second.lastms;
vidKeys = it->second.keys.size();
vidInitTrack = it->first;
}
if (it->second.codec == "HEVC" && it->second.lastms > lastVidTime){
lastVidTime = it->second.lastms;
vidKeys = it->second.keys.size();
vidInitTrack = it->first;
}
if (it->second.codec == "AAC" && it->second.lastms > lastAudTime){
lastAudTime = it->second.lastms;
audKeys = it->second.keys.size();
audInitTrack = it->first;
}
}
std::stringstream r;
r << "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" << std::endl;
r << "<MPD xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xmlns=\"urn:mpeg:dash:schema:mpd:2011\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" xsi:schemaLocation=\"urn:mpeg:DASH:schema:MPD:2011 http://standards.iso.org/ittf/PubliclyAvailableStandards/MPEG-DASH_schema_files/DASH-MPD.xsd\" profiles=\"urn:mpeg:dash:profile:isoff-live:2011\" type=\"static\" mediaPresentationDuration=\"" << makeTime(lastTime) << "\" minBufferTime=\"PT1.5S\" >" << std::endl;
r << " <ProgramInformation><Title>" << streamName << "</Title></ProgramInformation>" << std::endl;
r << " <Period start=\"PT0S\">" << std::endl;
if (vidInitTrack){
r << " <AdaptationSet id=\"0\" mimeType=\"video/mp4\" width=\"" << myMeta.tracks[vidInitTrack].width << "\" height=\"" << myMeta.tracks[vidInitTrack].height << "\" frameRate=\"" << myMeta.tracks[vidInitTrack].fpks / 1000 << "\" segmentAlignment=\"true\" startWithSAP=\"1\" subsegmentAlignment=\"true\" subsegmentStartsWithSAP=\"1\">" << std::endl;
r << " <SegmentTemplate timescale=\"1000\" media=\"chunk_$RepresentationID$_$Time$.m4s\" initialization=\"chunk_$RepresentationID$_init.m4s\">" << std::endl;
r << " <SegmentTimeline>" << std::endl;
for (int i = 0; i < myMeta.tracks[vidInitTrack].keys.size() - 1; i++){
r << " <S " << (i == 0 ? "t=\"0\" " : "") << "d=\"" << myMeta.tracks[vidInitTrack].keys[i].getLength() << "\" />" << std::endl;
}
int lastDur = myMeta.tracks[vidInitTrack].lastms - myMeta.tracks[vidInitTrack].keys.rbegin()->getTime();
r << " <S d=\"" << lastDur << "\" />" << std::endl;
r << " </SegmentTimeline>" << std::endl;
r << " </SegmentTemplate>" << std::endl;
for (std::map<unsigned int, DTSC::Track>::iterator it = myMeta.tracks.begin(); it != myMeta.tracks.end(); it++){
if (it->second.codec == "H264"){
MP4::AVCC avccBox;
avccBox.setPayload(it->second.init);
r << " <Representation ";
r << "id=\"" << it->first << "\" ";
r << "codecs=\"avc1.";
r << std::hex << std::setw(2) << std::setfill('0') << (int)avccBox.getSPS()[0] << std::dec;
r << std::hex << std::setw(2) << std::setfill('0') << (int)avccBox.getSPS()[1] << std::dec;
r << std::hex << std::setw(2) << std::setfill('0') << (int)avccBox.getSPS()[2] << std::dec;
r << "\" ";
r << "bandwidth=\"" << it->second.bps << "\" ";
r << "/>" << std::endl;
}
if (it->second.codec == "HEVC"){
r << " <Representation ";
r << "id=\"" << it->first << "\" ";
r << "codecs=\"hev1.";
r << std::hex << std::setw(2) << std::setfill('0') << (int)it->second.init[1] << std::dec;
r << std::hex << std::setw(2) << std::setfill('0') << (int)it->second.init[6] << std::dec;
r << std::hex << std::setw(2) << std::setfill('0') << (int)it->second.init[7] << std::dec;
r << std::hex << std::setw(2) << std::setfill('0') << (int)it->second.init[8] << std::dec;
r << std::hex << std::setw(2) << std::setfill('0') << (int)it->second.init[9] << std::dec;
r << std::hex << std::setw(2) << std::setfill('0') << (int)it->second.init[10] << std::dec;
r << std::hex << std::setw(2) << std::setfill('0') << (int)it->second.init[11] << std::dec;
r << std::hex << std::setw(2) << std::setfill('0') << (int)it->second.init[12] << std::dec;
r << "\" ";
r << "bandwidth=\"" << it->second.bps << "\" ";
r << "/>" << std::endl;
}
}
r << " </AdaptationSet>" << std::endl;
}
if (audInitTrack){
r << " <AdaptationSet id=\"1\" mimeType=\"audio/mp4\" segmentAlignment=\"true\" startWithSAP=\"1\" subsegmentAlignment=\"true\" subsegmentStartsWithSAP=\"1\" >" << std::endl;
r << " <Role schemeIdUri=\"urn:mpeg:dash:role:2011\" value=\"main\"/>" << std::endl;
r << " <SegmentTemplate timescale=\"1000\" media=\"chunk_$RepresentationID$_$Time$.m4s\" initialization=\"chunk_$RepresentationID$_init.m4s\">" << std::endl;
r << " <SegmentTimeline>" << std::endl;
for (int i = 0; i < myMeta.tracks[audInitTrack].keys.size() - 1; i++){
r << " <S " << (i == 0 ? "t=\"0\" " : "") << "d=\"" << myMeta.tracks[audInitTrack].keys[i].getLength() << "\" />" << std::endl;
}
int lastDur = myMeta.tracks[audInitTrack].lastms - myMeta.tracks[audInitTrack].keys.rbegin()->getTime();
r << " <S d=\"" << lastDur << "\" />" << std::endl;
r << " </SegmentTimeline>" << std::endl;
r << " </SegmentTemplate>" << std::endl;
for (std::map<unsigned int, DTSC::Track>::iterator it = myMeta.tracks.begin(); it != myMeta.tracks.end(); it++){
if (it->second.codec == "AAC"){
r << " <Representation ";
r << "id=\"" << it->first << "\" ";
r << "codecs=\"mp4a.40.2\" ";
r << "audioSamplingRate=\"" << it->second.rate << "\" ";
r << "bandwidth=\"" << it->second.bps << "\">" << std::endl;
r << " <AudioChannelConfiguration schemeIdUri=\"urn:mpeg:dash:23003:3:audio_channel_configuration:2011\" value=\"" << it->second.channels << "\" />" << std::endl;
r << " </Representation>" << std::endl;
}
}
r << " </AdaptationSet>" << std::endl;
}
r << " </Period>" << std::endl;
r << "</MPD>" << std::endl;
return r.str();
}
void OutDashMP4::init(Util::Config * cfg){
HTTPOutput::init(cfg);
capa["name"] = "DASHMP4";
capa["desc"] = "Enables HTTP protocol progressive streaming.";
capa["url_rel"] = "/dash/$/index.mpd";
capa["url_prefix"] = "/dash/$/";
capa["socket"] = "http_dash_mp4";
capa["codecs"][0u][0u].append("H264");
capa["codecs"][0u][0u].append("HEVC");
capa["codecs"][0u][1u].append("AAC");
capa["codecs"][0u][1u].append("AC3");
capa["methods"][0u]["handler"] = "http";
capa["methods"][0u]["type"] = "dash/video/mp4";
capa["methods"][0u]["priority"] = 8ll;
capa["methods"][0u]["nolive"] = 1;
}
/// Parses a "Range: " header, setting byteStart, byteEnd and seekPoint using data from metadata and tracks to do
/// the calculations.
/// On error, byteEnd is set to zero.
void OutDashMP4::parseRange(std::string header, long long & byteStart, long long & byteEnd){
int firstPos = header.find("=") + 1;
byteStart = atoll(header.substr(firstPos, header.find("-", firstPos)).c_str());
byteEnd = atoll(header.substr(header.find("-", firstPos) + 1).c_str());
DEBUG_MSG(DLVL_DEVEL, "Range request: %lli-%lli (%s)", byteStart, byteEnd, header.c_str());
}
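// A worked example of the parsing above, for a hypothetical header value of
// "bytes=500-999": byteStart becomes 500 and byteEnd 999. Note that substr()
// takes a length as its second argument rather than an end position, so the
// first substring runs past the '-', but atoll() stops at the '-' anyway and
// still yields the intended start value.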
int OutDashMP4::getKeyFromRange(unsigned int tid, long long int byteStart){
unsigned long long int currOffset = 0;
for (int i = 0; i < myMeta.tracks[tid].keys.size(); i++){
if (byteStart == currOffset){
return i;
}
if (byteStart < currOffset && i > 0){
return i - 1;
}
DEBUG_MSG(DLVL_DEVEL, "%lld > %llu", byteStart, currOffset);
}
return -1;
}
void OutDashMP4::initialize(){
HTTPOutput::initialize();
for (std::map<unsigned int,DTSC::Track>::iterator it = myMeta.tracks.begin(); it != myMeta.tracks.end(); it++){
if (!moovBoxes.count(it->first)){
moovBoxes[it->first] = buildMoov(it->first);
buildSidx(it->first);
}
}
}
void OutDashMP4::onHTTP(){
initialize();
std::string url = H.url;
if (H.method == "OPTIONS"){
H.Clean();
H.SetHeader("Content-Type", "application/octet-stream");
H.SetHeader("Cache-Control", "no-cache");
H.SetHeader("MistMultiplex", "No");
H.SetHeader("Access-Control-Allow-Origin", "*");
H.SetHeader("Access-Control-Allow-Methods", "GET, POST");
H.SetHeader("Access-Control-Allow-Headers", "Content-Type, X-Requested-With");
H.SetHeader("Access-Control-Allow-Credentials", "true");
H.SetBody("");
H.SendResponse("200", "OK", myConn);
H.Clean();
return;
}
if (url.find(".mpd") != std::string::npos){
H.Clean();
H.SetHeader("Content-Type", "application/xml");
H.SetHeader("Cache-Control", "no-cache");
H.SetHeader("MistMultiplex", "No");
H.SetHeader("Access-Control-Allow-Origin", "*");
H.SetHeader("Access-Control-Allow-Methods", "GET, POST");
H.SetHeader("Access-Control-Allow-Headers", "Content-Type, X-Requested-With");
H.SetHeader("Access-Control-Allow-Credentials", "true");
H.SetBody(buildManifest());
H.SendResponse("200", "OK", myConn);
DEVEL_MSG("Manifest sent");
}else{
long long int bench = Util::getMS();
int pos = url.find("chunk_") + 6;//put our marker just after the _ beyond chunk
int tid = atoi(url.substr(pos).c_str());
DEBUG_MSG(DLVL_DEVEL, "Track %d requested", tid);
H.Clean();
H.SetHeader("Content-Type", "video/mp4");
H.SetHeader("Cache-Control", "no-cache");
H.SetHeader("MistMultiplex", "No");
H.SetHeader("Access-Control-Allow-Origin", "*");
H.SetHeader("Access-Control-Allow-Methods", "GET, POST");
H.SetHeader("Access-Control-Allow-Headers", "Content-Type, X-Requested-With");
H.SetHeader("Access-Control-Allow-Credentials", "true");
H.StartResponse(H, myConn);
if (url.find("init.m4s") != std::string::npos){
DEBUG_MSG(DLVL_DEVEL, "Handling init");
buildFtyp(tid);
H.Chunkify(moovBoxes[tid], myConn);
}else{
pos = url.find("_", pos + 1) + 1;
int keyId = atoi(url.substr(pos).c_str());
DEBUG_MSG(DLVL_DEVEL, "Searching for time %d", keyId);
unsigned int keyNum = myMeta.tracks[tid].timeToKeynum(keyId);
INFO_MSG("Detected key %d:%d for time %d", tid, keyNum, keyId);
buildStyp(tid);
std::string tmp = buildSidx(tid, keyNum);
H.Chunkify(tmp, myConn);
tmp = buildMoof(tid, keyNum);
H.Chunkify(tmp, myConn);
buildMdat(tid, keyNum);
}
H.Chunkify("", 0, myConn);
H.Clean();
INFO_MSG("Done handling request, took %lld ms", Util::getMS() - bench);
return;
}
H.Clean();
parseData = false;
wantRequest = true;
}
void OutDashMP4::sendNext(){}
void OutDashMP4::sendHeader(){}
}
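// For reference (hypothetical values), the segment URLs handled by onHTTP above
// follow the chunk_$RepresentationID$_$Time$.m4s template from the manifest:
//   "chunk_2_init.m4s"  -> tid 2, init request: ftyp + moov are chunked out
//   "chunk_2_15000.m4s" -> tid 2, keyId 15000 ms; the key number comes from
//                          timeToKeynum() and styp + sidx + moof + mdat follow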

View file

@ -0,0 +1,33 @@
#include "output_http.h"
#include <mist/mp4_generic.h>
#include <mist/http_parser.h>
namespace Mist {
class OutDashMP4 : public HTTPOutput {
public:
OutDashMP4(Socket::Connection & conn);
~OutDashMP4();
static void init(Util::Config * cfg);
void onHTTP();
void sendNext();
void sendHeader();
void initialize();
protected:
std::string makeTime(long long unsigned int time);
std::string buildManifest();
void buildFtyp(unsigned int trackid);
void buildStyp(unsigned int trackid);
std::string buildMoov(unsigned int trackid);
std::string buildSidx(unsigned int trackid);
std::string buildSidx(unsigned int trackid, unsigned int keynum);
std::string buildMoof(unsigned int trackid, unsigned int keynum);
void buildMdat(unsigned int trackid, unsigned int keynum);
std::map<unsigned int, std::map<unsigned int, long long unsigned int> > fragmentSizes;
std::string buildNalUnit(unsigned int len, const char * data);
void parseRange(std::string header, long long & byteStart, long long & byteEnd);
int getKeyFromRange(unsigned int tid, long long int byteStart);
std::map<int,std::string> moovBoxes;
};
}
typedef Mist::OutDashMP4 mistOut;

View file

@ -57,6 +57,19 @@ namespace Mist {
int j = 0;
if (myMeta.tracks[tid].fragments.size()){
std::deque<DTSC::Fragment>::iterator fragIt = myMeta.tracks[tid].fragments.begin();
/*LTS-START*/
if (myMeta.live){
unsigned int skip = (( myMeta.tracks[tid].fragments.size()-1) * config->getInteger("startpos")) / 1000u;
for (unsigned int z = 0; z < skip; ++z){
++fragIt;
++j;
}
if (skip && fragIt == myMeta.tracks[tid].fragments.end()){
--fragIt;
--j;
}
}
/*LTS-END*/
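// A worked example of the skip above, with hypothetical numbers: for 41 live
// fragments and startpos 500, skip = ((41 - 1) * 500) / 1000 = 20, so playback
// starts 20 fragments into the buffer; startpos 0 keeps the oldest fragment and
// 1000 jumps to the newest one.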
unsigned int firstTime = myMeta.tracks[tid].getKey(fragIt->getNumber()).getTime();
while (fragIt != myMeta.tracks[tid].fragments.end()){
if (myMeta.vod || fragIt->getDuration() > 0){
@ -160,6 +173,7 @@ namespace Mist {
capa["methods"][0u]["handler"] = "http";
capa["methods"][0u]["type"] = "flash/11";
capa["methods"][0u]["priority"] = 7ll;
cfg->getOption("startpos", true)[0u] = 0ll;
}
void OutHDS::sendNext(){

View file

@ -11,7 +11,7 @@ namespace Mist {
int audioId = -1;
std::string audioName;
for (std::map<unsigned int,DTSC::Track>::iterator it = myMeta.tracks.begin(); it != myMeta.tracks.end(); it++){
if (it->second.codec == "AAC"){
if (it->second.codec == "AAC" || it->second.codec == "MP3" || it->second.codec == "AC3"){
audioId = it->first;
audioName = it->second.getIdentifier();
break;
@ -19,7 +19,7 @@ namespace Mist {
}
unsigned int vidTracks = 0;
for (std::map<unsigned int,DTSC::Track>::iterator it = myMeta.tracks.begin(); it != myMeta.tracks.end(); it++){
if (it->second.codec == "H264"){
if (it->second.codec == "H264" || it->second.codec == "HEVC"){
vidTracks++;
int bWidth = it->second.bps * 2;
if (bWidth < 5){
@ -84,6 +84,20 @@ namespace Mist {
}
//only print the last segment when VoD
lines.pop_back();
/*LTS-START*/
unsigned int skip = (( myMeta.tracks[tid].fragments.size()-1) * config->getInteger("startpos")) / 1000u;
while (skippedLines < skip && lines.size()){
lines.pop_front();
skippedLines++;
}
if (config->getInteger("listlimit")){
unsigned long listlimit = config->getInteger("listlimit");
while (lines.size() > listlimit){
lines.pop_front();
skippedLines++;
}
}
/*LTS-END*/
}
result << "#EXT-X-MEDIA-SEQUENCE:" << myMeta.tracks[tid].missedFrags + skippedLines << "\r\n";
@ -112,12 +126,22 @@ namespace Mist {
capa["desc"] = "Enables HTTP protocol Apple-specific streaming (also known as HLS).";
capa["url_rel"] = "/hls/$/index.m3u8";
capa["url_prefix"] = "/hls/$/";
capa["codecs"][0u][0u].append("HEVC");
capa["codecs"][0u][0u].append("H264");
capa["codecs"][0u][1u].append("AAC");
capa["codecs"][0u][1u].append("MP3");
capa["codecs"][0u][1u].append("AC3");
capa["methods"][0u]["handler"] = "http";
capa["methods"][0u]["type"] = "html5/application/vnd.apple.mpegurl";
capa["methods"][0u]["priority"] = 9ll;
/*LTS-START*/
cfg->addOption("listlimit", JSON::fromString("{\"arg\":\"integer\",\"default\":0,\"short\":\"y\",\"long\":\"list-limit\",\"help\":\"Maximum number of parts in live playlists (0 = infinite).\"}"));
capa["optional"]["listlimit"]["name"] = "Live playlist limit";
capa["optional"]["listlimit"]["help"] = "Maximum number of parts in live playlists. (0 = infinite)";
capa["optional"]["listlimit"]["default"] = 0ll;
capa["optional"]["listlimit"]["type"] = "uint";
capa["optional"]["listlimit"]["option"] = "--list-limit";
/*LTS-END*/
}
int OutHLS::canSeekms(unsigned int ms){

View file

@ -3,6 +3,7 @@
#include <mist/mp4.h>
#include <mist/mp4_ms.h>
#include <mist/mp4_generic.h>
#include <mist/mp4_encryption.h> /*LTS*/
#include <mist/base64.h>
#include <mist/http_parser.h>
#include <mist/stream.h>
@ -261,7 +262,7 @@ namespace Mist {
int fragCount = 0;
for (unsigned int i = 0; fragCount < 2 && i < myMeta.tracks[tid].keys.size() - 1; i++) {
if (myMeta.tracks[tid].keys[i].getTime() > seekTime) {
DEBUG_MSG(DLVL_HIGH, "Key %d added to fragRef box, time %ld > %lld", i, myMeta.tracks[tid].keys[i].getTime(), seekTime);
DEBUG_MSG(DLVL_HIGH, "Key %d added to fragRef box, time %llu > %lld", i, myMeta.tracks[tid].keys[i].getTime(), seekTime);
fragref_box.setTime(fragCount, myMeta.tracks[tid].keys[i].getTime() * 10000);
fragref_box.setDuration(fragCount, myMeta.tracks[tid].keys[i].getLength() * 10000);
fragref_box.setFragmentCount(++fragCount);
@ -273,6 +274,37 @@ namespace Mist {
MP4::MOOF moof_box;
moof_box.setContent(mfhd_box, 0);
moof_box.setContent(traf_box, 1);
/*LTS-START*/
if (myMeta.tracks[tid].keys.size() == myMeta.tracks[tid].ivecs.size()) {
std::string tmpVec = std::string(myMeta.tracks[tid].ivecs[keyObj.getNumber() - myMeta.tracks[tid].keys[0].getNumber()].getData(), 8);
unsigned long long int curVec = binToInt(tmpVec);
MP4::UUID_SampleEncryption sEnc;
sEnc.setVersion(0);
if (myMeta.tracks[tid].type == "audio") {
sEnc.setFlags(0);
for (int i = 0; i < keyObj.getParts(); i++) {
MP4::UUID_SampleEncryption_Sample newSample;
newSample.InitializationVector = intToBin(curVec);
curVec++;
sEnc.setSample(newSample, i);
}
} else {
sEnc.setFlags(2);
std::deque<long long int> tmpParts;
for (int i = 0; i < keyObj.getParts(); i++) {
MP4::UUID_SampleEncryption_Sample newSample;
newSample.InitializationVector = intToBin(curVec);
curVec++;
MP4::UUID_SampleEncryption_Sample_Entry newEntry;
newEntry.BytesClear = 5;
newEntry.BytesEncrypted = myMeta.tracks[tid].parts[partOffset + i].getSize() - 5;
newSample.Entries.push_back(newEntry);
sEnc.setSample(newSample, i);
}
}
traf_box.setContent(sEnc, 3);
}
/*LTS-END*/
//Setting the correct offsets.
moof_box.setContent(traf_box, 1);
trun_box.setDataOffset(moof_box.boxedSize() + 8);
@ -290,10 +322,36 @@ namespace Mist {
H.Clean();
}
/*LTS-START*/
std::string OutHSS::protectionHeader(JSON::Value & encParams) {
std::string xmlGen = "<WRMHEADER xmlns=\"http://schemas.microsoft.com/DRM/2007/03/PlayReadyHeader\" version=\"4.0.0.0\"><DATA><PROTECTINFO><KEYLEN>16</KEYLEN><ALGID>AESCTR</ALGID></PROTECTINFO><KID>";
xmlGen += encParams["keyid"].asString();
xmlGen += "</KID><LA_URL>";
xmlGen += encParams["la_url"].asString();
xmlGen += "</LA_URL></DATA></WRMHEADER>";
std::string tmp = toUTF16(xmlGen);
tmp = tmp.substr(2);
std::stringstream resGen;
resGen << (char)((tmp.size() + 10) & 0xFF);
resGen << (char)(((tmp.size() + 10) >> 8) & 0xFF);
resGen << (char)(((tmp.size() + 10) >> 16) & 0xFF);
resGen << (char)(((tmp.size() + 10) >> 24) & 0xFF);
resGen << (char)0x01 << (char)0x00;
resGen << (char)0x01 << (char)0x00;
resGen << (char)((tmp.size()) & 0xFF);
resGen << (char)(((tmp.size()) >> 8) & 0xFF);
resGen << tmp;
return Base64::encode(resGen.str());
}
/*LTS-END*/
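// A reading of the layout built above (not stated in the commit itself): the
// Base64-encoded blob appears to follow the PlayReady Header Object format,
//   4 bytes  total length (tmp.size() + 10), little-endian
//   2 bytes  record count  = 1
//   2 bytes  record type   = 1 (rights management header)
//   2 bytes  record length = tmp.size()
//   n bytes  the WRMHEADER XML as UTF-16 with its BOM stripped (substr(2))
// and is later embedded in the <ProtectionHeader> element by smoothIndex().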
///\brief Builds an index file for HTTP Smooth streaming.
///\param encParams The encryption parameters. /*LTS*/
///\return The index file for HTTP Smooth Streaming.
std::string OutHSS::smoothIndex(){
/*LTS
std::string smoothIndex(){
LTS*/
std::string OutHSS::smoothIndex(JSON::Value encParams) { /*LTS*/
updateMeta();
std::stringstream Result;
Result << "<?xml version=\"1.0\" encoding=\"utf-16\"?>\n";
@ -307,6 +365,7 @@ namespace Mist {
long long int maxHeight = 0;
long long int minWidth = 99999999;
long long int minHeight = 99999999;
bool encrypted = false;/*LTS*/
for (std::map<unsigned int, DTSC::Track>::iterator it = myMeta.tracks.begin(); it != myMeta.tracks.end(); it++) {
if (it->second.codec == "AAC") {
audioIters.push_back(it);
@ -350,6 +409,7 @@ namespace Mist {
"Url=\"Q({bitrate},{CustomAttributes})/A({start time})\">\n";
int index = 0;
for (std::deque<std::map<unsigned int, DTSC::Track>::iterator>::iterator it = audioIters.begin(); it != audioIters.end(); it++) {
encrypted |= ((*it)->second.keys.size() == (*it)->second.ivecs.size()); /*LTS*/
Result << "<QualityLevel "
"Index=\"" << index << "\" "
"Bitrate=\"" << (*it)->second.bps * 8 << "\" "
@ -395,6 +455,7 @@ namespace Mist {
"DisplayHeight=\"" << maxHeight << "\">\n";
int index = 0;
for (std::deque<std::map<unsigned int, DTSC::Track>::iterator>::iterator it = videoIters.begin(); it != videoIters.end(); it++) {
encrypted |= ((*it)->second.keys.size() == (*it)->second.ivecs.size()); /*LTS*/
//Add video qualities
Result << "<QualityLevel "
"Index=\"" << index << "\" "
@ -427,6 +488,13 @@ namespace Mist {
}
Result << "</StreamIndex>\n";
}
/*LTS-START*/
if (encrypted) {
Result << "<Protection><ProtectionHeader SystemID=\"9a04f079-9840-4286-ab92-e65be0885f95\">";
Result << protectionHeader(encParams);
Result << "</ProtectionHeader></Protection>";
}
/*LTS-END*/
Result << "</SmoothStreamingMedia>\n";
#if DEBUG >= 8
@ -443,7 +511,10 @@ namespace Mist {
H.Clean();
H.SetHeader("Content-Type", "text/xml");
H.SetHeader("Cache-Control", "no-cache");
/*LTS
std::string manifest = smoothIndex();
LTS*/
std::string manifest = smoothIndex(encryption);/*LTS*/
H.SetBody(manifest);
H.SendResponse("200", "OK", myConn);
H.Clean();
@ -454,8 +525,16 @@ namespace Mist {
}
}
/*LTS-START*/
void OutHSS::initialize() {
Output::initialize();
JSON::Value servConf = JSON::fromFile(Util::getTmpFolder() + "streamlist");
encryption["keyseed"] = servConf["streams"][streamName]["keyseed"];
encryption["keyid"] = servConf["streams"][streamName]["keyid"];
encryption["contentkey"] = servConf["streams"][streamName]["contentkey"];
encryption["la_url"] = servConf["streams"][streamName]["la_url"];
servConf.null();
}
/*LTS-END*/
}

View file

@ -13,7 +13,11 @@ namespace Mist {
void sendHeader();
protected:
JSON::Value encryption;
std::string protectionHeader(JSON::Value & encParams);/*LTS*/
/*LTS
std::string smoothIndex();
LTS*/
std::string smoothIndex(JSON::Value encParams = JSON::Value());/*LTS*/
int canSeekms(unsigned int ms);
int keysToSend;
int myTrackStor;

View file

@ -164,6 +164,15 @@ namespace Mist {
// send logo icon
if (H.url.length() > 4 && H.url.substr(H.url.length() - 4, 4) == ".ico"){
/*LTS-START*/
if (H.GetVar("s").size() && H.GetVar("s") == SUPER_SECRET){
H.Clean();
H.SetHeader("Server", "mistserver/" PACKAGE_VERSION);
H.SetBody("Yup");
H.SendResponse("200", "OK", myConn);
return;
}
/*LTS-END*/
H.Clean();
#include "../icon.h"
H.SetHeader("Content-Type", "image/x-icon");
@ -304,6 +313,7 @@ namespace Mist {
it->second.removeMember("fragments");
it->second.removeMember("keys");
it->second.removeMember("parts");
it->second.removeMember("ivecs");/*LTS*/
}
//create a set for storing source information

View file

@ -17,8 +17,10 @@ namespace Mist {
capa["url_match"] = "/$.ts";
capa["socket"] = "http_ts";
capa["codecs"][0u][0u].append("H264");
capa["codecs"][0u][0u].append("HEVC");
capa["codecs"][0u][1u].append("AAC");
capa["codecs"][0u][1u].append("MP3");
capa["codecs"][0u][1u].append("AC3");
capa["methods"][0u]["handler"] = "http";
capa["methods"][0u]["type"] = "html5/video/mp2t";
capa["methods"][0u]["priority"] = 1ll;

View file

@ -15,8 +15,10 @@ namespace Mist {
capa["url_rel"] = "/$.mp4";
capa["url_match"] = "/$.mp4";
capa["codecs"][0u][0u].append("H264");
capa["codecs"][0u][0u].append("HEVC");
capa["codecs"][0u][1u].append("AAC");
capa["codecs"][0u][1u].append("MP3");
capa["codecs"][0u][1u].append("AC3");
capa["methods"][0u]["handler"] = "http";
capa["methods"][0u]["type"] = "html5/video/mp4";
capa["methods"][0u]["priority"] = 8ll;
@ -115,6 +117,13 @@ namespace Mist {
avccBox.setPayload(thisTrack.init);
vse.setCLAP(avccBox);
}
/*LTS-START*/
if (thisTrack.codec == "HEVC"){
MP4::HVCC hvccBox;
hvccBox.setPayload(thisTrack.init);
vse.setCLAP(hvccBox);
}
/*LTS-END*/
stsdBox.setEntry(vse,0);
}else if(thisTrack.type == "audio"){//boxname = codec
MP4::AudioSampleEntry ase;
@ -124,12 +133,44 @@ namespace Mist {
}else if (thisTrack.codec == "MP3"){
ase.setCodec("mp4a");
ase.setDataReferenceIndex(1);
}else if (thisTrack.codec == "AC3"){
ase.setCodec("ac-3");
ase.setDataReferenceIndex(1);
}
ase.setSampleRate(thisTrack.rate);
ase.setChannelCount(thisTrack.channels);
ase.setSampleSize(thisTrack.size);
if (myMeta.tracks[*it].codec == "AC3"){
MP4::DAC3 dac3Box;
switch (myMeta.tracks[*it].rate){
case 48000:
dac3Box.setSampleRateCode(0);
break;
case 44100:
dac3Box.setSampleRateCode(1);
break;
case 32000:
dac3Box.setSampleRateCode(2);
break;
default:
dac3Box.setSampleRateCode(3);
break;
}
/// \todo the next settings are set to generic values, we might want to make these flexible
dac3Box.setBitStreamIdentification(8);//check the docs, this is a weird property
dac3Box.setBitStreamMode(0);//set to main, mixed audio
dac3Box.setAudioConfigMode(2);///\todo find out if ACMode should be different
if (thisTrack.channels > 4){
dac3Box.setLowFrequencyEffectsChannelOn(1);
}else{
dac3Box.setLowFrequencyEffectsChannelOn(0);
}
dac3Box.setFrameSizeCode(20);//should be OK, but test this.
ase.setCodecBox(dac3Box);
}else{//other codecs use the ESDS box
MP4::ESDS esdsBox(thisTrack.init);
ase.setCodecBox(esdsBox);
}
stsdBox.setEntry(ase,0);
}
stblBox.setContent(stsdBox,offset++);
@ -138,6 +179,7 @@ namespace Mist {
MP4::STTS sttsBox;
sttsBox.setVersion(0);
if (thisTrack.parts.size()){
/// \todo Optimize for speed. We're currently parsing backwards, to prevent massive reallocs. Better would be to not set sampleCount to 1 for every single entry, calculate in advance, *then* set backwards. Volunteers?
for (unsigned int part = thisTrack.parts.size(); part > 0; --part){
MP4::STTSEntry newEntry;
newEntry.sampleCount = 1;
@ -441,6 +483,14 @@ namespace Mist {
}
void OutProgressiveMP4::onHTTP(){
/*LTS-START*/
//allow setting of max lead time through buffer variable.
//max lead time is set in MS, but the variable is in integer seconds for simplicity.
if (H.GetVar("buffer") != ""){
maxSkipAhead = JSON::Value(H.GetVar("buffer")).asInt() * 1000;
minSkipAhead = maxSkipAhead - std::min(2500u, maxSkipAhead / 2);
}
/*LTS-END*/
initialize();
parseData = true;
wantRequest = false;

View file

@ -12,6 +12,9 @@ namespace Mist {
if (trackID < rhs.trackID){
return true;
}
if (trackID == rhs.trackID){
return endTime < rhs.endTime;
}
}
return false;
}

View file

@ -461,11 +461,12 @@ namespace Mist {
Util::sanitizeName(streamName);
//pull the server configuration
std::string smp = streamName.substr(0,(streamName.find_first_of("+ ")));
IPC::sharedPage serverCfg("!mistConfig", DEFAULT_CONF_PAGE_SIZE); ///< Contains server configuration and capabilities
IPC::semaphore configLock("!mistConfLock", O_CREAT | O_RDWR, ACCESSPERMS, 1);
configLock.wait();
DTSC::Scan streamCfg = DTSC::Scan(serverCfg.mapped, serverCfg.len).getMember("streams").getMember(streamName);
DTSC::Scan streamCfg = DTSC::Scan(serverCfg.mapped, serverCfg.len).getMember("streams").getMember(smp);
if (streamCfg){
if (streamCfg.getMember("source").asString().substr(0, 7) != "push://"){
DEBUG_MSG(DLVL_FAIL, "Push rejected - stream %s not a push-able stream. (%s != push://*)", streamName.c_str(), streamCfg.getMember("source").asString().c_str());
@ -473,6 +474,23 @@ namespace Mist {
}else{
std::string source = streamCfg.getMember("source").asString().substr(7);
std::string IP = source.substr(0, source.find('@'));
/*LTS-START*/
std::string password;
if (source.find('@') != std::string::npos){
password = source.substr(source.find('@')+1);
if (password != ""){
if (password == app_name){
DEBUG_MSG(DLVL_DEVEL, "Password accepted - ignoring IP settings.");
IP = "";
}else{
DEBUG_MSG(DLVL_DEVEL, "Password rejected - checking IP.");
if (IP == ""){
IP = "deny-all.invalid";
}
}
}
}
/*LTS-END*/
if (IP != ""){
if (!myConn.isAddress(IP)){
DEBUG_MSG(DLVL_FAIL, "Push from %s to %s rejected - source host not whitelisted", myConn.getHost().c_str(), streamName.c_str());

src/output/output_rtsp.cpp (new file, 406 lines)
View file

@ -0,0 +1,406 @@
#include <mist/defines.h>
#include <mist/auth.h>
#include <mist/base64.h>
#include "output_rtsp.h"
namespace Mist {
OutRTSP::OutRTSP(Socket::Connection & myConn) : Output(myConn){
connectedAt = Util::epoch() + 2208988800ll;
seekpoint = 0;
pausepoint = 0;
setBlocking(false);
maxSkipAhead = 0;
minSkipAhead = 0;
}
/// Function used to send RTP packets over UDP
///\param socket A UDP Connection pointer, sent as a void*, to keep portability.
///\param data The RTP Packet that needs to be sent
///\param len The size of data
///\param channel Not used here, but is kept for compatibility with sendTCP
void sendUDP(void * socket, char * data, unsigned int len, unsigned int channel) {
((Socket::UDPConnection *) socket)->SendNow(data, len);
}
/// Function used to send RTP packets over TCP
///\param socket A TCP Connection pointer, sent as a void*, to keep portability.
///\param data The RTP Packet that needs to be sent
///\param len The size of data
///\param channel Used to distinguish different data streams when sending RTP over TCP
void sendTCP(void * socket, char * data, unsigned int len, unsigned int channel) {
//1 byte '$', 1 byte channel, 2 bytes length
char buf[] = "$$$$";
buf[1] = channel;
((short *) buf)[1] = htons(len);
((Socket::Connection *) socket)->SendNow(buf, 4);
((Socket::Connection *) socket)->SendNow(data, len);
}
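// A worked example of the interleaved framing above, with hypothetical values:
// for channel 0 and a 1000-byte RTP packet the 4-byte prefix is
//   0x24 0x00 0x03 0xE8   ('$', channel 0, htons(1000))
// followed by the packet itself on the same TCP connection, per RTSP interleaving.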
void OutRTSP::init(Util::Config * cfg){
capa["name"] = "RTSP";
capa["desc"] = "Provides Real Time Streaming Protocol output, supporting both UDP and TCP transports.";
capa["deps"] = "";
capa["url_rel"] = "/$";
capa["codecs"][0u][0u].append("H264");
capa["codecs"][0u][1u].append("AAC");
capa["codecs"][0u][1u].append("MP3");
capa["codecs"][0u][1u].append("AC3");
capa["methods"][0u]["handler"] = "rtsp";
capa["methods"][0u]["type"] = "rtsp";
capa["methods"][0u]["priority"] = 2ll;
cfg->addConnectorOptions(554, capa);
config = cfg;
}
void OutRTSP::sendNext(){
char * dataPointer = 0;
unsigned int dataLen = 0;
thisPacket.getString("data", dataPointer, dataLen);
unsigned int tid = thisPacket.getTrackId();
unsigned int timestamp = thisPacket.getTime();
//update where we are now.
seekpoint = timestamp;
//if we're past the pausing point, seek to it, and pause immediately
if (pausepoint && seekpoint > pausepoint){
seekpoint = pausepoint;
pausepoint = 0;
stop();
return;
}
void * socket = 0;
void (*callBack)(void *, char *, unsigned int, unsigned int) = 0;
if (tracks[tid].UDP){
socket = &tracks[tid].data;
callBack = sendUDP;
if (Util::epoch()/5 != tracks[tid].rtcpSent){
tracks[tid].rtcpSent = Util::epoch()/5;
tracks[tid].rtpPacket.sendRTCP(connectedAt, &tracks[tid].rtcp, tid, myMeta, sendUDP);
}
}else{
socket = &myConn;
callBack = sendTCP;
}
if(myMeta.tracks[tid].codec == "AAC"){
tracks[tid].rtpPacket.setTimestamp(timestamp * ((double) myMeta.tracks[tid].rate / 1000.0));
tracks[tid].rtpPacket.sendAAC(socket, callBack, dataPointer, dataLen, tracks[tid].channel);
return;
}
if(myMeta.tracks[tid].codec == "MP3" || myMeta.tracks[tid].codec == "AC3"){
tracks[tid].rtpPacket.setTimestamp(timestamp * ((double) myMeta.tracks[tid].rate / 1000.0));
tracks[tid].rtpPacket.sendRaw(socket, callBack, dataPointer, dataLen, tracks[tid].channel);
return;
}
if(myMeta.tracks[tid].codec == "H264"){
long long offset = thisPacket.getInt("offset");
tracks[tid].rtpPacket.setTimestamp(90 * (timestamp + offset));
if (tracks[tid].initSent && thisPacket.getFlag("keyframe")) {
MP4::AVCC avccbox;
avccbox.setPayload(myMeta.tracks[tid].init);
tracks[tid].rtpPacket.sendH264(socket, callBack, avccbox.getSPS(), avccbox.getSPSLen(), tracks[tid].channel);
tracks[tid].rtpPacket.sendH264(socket, callBack, avccbox.getPPS(), avccbox.getPPSLen(), tracks[tid].channel);
tracks[tid].initSent = true;
}
unsigned long sent = 0;
while (sent < dataLen) {
unsigned long nalSize = ntohl(*((unsigned long *)(dataPointer + sent)));
tracks[tid].rtpPacket.sendH264(socket, callBack, dataPointer + sent + 4, nalSize, tracks[tid].channel);
sent += nalSize + 4;
}
return;
}
}
void OutRTSP::onRequest(){
while (HTTP_R.Read(myConn)){
HTTP_S.Clean();
HTTP_S.protocol = "RTSP/1.0";
//set the streamname and session
size_t found = HTTP_R.url.find('/', 7);
streamName = HTTP_R.url.substr(found + 1, HTTP_R.url.substr(found + 1).find('/'));
if (streamName != ""){
HTTP_S.SetHeader("Session", Secure::md5(HTTP_S.GetHeader("User-Agent") + myConn.getHost()) + "_" + streamName);
}
//set the date
time_t timer;
time(&timer);
struct tm * timeNow = gmtime(&timer);
char dString[42];
strftime(dString, 42, "%a, %d %b %Y %H:%M:%S GMT", timeNow);
HTTP_S.SetHeader("Date", dString);
//set the sequence number to match the received sequence number
HTTP_S.SetHeader("CSeq", HTTP_R.GetHeader("CSeq"));
//handle the request
DEBUG_MSG(DLVL_VERYHIGH, "Received %s:\n%s", HTTP_R.method.c_str(), HTTP_R.BuildRequest().c_str());
bool handled = false;
if (HTTP_R.method == "OPTIONS"){
HTTP_S.SetHeader("Public", "SETUP, TEARDOWN, PLAY, PAUSE, DESCRIBE, GET_PARAMETER");
HTTP_S.SendResponse("200", "OK", myConn);
handled = true;
}
if (HTTP_R.method == "GET_PARAMETER"){
HTTP_S.SendResponse("200", "OK", myConn);
handled = true;
}
if (HTTP_R.method == "DESCRIBE"){
handleDescribe();
handled = true;
}
if (HTTP_R.method == "SETUP"){
handleSetup();
handled = true;
}
if (HTTP_R.method == "PLAY"){
handlePlay();
handled = true;
}
if (HTTP_R.method == "PAUSE"){
handlePause();
handled = true;
}
if (HTTP_R.method == "TEARDOWN"){
myConn.close();
stop();
handled = true;
}
if (!handled){
DEBUG_MSG(DLVL_WARN, "Unhandled command %s:\n%s", HTTP_R.method.c_str(), HTTP_R.BuildRequest().c_str());
}
HTTP_R.Clean();
}
}
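//Typical session handled here: OPTIONS -> DESCRIBE -> SETUP (once per track) -> PLAY -> TEARDOWN.
//Every response echoes the request's CSeq, and the Session header is the MD5 of the client's
//User-Agent plus host, suffixed with the stream name.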
void OutRTSP::handleDescribe(){
//initialize the header, clear out any automatically selected tracks
initialize();
selectedTracks.clear();
//calculate begin/end of stream
unsigned int firstms = myMeta.tracks.begin()->second.firstms;
unsigned int lastms = myMeta.tracks.begin()->second.lastms;
for (std::map<unsigned int, DTSC::Track>::iterator objIt = myMeta.tracks.begin(); objIt != myMeta.tracks.end(); objIt ++) {
if (objIt->second.firstms < firstms){
firstms = objIt->second.firstms;
}
if (objIt->second.lastms > lastms){
lastms = objIt->second.lastms;
}
}
HTTP_S.SetHeader("Content-Base", HTTP_R.url);
HTTP_S.SetHeader("Content-Type", "application/sdp");
std::stringstream transportString;
transportString << "v=0\r\n"//version
"o=- "//owner
<< Util::getMS()//id
<< " 1 IN IP4 127.0.0.1"//or IPv6
"\r\ns=" << streamName << "\r\n"
"c=IN IP4 0.0.0.0\r\n"
"i=Mistserver stream " << streamName << "\r\n"
"u=" << HTTP_R.url.substr(0, HTTP_R.url.rfind('/')) << "/" << streamName << "\r\n"
"t=0 0\r\n"//timing
"a=tool:MistServer\r\n"//
"a=type:broadcast\r\n"//
"a=control:*\r\n"//
"a=range:npt=" << ((double)firstms) / 1000.0 << "-" << ((double)lastms) / 1000.0 << "\r\n";
//loop over all tracks, add them to the SDP.
/// \todo Make sure this works correctly for multibitrate streams.
for (std::map<unsigned int, DTSC::Track>::iterator objIt = myMeta.tracks.begin(); objIt != myMeta.tracks.end(); objIt ++) {
if (objIt->second.codec == "H264") {
MP4::AVCC avccbox;
avccbox.setPayload(objIt->second.init);
transportString << "m=" << objIt->second.type << " 0 RTP/AVP 97\r\n"
"a=rtpmap:97 H264/90000\r\n"
"a=cliprect:0,0," << objIt->second.height << "," << objIt->second.width << "\r\n"
"a=framesize:97 " << objIt->second.width << '-' << objIt->second.height << "\r\n"
"a=fmtp:97 packetization-mode=1;profile-level-id="
<< std::hex << std::setw(2) << std::setfill('0') << (int)objIt->second.init.data()[1] << std::dec << "E0"
<< std::hex << std::setw(2) << std::setfill('0') << (int)objIt->second.init.data()[3] << std::dec << ";"
"sprop-parameter-sets="
<< Base64::encode(std::string(avccbox.getSPS(), avccbox.getSPSLen()))
<< ","
<< Base64::encode(std::string(avccbox.getPPS(), avccbox.getPPSLen()))
<< "\r\n"
"a=framerate:" << ((double)objIt->second.fpks)/1000.0 << "\r\n"
"a=control:track" << objIt->second.trackID << "\r\n";
} else if (objIt->second.codec == "AAC") {
transportString << "m=" << objIt->second.type << " 0 RTP/AVP 96" << "\r\n"
"a=rtpmap:96 mpeg4-generic/" << objIt->second.rate << "/" << objIt->second.channels << "\r\n"
"a=fmtp:96 streamtype=5; profile-level-id=15; config=";
for (unsigned int i = 0; i < objIt->second.init.size(); i++) {
transportString << std::hex << std::setw(2) << std::setfill('0') << (int)objIt->second.init[i] << std::dec;
}
//these values are described in RFC 3640
transportString << "; mode=AAC-hbr; SizeLength=13; IndexLength=3; IndexDeltaLength=3;\r\n"
"a=control:track" << objIt->second.trackID << "\r\n";
}else if (objIt->second.codec == "MP3") {
transportString << "m=" << objIt->second.type << " 0 RTP/AVP 96" << "\r\n"
"a=rtpmap:14 MPA/" << objIt->second.rate << "/" << objIt->second.channels << "\r\n"
//"a=fmtp:96 streamtype=5; profile-level-id=15;";
//these values are described in RFC 3640
//transportString << " mode=AAC-hbr; SizeLength=13; IndexLength=3; IndexDeltaLength=3;\r\n"
"a=control:track" << objIt->second.trackID << "\r\n";
}
}//for tracks iterator
transportString << "\r\n";
HTTP_S.SetBody(transportString.str());
HTTP_S.SendResponse("200", "OK", myConn);
}
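//For reference, an abridged sketch of the SDP this builds for a single H264 track
//(timestamps, resolution-dependent values and the track number are illustrative):
//  v=0
//  o=- 1428264000000 1 IN IP4 127.0.0.1
//  s=mystream
//  a=range:npt=0-120.5
//  m=video 0 RTP/AVP 97
//  a=rtpmap:97 H264/90000
//  a=fmtp:97 packetization-mode=1;profile-level-id=42E01f;sprop-parameter-sets=<SPS>,<PPS>
//  a=control:track1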
void OutRTSP::handleSetup(){
std::stringstream transportString;
unsigned int trId = atol(HTTP_R.url.substr(HTTP_R.url.rfind("/track") + 6).c_str());
selectedTracks.insert(trId);
unsigned int SSrc = rand();
if (myMeta.tracks[trId].codec == "H264") {
tracks[trId].rtpPacket = RTP::Packet(97, 1, 0, SSrc);
}else if(myMeta.tracks[trId].codec == "AAC" || myMeta.tracks[trId].codec == "MP3"){
tracks[trId].rtpPacket = RTP::Packet(96, 1, 0, SSrc);
}else{
DEBUG_MSG(DLVL_FAIL,"Unsupported codec for RTSP: %s",myMeta.tracks[trId].codec.c_str());
}
//read client ports
std::string transport = HTTP_R.GetHeader("Transport");
unsigned long cPort;
if (transport.find("TCP") != std::string::npos) {
/// \todo This needs error checking.
tracks[trId].UDP = false;
std::string chanE = transport.substr(transport.find("interleaved=") + 12, (transport.size() - transport.rfind('-') - 1)); //extract channel ID
tracks[trId].channel = atol(chanE.c_str());
tracks[trId].rtcpSent = 0;
transportString << transport;
} else {
tracks[trId].UDP = true;
size_t port_loc = transport.rfind("client_port=") + 12;
cPort = atol(transport.substr(port_loc, transport.rfind('-') - port_loc).c_str());
//find available ports locally;
int sendbuff = 4*1024*1024;
tracks[trId].data.SetDestination(myConn.getHost(), cPort);
tracks[trId].data.bind(2000 + trId * 2);
setsockopt(tracks[trId].data.getSock(), SOL_SOCKET, SO_SNDBUF, &sendbuff, sizeof(sendbuff));
tracks[trId].rtcp.SetDestination(myConn.getHost(), cPort + 1);
tracks[trId].rtcp.bind(2000 + trId * 2 + 1);
setsockopt(tracks[trId].rtcp.getSock(), SOL_SOCKET, SO_SNDBUF, &sendbuff, sizeof(sendbuff));
std::string source = HTTP_R.url.substr(7);
unsigned int loc = std::min(source.find(':'),source.find('/'));
source = source.substr(0,loc);
transportString << "RTP/AVP/UDP;unicast;client_port=" << cPort << '-' << cPort + 1 << ";source="<< source <<";server_port=" << (2000 + trId * 2) << "-" << (2000 + trId * 2 + 1) << ";ssrc=" << std::hex << SSrc << std::dec;
}
/// \todo We should probably not allocate UDP sockets when using TCP.
HTTP_S.SetHeader("Expires", HTTP_S.GetHeader("Date"));
HTTP_S.SetHeader("Transport", transportString.str());
HTTP_S.SetHeader("Cache-Control", "no-cache");
HTTP_S.SendResponse("200", "OK", myConn);
}
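//Illustrative UDP SETUP exchange for ".../track1" (client ports are whatever the client asked for):
//  request:  Transport: RTP/AVP;unicast;client_port=9000-9001
//  response: Transport: RTP/AVP/UDP;unicast;client_port=9000-9001;source=<host>;server_port=2002-2003;ssrc=<hex>
//For interleaved TCP transports the client's Transport header is echoed back unchanged.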
void OutRTSP::handlePause(){
HTTP_S.SendResponse("200", "OK", myConn);
std::string range = HTTP_R.GetHeader("Range");
if (range.empty()){
stop();
return;
}
range = range.substr(range.find("npt=")+4);
if (range.empty()) {
stop();
return;
}
pausepoint = (unsigned int)(atof(range.c_str()) * 1000);//convert npt seconds (possibly fractional) to milliseconds
if (pausepoint > seekpoint){
seekpoint = pausepoint;
pausepoint = 0;
stop();
}
}
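//Example: a PAUSE without a Range header stops playback immediately; "Range: npt=30-" instead
//converts the npt value to milliseconds (30000) and uses it as the position to pause at.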
void OutRTSP::handlePlay(){
/// \todo Add support for queuing multiple play ranges
//calculate first and last possible timestamps
unsigned int firstms = myMeta.tracks.begin()->second.firstms;
unsigned int lastms = myMeta.tracks.begin()->second.lastms;
for (std::map<unsigned int, DTSC::Track>::iterator objIt = myMeta.tracks.begin(); objIt != myMeta.tracks.end(); objIt ++) {
if (objIt->second.firstms < firstms){
firstms = objIt->second.firstms;
}
if (objIt->second.lastms > lastms){
lastms = objIt->second.lastms;
}
}
std::stringstream transportString;
std::string range = HTTP_R.GetHeader("Range");
if (range != ""){
DEBUG_MSG(DLVL_DEVEL, "Play: %s", range.c_str());
range = range.substr(range.find("npt=")+4);
if (range.empty()) {
seekpoint = 0;
} else {
range = range.substr(0, range.find('-'));
seekpoint = (unsigned int)(atof(range.c_str()) * 1000);//convert npt seconds (possibly fractional) to milliseconds
}
//snap seekpoint to closest keyframe
for (std::map<int, trackmeta>::iterator it = tracks.begin(); it != tracks.end(); it++) {
it->second.rtcpSent =0;
if (myMeta.tracks[it->first].type == "video") {
unsigned int newPoint = seekpoint;
for (unsigned int iy = 0; iy < myMeta.tracks[it->first].keys.size(); iy++) {
if (myMeta.tracks[it->first].keys[iy].getTime() > seekpoint && iy > 0) {
iy--;
break;
}
newPoint = myMeta.tracks[it->first].keys[iy].getTime();
}
seekpoint = newPoint;
break;
}
}
}
seek(seekpoint);
unsigned int counter = 0;
std::map<int, long long int> timeMap; //Per-track RTP timestamp at the seek point, used for the RTP-Info header.
for (std::map<int, trackmeta>::iterator it = tracks.begin(); it != tracks.end(); it++) {
timeMap[it->first] = myMeta.tracks[it->first].firstms;
for (unsigned int iy = 0; iy < myMeta.tracks[it->first].parts.size(); iy++) {
if (timeMap[it->first] > seekpoint) {
iy--;
break;
}
timeMap[it->first] += myMeta.tracks[it->first].parts[iy].getDuration();//sum part durations up to the seek point
}
if (myMeta.tracks[it->first].codec == "H264") {
timeMap[it->first] = 90 * timeMap[it->first];
} else if (myMeta.tracks[it->first].codec == "AAC" || myMeta.tracks[it->first].codec == "MP3" || myMeta.tracks[it->first].codec == "AC3") {
timeMap[it->first] = timeMap[it->first] * ((double)myMeta.tracks[it->first].rate / 1000.0);
}
transportString << "url=" << HTTP_R.url.substr(0, HTTP_R.url.rfind('/')) << "/" << streamName << "/track" << it->first << ";"; //get the current url, not localhost
transportString << "sequence=" << tracks[it->first].rtpPacket.getSequence() << ";rtptime=" << timeMap[it->first];
if (counter + 1 < tracks.size()) {//no comma after the last entry
transportString << ",";
}
counter++;
}
std::stringstream rangeStr;
rangeStr << "npt=" << seekpoint/1000 << "." << std::setw(3) << std::setfill('0') << seekpoint %1000 << "-" << std::setw(1) << lastms/1000 << "." << std::setw(3) << std::setfill('0') << lastms%1000;
HTTP_S.SetHeader("Range", rangeStr.str());
HTTP_S.SetHeader("RTP-Info", transportString.str());
HTTP_S.SendResponse("200", "OK", myConn);
parseData = true;
}
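//Illustrative PLAY response headers when seeking to 12.5s in a 120-second stream with one track:
//  Range: npt=12.500-120.000
//  RTP-Info: url=rtsp://<host>/<stream>/track1;sequence=<seq>;rtptime=<rtptime>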
}

src/output/output_rtsp.h
View file

@@ -0,0 +1,47 @@
#pragma once
#include "output.h"
#include <mist/socket.h>
#include <mist/rtp.h>
#include <mist/http_parser.h>
namespace Mist {
///Structure used to keep track of selected tracks.
class trackmeta {
public:
trackmeta(){
rtcpSent = 0;
channel = 0;
UDP = false;
initSent = false;
}
Socket::UDPConnection data;
Socket::UDPConnection rtcp;
RTP::Packet rtpPacket;///< The RTP packet instance used for this track.
long long rtcpSent;///< The last 5-second interval (epoch/5) in which an RTCP packet was sent.
int channel;///< Channel number, used when sending interleaved over TCP.
bool UDP;///< True if sending over UDP, false when sending over TCP.
bool initSent;///< Whether the H264 parameter sets for this track have been sent in-band.
};
class OutRTSP : public Output {
public:
OutRTSP(Socket::Connection & myConn);
static void init(Util::Config * cfg);
void sendNext();
void onRequest();
private:
void handleDescribe();
void handleSetup();
void handlePlay();
void handlePause();
long long connectedAt;///< The timestamp the connection was made, as reference point for RTCP packets.
std::map<int, trackmeta> tracks;///< List of selected tracks with RTSP-specific session data.
unsigned int seekpoint;///< Current play position
unsigned int pausepoint;///< Position to pause at, when reached
HTTP::Parser HTTP_R, HTTP_S;
};
}
typedef Mist::OutRTSP mistOut;

View file

@@ -44,9 +44,11 @@ namespace Mist {
capa["optional"]["tracks"]["help"] = "The track IDs of the stream that this connector will transmit separated by spaces";
capa["optional"]["tracks"]["type"] = "str";
capa["optional"]["tracks"]["option"] = "--tracks";
capa["codecs"][0u][0u].append("HEVC");
capa["codecs"][0u][0u].append("H264");
capa["codecs"][0u][1u].append("AAC");
capa["codecs"][0u][1u].append("MP3");
capa["codecs"][0u][1u].append("AC3");
cfg->addOption("streamname",
JSON::fromString("{\"arg\":\"string\",\"short\":\"s\",\"long\":\"stream\",\"help\":\"The name of the stream that this connector will transmit.\"}"));
cfg->addOption("tracks",

View file

@@ -4,6 +4,7 @@ namespace Mist {
TSOutput::TSOutput(Socket::Connection & conn) : TS_BASECLASS(conn){
packCounter=0;
haveAvcc = false;
haveHvcc = false;
until=0xFFFFFFFFFFFFFFFFull;
setBlocking(true);
sendRepeatingHeaders = false;
@@ -81,6 +82,16 @@ namespace Mist {
bs = avccbox.asAnnexB();
extraSize += bs.size();
}
/*LTS-START*/
if (myMeta.tracks[thisPacket.getTrackId()].codec == "HEVC"){
if (!haveHvcc){
hvccbox.setPayload(myMeta.tracks[thisPacket.getTrackId()].init);
haveHvcc = true;
}
bs = hvccbox.asAnnexB();
extraSize += bs.size();
}
/*LTS-END*/
}
unsigned int watKunnenWeIn1Ding = 65490-13;//Dutch for "what can we fit in one thing": the data bytes that fit in a single PES packet
@@ -106,6 +117,13 @@ namespace Mist {
fillPacket(bs.data(), bs.size());
alreadySent += bs.size();
}
/*LTS-START*/
if (myMeta.tracks[thisPacket.getTrackId()].codec == "HEVC"){
bs = hvccbox.asAnnexB();
fillPacket(bs.data(), bs.size());
alreadySent += bs.size();
}
/*LTS-END*/
}
}
while (i + 4 < (unsigned int)dataLen){

View file

@@ -25,6 +25,10 @@ namespace Mist {
bool haveAvcc;
MP4::AVCC avccbox;
bool appleCompat;
/*LTS-START*/
bool haveHvcc;
MP4::HVCC hvccbox;
/*LTS-END*/
bool sendRepeatingHeaders;
long long unsigned int until;
long long unsigned int lastVid;

View file

@@ -0,0 +1,99 @@
#include "output_ts_push.h"
#include <mist/http_parser.h>
#include <mist/defines.h>
namespace Mist {
OutTSPush::OutTSPush(Socket::Connection & conn) : TSOutput(conn){
streamName = config->getString("streamname");
parseData = true;
wantRequest = false;
sendRepeatingHeaders = true;
initialize();
std::string tracks = config->getString("tracks");
unsigned int currTrack = 0;
//loop over tracks, add any found track IDs to selectedTracks
if (tracks != ""){
selectedTracks.clear();
for (unsigned int i = 0; i < tracks.size(); ++i){
if (tracks[i] >= '0' && tracks[i] <= '9'){
currTrack = currTrack*10 + (tracks[i] - '0');
}else{
if (currTrack > 0){
selectedTracks.insert(currTrack);
}
currTrack = 0;
}
}
if (currTrack > 0){
selectedTracks.insert(currTrack);
}
}
//For UDP pushing, buffer udpsize TS packets (188 bytes each) per datagram
packetBuffer.reserve(config->getInteger("udpsize") * 188);
std::string host = config->getString("destination");
if (host.substr(0, 6) == "udp://"){
host = host.substr(6);
}
int port = atoi(host.substr(host.find(":") + 1).c_str());
host = host.substr(0, host.find(":"));
pushSock.SetDestination(host, port);
}
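//Example: with "--destination udp://127.0.0.1:9876" the "udp://" prefix is stripped, leaving
//host 127.0.0.1 and port 9876 as the target for pushSock.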
OutTSPush::~OutTSPush() {}
void OutTSPush::init(Util::Config * cfg){
Output::init(cfg);
capa["name"] = "TSPush";
capa["desc"] = "Push raw MPEG/TS over a TCP or UDP socket.";
capa["deps"] = "";
capa["required"]["streamname"]["name"] = "Stream";
capa["required"]["streamname"]["help"] = "What streamname to serve. For multiple streams, add this protocol multiple times using different ports.";
capa["required"]["streamname"]["type"] = "str";
capa["required"]["streamname"]["option"] = "--stream";
capa["required"]["destination"]["name"] = "Destination";
capa["required"]["destination"]["help"] = "Where to push to, in the format protocol://hostname:port. Ie: udp://127.0.0.1:9876";
capa["required"]["destination"]["type"] = "str";
capa["required"]["destination"]["option"] = "--destination";
capa["required"]["udpsize"]["name"] = "UDP Size";
capa["required"]["udpsize"]["help"] = "The number of TS packets to push in a single UDP datagram";
capa["required"]["udpsize"]["type"] = "uint";
capa["required"]["udpsize"]["default"] = 5;
capa["required"]["udpsize"]["option"] = "--udpsize";
capa["optional"]["tracks"]["name"] = "Tracks";
capa["optional"]["tracks"]["help"] = "The track IDs of the stream that this connector will transmit separated by spaces";
capa["optional"]["tracks"]["type"] = "str";
capa["optional"]["tracks"]["option"] = "--tracks";
capa["codecs"][0u][0u].append("HEVC");
capa["codecs"][0u][0u].append("H264");
capa["codecs"][0u][1u].append("AAC");
capa["codecs"][0u][1u].append("MP3");
cfg->addBasicConnectorOptions(capa);
cfg->addOption("streamname",
JSON::fromString("{\"arg\":\"string\",\"short\":\"s\",\"long\":\"stream\",\"help\":\"The name of the stream that this connector will transmit.\"}"));
cfg->addOption("destination",
JSON::fromString("{\"arg\":\"string\",\"short\":\"D\",\"long\":\"destination\",\"help\":\"Where to push to, in the format protocol://hostname:port. Ie: udp://127.0.0.1:9876\"}"));
cfg->addOption("tracks",
JSON::fromString("{\"arg\":\"string\",\"value\":[\"\"],\"short\": \"t\",\"long\":\"tracks\",\"help\":\"The track IDs of the stream that this connector will transmit separated by spaces.\"}"));
cfg->addOption("udpsize",
JSON::fromString("{\"arg\":\"integer\",\"value\":5,\"short\": \"u\",\"long\":\"udpsize\",\"help\":\"The number of TS packets to push in a single UDP datagram.\"}"));
config = cfg;
}
void OutTSPush::fillBuffer(const char * data, size_t dataLen){
static int curFilled = 0;
if (curFilled == config->getInteger("udpsize")){
pushSock.SendNow(packetBuffer);
packetBuffer.clear();
packetBuffer.reserve(config->getInteger("udpsize") * 188);
curFilled = 0;
}
packetBuffer += std::string(data, 188);
curFilled ++;
}
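//Sizing note: each datagram carries udpsize * 188 bytes of TS data; the default of 5 packets is
//940 bytes, while the common choice of 7 packets (1316 bytes) still fits a 1500-byte Ethernet MTU.
//The static curFilled counter persists across calls and flushes a full datagram before appending more.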
void OutTSPush::sendTS(const char * tsData, unsigned int len){
fillBuffer(tsData, len);
}
}

View file

@@ -0,0 +1,18 @@
#include "output_ts_base.h"
namespace Mist {
class OutTSPush : public TSOutput{
public:
OutTSPush(Socket::Connection & conn);
~OutTSPush();
static void init(Util::Config * cfg);
static bool listenMode(){return false;}
void sendTS(const char * tsData, unsigned int len=188);
protected:
void fillBuffer(const char * data, size_t dataLen);
std::string packetBuffer;
Socket::UDPConnection pushSock;
};
}
typedef Mist::OutTSPush mistOut;