Fix for using old files.
parent c2a0d95f2b
commit 1743dcb93d
3 changed files with 103 additions and 27 deletions

@@ -69,6 +69,7 @@ namespace Controller {
       for(JSON::ObjIter trackIt = data["meta"]["tracks"].ObjBegin(); trackIt != data["meta"]["tracks"].ObjEnd(); trackIt++){
         trackIt->second.removeMember("fragments");
         trackIt->second.removeMember("keys");
+        trackIt->second.removeMember("keysizes");
         trackIt->second.removeMember("parts");
       }
     }

@@ -26,7 +26,7 @@ namespace Mist {
     singleton->userCallback(data, 30, id);//call the userCallback for this input
   }
 
-  Input::Input(Util::Config * cfg) {
+  Input::Input(Util::Config * cfg){
     config = cfg;
     JSON::Value option;
     option["long"] = "json";

@@ -73,7 +73,7 @@ namespace Mist {
     }
     std::string headerFile = streamFile + ".dtsh";
     FILE * tmp = fopen(headerFile.c_str(),"r");
-    if (tmp == NULL) {
+    if (tmp == NULL){
       DEBUG_MSG(DLVL_HIGH, "Can't open header: %s. Assuming all is fine.", headerFile.c_str() );
       return;
     }

@@ -90,24 +90,24 @@ namespace Mist {
     int timeStream = bufStream.st_mtime;
     int timeHeader = bufHeader.st_mtime;
     fclose(tmp);
-    if (timeHeader < timeStream) {
+    if (timeHeader < timeStream){
       //delete filename
       INFO_MSG("Overwriting outdated DTSH header file: %s ",headerFile.c_str());
       remove(headerFile.c_str());
     }
   }
 
-  int Input::run() {
-    if (config->getBool("json")) {
+  int Input::run(){
+    if (config->getBool("json")){
       std::cout << capa.toString() << std::endl;
       return 0;
     }
-    if (!setup()) {
+    if (!setup()){
       std::cerr << config->getString("cmd") << " setup failed." << std::endl;
       return 0;
     }
     checkHeaderTimes(config->getString("input"));
-    if (!readHeader()) {
+    if (!readHeader()){
       std::cerr << "Reading header for " << config->getString("input") << " failed." << std::endl;
       return 0;
     }
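The hunk above compares the modification times of the stream file and its .dtsh header and removes the header when it is older than the stream, so a fresh one gets generated on the next read. A rough standalone sketch of that staleness check (the isHeaderOutdated helper and the example file names are illustrative, not part of this commit):

#include <sys/stat.h>
#include <cstdio>
#include <string>

// Returns true when the header file is missing or older than the source file,
// i.e. when it should be (re)generated. Hypothetical helper, not MistServer API.
bool isHeaderOutdated(const std::string & source, const std::string & header){
  struct stat bufStream;
  struct stat bufHeader;
  if (stat(header.c_str(), &bufHeader) != 0){
    return true; // no header yet
  }
  if (stat(source.c_str(), &bufStream) != 0){
    return false; // source missing; nothing sensible to compare against
  }
  return bufHeader.st_mtime < bufStream.st_mtime;
}

int main(){
  std::string streamFile = "stream.flv"; // example paths only
  std::string headerFile = streamFile + ".dtsh";
  if (isHeaderOutdated(streamFile, headerFile)){
    std::remove(headerFile.c_str()); // drop the stale header so it gets rebuilt
  }
  return 0;
}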

@@ -205,23 +205,94 @@ namespace Mist {
 
   void Input::parseHeader(){
     DEBUG_MSG(DLVL_DONTEVEN,"Parsing the header");
-    for (std::map<int, DTSC::Track>::iterator it = myMeta.tracks.begin(); it != myMeta.tracks.end(); it++) {
-      bool newData = true;
-      for (int i = 0; i < it->second.keys.size(); i++){
-        if (newData){
-          //i+1 because keys are 1-indexed
-          pagesByTrack[it->first][i+1].firstTime = it->second.keys[i].getTime();
-          newData = false;
-        }
-        pagesByTrack[it->first].rbegin()->second.keyNum++;
-        pagesByTrack[it->first].rbegin()->second.partNum += it->second.keys[i].getParts();
-        pagesByTrack[it->first].rbegin()->second.dataSize += it->second.keySizes[i];
-        if (pagesByTrack[it->first].rbegin()->second.dataSize > 8 * 1024 * 1024){
-          newData = true;
-        }
+    bool hasKeySizes = true;
+    for (std::map<int, DTSC::Track>::iterator it = myMeta.tracks.begin(); it != myMeta.tracks.end(); it++){
+      if (!it->second.keySizes.size()){
+        hasKeySizes = false;
+        break;
       }
     }
-    for (std::map<int, DTSC::Track>::iterator it = myMeta.tracks.begin(); it != myMeta.tracks.end(); it++) {
+    if (hasKeySizes){
+      for (std::map<int, DTSC::Track>::iterator it = myMeta.tracks.begin(); it != myMeta.tracks.end(); it++){
+        bool newData = true;
+        for (int i = 0; i < it->second.keys.size(); i++){
+          if (newData){
+            //i+1 because keys are 1-indexed
+            pagesByTrack[it->first][i+1].firstTime = it->second.keys[i].getTime();
+            newData = false;
+          }
+          pagesByTrack[it->first].rbegin()->second.keyNum++;
+          pagesByTrack[it->first].rbegin()->second.partNum += it->second.keys[i].getParts();
+          pagesByTrack[it->first].rbegin()->second.dataSize += it->second.keySizes[i];
+          if (pagesByTrack[it->first].rbegin()->second.dataSize > 8 * 1024 * 1024){
+            newData = true;
+          }
+        }
+      }
+    }else{
+      selectedTracks.clear();
+      std::stringstream trackSpec;
+      for (std::map<int, DTSC::Track>::iterator it = myMeta.tracks.begin(); it != myMeta.tracks.end(); it++){
+        DEBUG_MSG(DLVL_VERYHIGH, "Track %d encountered", it->first);
+        if (trackSpec.str() != ""){
+          trackSpec << " ";
+        }
+        trackSpec << it->first;
+        DEBUG_MSG(DLVL_VERYHIGH, "Trackspec now %s", trackSpec.str().c_str());
+        for (std::deque<DTSC::Key>::iterator it2 = it->second.keys.begin(); it2 != it->second.keys.end(); it2++){
+          keyTimes[it->first].insert(it2->getTime());
+        }
+      }
+      trackSelect(trackSpec.str());
+
+      std::map<int, DTSCPageData> curData;
+      std::map<int, booking> bookKeeping;
+
+      seek(0);
+      getNext();
+
+      while(lastPack){//loop through all
+        int tid = lastPack.getTrackId();
+        if (!tid){
+          getNext(false);
+          continue;
+        }
+        if (!bookKeeping.count(tid)){
+          bookKeeping[tid].first = 1;
+          bookKeeping[tid].curPart = 0;
+          bookKeeping[tid].curKey = 0;
+
+          curData[tid].lastKeyTime = 0xFFFFFFFF;
+          curData[tid].keyNum = 1;
+          curData[tid].partNum = 0;
+          curData[tid].dataSize = 0;
+          curData[tid].curOffset = 0;
+          curData[tid].firstTime = myMeta.tracks[tid].keys[0].getTime();
+
+          char tmpId[20];
+          sprintf(tmpId, "%d", tid);
+          indexPages[tid].init(config->getString("streamname") + tmpId, 8 * 1024, true);//Pages of 8kb in size, room for 512 parts.
+        }
+        if (myMeta.tracks[tid].keys[bookKeeping[tid].curKey].getParts() == curData[tid].partNum){
+          if (curData[tid].dataSize > 8 * 1024 * 1024){
+            pagesByTrack[tid][bookKeeping[tid].first] = curData[tid];
+            bookKeeping[tid].first += curData[tid].keyNum;
+            curData[tid].keyNum = 0;
+            curData[tid].dataSize = 0;
+            curData[tid].firstTime = myMeta.tracks[tid].keys[bookKeeping[tid].curKey].getTime();
+          }
+          bookKeeping[tid].curKey++;
+          curData[tid].keyNum++;
+          curData[tid].partNum = 0;
+        }
+        curData[tid].dataSize += lastPack.getDataLen();
+        curData[tid].partNum ++;
+        bookKeeping[tid].curPart ++;
+        DEBUG_MSG(DLVL_INSANE, "Track %ld:%llu (%db) on page %d, being part %d of key %d", lastPack.getTrackId(), lastPack.getTime(), lastPack.getDataLen(), bookKeeping[tid].first, curData[tid].partNum, curData[tid].keyNum);
+        getNext(false);
+      }
+    }
+    for (std::map<int, DTSC::Track>::iterator it = myMeta.tracks.begin(); it != myMeta.tracks.end(); it++){
       DEBUG_MSG(DLVL_MEDIUM, "Track %d (%s) split into %lu pages", it->first, myMeta.tracks[it->first].codec.c_str(), pagesByTrack[it->first].size());
       for (std::map<int, DTSCPageData>::iterator it2 = pagesByTrack[it->first].begin(); it2 != pagesByTrack[it->first].end(); it2++){
         DEBUG_MSG(DLVL_VERYHIGH, "Page %u-%u, (%llu bytes)", it2->first, it2->first + it2->second.keyNum - 1, it2->second.dataSize);
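When every track carries keySizes, the new parseHeader can lay out pages straight from the header: it walks each track's keys and starts a new page once the accumulated size passes 8 MiB. A self-contained sketch of that grouping using plain vectors instead of the DTSC track structures (PageInfo, buildPages and the sample sizes are made up for illustration):

#include <cstdio>
#include <vector>

struct PageInfo {
  int firstKey = 0;       // 1-indexed first key on this page
  int keyCount = 0;       // number of keys on this page
  long long dataSize = 0; // accumulated bytes on this page
};

// Group keys into pages of roughly 8 MiB at most, mirroring the keySizes-based
// path of parseHeader(). keySizes[i] is the byte size of key i.
std::vector<PageInfo> buildPages(const std::vector<long long> & keySizes){
  std::vector<PageInfo> pages;
  bool newData = true;
  for (size_t i = 0; i < keySizes.size(); i++){
    if (newData){
      PageInfo p;
      p.firstKey = (int)i + 1; // keys are 1-indexed, as in the original code
      pages.push_back(p);
      newData = false;
    }
    pages.back().keyCount++;
    pages.back().dataSize += keySizes[i];
    if (pages.back().dataSize > 8 * 1024 * 1024){
      newData = true; // the next key starts a fresh page
    }
  }
  return pages;
}

int main(){
  std::vector<long long> keySizes(40, 1200 * 1024); // 40 keys of ~1.2 MiB each (made-up numbers)
  std::vector<PageInfo> pages = buildPages(keySizes);
  for (size_t i = 0; i < pages.size(); i++){
    std::printf("Page %d-%d, (%lld bytes)\n", pages[i].firstKey,
                pages[i].firstKey + pages[i].keyCount - 1, pages[i].dataSize);
  }
  return 0;
}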

@@ -309,22 +380,22 @@ namespace Mist {
     return true;
   }
 
-  void Input::play(int until) {
+  void Input::play(int until){
     playing = -1;
     playUntil = until;
     initialTime = 0;
     benchMark = Util::getMS();
   }
 
-  void Input::playOnce() {
-    if (playing <= 0) {
+  void Input::playOnce(){
+    if (playing <= 0){
       playing = 1;
     }
     ++playing;
     benchMark = Util::getMS();
   }
 
-  void Input::quitPlay() {
+  void Input::quitPlay(){
     playing = 0;
   }
 }

@@ -58,7 +58,11 @@ namespace Mist {
     DTSC::File tmp(config->getString("input") + ".dtsh");
     if (tmp){
       myMeta = tmp.getMeta();
-      return true;
+      if (myMeta){
+        return true;
+      }else{
+        myMeta = DTSC::Meta();
+      }
     }
     //Create header file from FLV data
     fseek(inFile, 13, SEEK_SET);
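This last hunk makes the FLV input distrust an existing .dtsh whose contents do not load into usable metadata: it resets myMeta and falls through to rebuilding the header from the FLV data instead of returning early. A rough sketch of that validate-or-regenerate pattern with generic stand-ins (Meta, loadMeta and buildFromSource are hypothetical, not MistServer types):

#include <iostream>
#include <string>

// Generic stand-in for the DTSC metadata type; not the MistServer API.
struct Meta {
  bool valid = false;
  explicit operator bool() const { return valid; }
};

// Pretend to parse an on-disk header; here it always comes back unusable,
// so the caller has to fall back to regenerating it.
Meta loadMeta(const std::string & headerFile){
  std::cout << "Could not use " << headerFile << std::endl;
  return Meta();
}

Meta buildFromSource(const std::string & sourceFile){
  std::cout << "Rebuilding header from " << sourceFile << std::endl;
  Meta m;
  m.valid = true;
  return m;
}

bool readHeader(const std::string & sourceFile, Meta & myMeta){
  myMeta = loadMeta(sourceFile + ".dtsh");
  if (myMeta){
    return true;                        // existing header is usable as-is
  }
  myMeta = Meta();                      // discard whatever was loaded
  myMeta = buildFromSource(sourceFile); // regenerate from the source data
  return (bool)myMeta;
}

int main(){
  Meta meta;
  return readHeader("stream.flv", meta) ? 0 : 1;
}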