/// Adds the default connector options. Also updates the capabilities structure with the default options.
/// Besides the options addBasicConnectorOptions adds, this function also adds port and interface options.
void Util::Config::addConnectorOptions(int port, JSON::Value & capabilities){
  JSON::Value option;
  option.null();
  option["long"] = "port";
  option["short"] = "p";
  option["arg"] = "integer";
  option["help"] = "TCP port to listen on";
  option["value"].append((long long)port);
  addOption("listen_port", option);
  capabilities["optional"]["port"]["name"] = "TCP port";
  capabilities["optional"]["port"]["help"] = "TCP port to listen on - default if unprovided is " + option["value"][0u].asString();
  capabilities["optional"]["port"]["type"] = "uint";
  capabilities["optional"]["port"]["option"] = "--port";
  capabilities["optional"]["port"]["default"] = option["value"][0u];

  option.null();
  option["long"] = "interface";
  option["short"] = "i";
  option["arg"] = "string";
  option["help"] = "Interface address to listen on, or 0.0.0.0 for all available interfaces.";
  option["value"].append("0.0.0.0");
  addOption("listen_interface", option);
  capabilities["optional"]["interface"]["name"] = "Interface";
  capabilities["optional"]["interface"]["help"] = "Address of the interface to listen on - default if unprovided is all interfaces";
  capabilities["optional"]["interface"]["option"] = "--interface";
  capabilities["optional"]["interface"]["type"] = "str";

  addBasicConnectorOptions(capabilities);
} //addConnectorOptions
/// Adds the default connector options. Also updates the capabilities structure with the default options.
void Util::Config::addBasicConnectorOptions(JSON::Value & capabilities){
  JSON::Value option;
  option.null();
  option["long"] = "username";
  option["short"] = "u";
  option["arg"] = "string";
  option["help"] = "Username to drop privileges to, or root to not drop privileges.";
  option["value"].append("root");
  addOption("username", option);
  capabilities["optional"]["username"]["name"] = "Username";
  capabilities["optional"]["username"]["help"] = "Username to drop privileges to - default if unprovided means do not drop privileges";
  capabilities["optional"]["username"]["option"] = "--username";
  capabilities["optional"]["username"]["type"] = "str";

  option.null();
  option["long"] = "daemon";
  option["short"] = "d";
  option["long_off"] = "nodaemon";
  option["short_off"] = "n";
  option["help"] = "Whether or not to daemonize the process after starting.";
  option["value"].append(1ll);
  addOption("daemonize", option);

  option.null();
  option["long"] = "json";
  option["short"] = "j";
  option["help"] = "Output connector info in JSON format, then exit.";
  option["value"].append(0ll);
  addOption("json", option);
}
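// A minimal usage sketch (not part of the source): how a hypothetical connector's main() might
// register these options and read them back after parsing. The accessor names getBool(),
// getInteger() and JSON::Value::toString() are assumptions here; only parseArgs() and getString()
// appear elsewhere in this code. Includes for Util::Config and JSON::Value are assumed available.
#include <iostream>
#include <string>
int exampleConnectorMain(int argc, char ** argv){
  JSON::Value capa; //capabilities structure, filled in as a side effect of addConnectorOptions
  Util::Config conf(argv[0], PACKAGE_VERSION);
  conf.addConnectorOptions(1935, capa); //adds --port and --interface plus the basic options
  conf.parseArgs(argc, argv);
  if (conf.getBool("json")){
    std::cout << capa.toString() << std::endl; //print capabilities in JSON form, then exit
    return 0;
  }
  int port = conf.getInteger("listen_port");
  std::string iface = conf.getString("listen_interface");
  //...open a listening socket on iface:port and serve connections here...
  return 0;
}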
void inputFLV::getNext(bool smart) {
  static JSON::Value thisPack;
  static AMF::Object amf_storage;
  thisPack.null();
  long long int lastBytePos = ftell(inFile);
  FLV::Tag tmpTag;
  while (!feof(inFile) && !FLV::Parse_Error){
    if (tmpTag.FileLoader(inFile)){
      thisPack = tmpTag.toJSON(myMeta, amf_storage);
      thisPack["bpos"] = lastBytePos;
      if (!selectedTracks.count(thisPack["trackid"].asInt())){
        //not a selected track: recurse to fetch the next tag instead
        getNext();
      }
      break;
    }
  }
  if (FLV::Parse_Error){
    FAIL_MSG("FLV error: %s", FLV::Error_Str.c_str());
    thisPack.null();
    thisPacket.null();
    return;
  }
  std::string tmpStr = thisPack.toNetPacked();
  thisPacket.reInit(tmpStr.data(), tmpStr.size());
}
bool inputFLV::readHeader() {
  JSON::Value lastPack;
  if (!inFile) {
    return false;
  }
  //See whether a separate header file exists.
  DTSC::File tmp(config->getString("input") + ".dtsh");
  if (tmp){
    myMeta = tmp.getMeta();
    return true;
  }
  //Create header file from FLV data
  fseek(inFile, 13, SEEK_SET);
  FLV::Tag tmpTag;
  long long int lastBytePos = 13;
  while (!feof(inFile) && !FLV::Parse_Error){
    if (tmpTag.FileLoader(inFile)){
      lastPack.null();
      lastPack = tmpTag.toJSON(myMeta);
      lastPack["bpos"] = lastBytePos;
      myMeta.update(lastPack);
      lastBytePos = ftell(inFile);
    }
  }
  if (FLV::Parse_Error){
    std::cerr << FLV::Error_Str << std::endl;
    return false;
  }
  std::ofstream oFile(std::string(config->getString("input") + ".dtsh").c_str());
  oFile << myMeta.toJSON().toNetPacked();
  oFile.close();
  return true;
}
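// A small illustration (an assumption, not from the source) of the ".dtsh" sidecar convention
// used above: the generated header is cached next to the input file, so a later run can skip
// the full FLV scan if the sidecar already exists.
#include <string>
#include <fstream>
bool exampleHasCachedHeader(const std::string & inputPath){
  std::ifstream hdr((inputPath + ".dtsh").c_str()); //e.g. "movie.flv" -> "movie.flv.dtsh"
  return hdr.good();                                //true if a previously written header exists
}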
Input::Input(Util::Config * cfg) : InOutBase() {
  config = cfg;
  standAlone = true;

  JSON::Value option;
  option["long"] = "json";
  option["short"] = "j";
  option["help"] = "Output MistIn info in JSON format, then exit";
  option["value"].append(0ll);
  config->addOption("json", option);

  option.null();
  option["arg_num"] = 1ll;
  option["arg"] = "string";
  option["help"] = "Name of the input file or - for stdin";
  option["value"].append("-");
  config->addOption("input", option);

  option.null();
  option["arg_num"] = 2ll;
  option["arg"] = "string";
  option["help"] = "Name of the output file or - for stdout";
  option["value"].append("-");
  config->addOption("output", option);

  option.null();
  option["arg"] = "string";
  option["short"] = "s";
  option["long"] = "stream";
  option["help"] = "The name of the stream that this connector will provide in player mode";
  config->addOption("streamname", option);

  capa["optional"]["debug"]["name"] = "debug";
  capa["optional"]["debug"]["help"] = "The debug level at which messages need to be printed.";
  capa["optional"]["debug"]["option"] = "--debug";
  capa["optional"]["debug"]["type"] = "debug";

  packTime = 0;
  lastActive = Util::epoch();
  playing = 0;
  playUntil = 0;

  singleton = this;
  isBuffer = false;
}
void inputFLV::getNext(bool smart) {
  static JSON::Value thisPack;
  thisPack.null();
  long long int lastBytePos = ftell(inFile);
  FLV::Tag tmpTag;
  while (!feof(inFile) && !FLV::Parse_Error){
    if (tmpTag.FileLoader(inFile)){
      thisPack = tmpTag.toJSON(myMeta);
      thisPack["bpos"] = lastBytePos;
      if (!selectedTracks.count(thisPack["trackid"].asInt())){
        getNext();
      }
      break;
    }
  }
  if (FLV::Parse_Error){
    std::cerr << FLV::Error_Str << std::endl;
    thisPack.null();
    lastPack.null();
    return;
  }
  std::string tmpStr = thisPack.toNetPacked();
  lastPack.reInit(tmpStr.data(), tmpStr.size());
}
int OGG2DTSC(){
  std::string oggBuffer;
  OGG::Page oggPage;
  //Read all of std::cin to oggBuffer
  double mspft = 0; //milliseconds per frame (theora)
  double mspfv = 0; //milliseconds per sample (vorbis)
  JSON::Value DTSCOut;
  JSON::Value DTSCHeader;
  DTSCHeader.null();
  DTSCHeader["moreheader"] = 0ll;
  std::map<long unsigned int, oggTrack> trackData;
  long long int lastTrackID = 1;
  int headerSeen = 0;
  bool headerWritten = false; //important bool, used for outputting the simple DTSC header.
  //while stream busy
  while (std::cin.good()){
    for (unsigned int i = 0; (i < 1024) && (std::cin.good()); i++){ //buffering
      oggBuffer += std::cin.get();
    }
    while (oggPage.read(oggBuffer)){ //reading ogg to ogg::page
      //on success, we handle one page
      long unsigned int sNum = oggPage.getBitstreamSerialNumber();
      if (oggPage.typeBOS()){ //defines a new track
        if (memcmp(oggPage.getFullPayload()+1, "theora", 6) == 0){
          headerSeen += 1;
          headerWritten = false;
          trackData[sNum].codec = THEORA;
          //fix timerate here
          //frn/frd = fps
          theora::header tempHead;
          tempHead.read(oggPage.getFullPayload(), oggPage.getPayloadSize());
          mspft = (double)(tempHead.getFRD() * 1000) / tempHead.getFRN();
        }else if (memcmp(oggPage.getFullPayload()+1, "vorbis", 6) == 0){
          headerSeen += 1;
          headerWritten = false;
          trackData[sNum].codec = VORBIS;
          vorbis::header tempHead;
          tempHead.read(oggPage.getFullPayload(), oggPage.getPayloadSize());
          mspfv = (double)1000 / ntohl(tempHead.getAudioSampleRate());
        }else{
          std::cerr << "Unknown codec, " << std::string(oggPage.getFullPayload()+1, 6) << ", skipping" << std::endl;
          continue;
        }
        trackData[sNum].dtscID = lastTrackID++;
        std::stringstream tID;
        tID << "track" << trackData[sNum].dtscID;
        trackData[sNum].name = tID.str();
      }
      //if serial number is available in mapping
      if (trackData.find(sNum) != trackData.end()){ //create DTSC from OGG page
        int offset = 0;
        for (std::deque<unsigned int>::iterator it = oggPage.getSegmentTableDeque().begin(); it != oggPage.getSegmentTableDeque().end(); it++){
          if (trackData[sNum].parsedHeaders){
            //if we are dealing with the last segment which is a part of a later continued segment
            if (it == (oggPage.getSegmentTableDeque().end()-1) && oggPage.getPageSegments() == 255 && oggPage.getSegmentTable()[254] == 255){
              //put in buffer
              trackData[sNum].contBuffer += std::string(oggPage.getFullPayload()+offset, (*it));
            }else{
              //output DTSC packet
              DTSCOut.null(); //clearing DTSC buffer
              DTSCOut["trackid"] = (long long)trackData[sNum].dtscID;
              long long unsigned int temp = oggPage.getGranulePosition();
              DTSCOut["time"] = (long long)trackData[sNum].lastTime;
              if (trackData[sNum].contBuffer != ""){
                //if a big segment is ending on this page, output buffer
                DTSCOut["data"] = trackData[sNum].contBuffer + std::string(oggPage.getFullPayload()+offset, (*it));
                DTSCOut["comment"] = "Using buffer";
                trackData[sNum].contBuffer = "";
              }else{
                DTSCOut["data"] = std::string(oggPage.getFullPayload()+offset, (*it)); //segment content put in JSON
              }
              DTSCOut["time"] = (long long)trackData[sNum].lastTime;
              if (trackData[sNum].codec == THEORA){
                trackData[sNum].lastTime += mspft;
              }else{
                //Getting current blockSize
                unsigned int blockSize = 0;
                Utils::bitstreamLSBF packet;
                packet.append(DTSCOut["data"].asString());
                if (packet.get(1) == 0){
                  blockSize = trackData[sNum].blockSize[trackData[sNum].vModes[packet.get(vorbis::ilog(trackData[sNum].vModes.size()-1))].blockFlag];
                }else{
                  std::cerr << "Warning! packet type != 0" << std::endl;
                }
                trackData[sNum].lastTime += mspfv * (blockSize / trackData[sNum].channels);
              }
              if (trackData[sNum].codec == THEORA){ //marking keyframes
                if (it == (oggPage.getSegmentTableDeque().end() - 1)){
                  //if we are in the vicinity of a new keyframe
                  if (trackData[sNum].idHeader.parseGranuleUpper(trackData[sNum].lastGran) != trackData[sNum].idHeader.parseGranuleUpper(temp)){
                    //try to mark right
                    DTSCOut["keyframe"] = 1;
                    trackData[sNum].lastGran = temp;
                  }else{
                    DTSCOut["interframe"] = 1;
                  }
                }
              }
              //Ending packet
              if (oggPage.typeContinue()){ //continuing page
                DTSCOut["OggCont"] = 1;
              }
              if (oggPage.typeEOS()){ //ending page of ogg stream
                DTSCOut["OggEOS"] = 1;
              }
              std::cout << DTSCOut.toNetPacked();
            }
          }else{ //still parsing headers for this track
            //switch on codec
            switch (trackData[sNum].codec){
              case THEORA: {
                theora::header tHead;
                if (tHead.read(oggPage.getFullPayload()+offset, (*it))){ //if the current segment is a Theora header part
                  //fill DTSC header
                  switch (tHead.getHeaderType()){
                    case 0: { //identification header
                      trackData[sNum].idHeader = tHead;
                      DTSCHeader["tracks"][trackData[sNum].name]["height"] = (long long)tHead.getPICH();
                      DTSCHeader["tracks"][trackData[sNum].name]["width"] = (long long)tHead.getPICW();
                      DTSCHeader["tracks"][trackData[sNum].name]["idheader"] = std::string(oggPage.getFullPayload()+offset, (*it));
                      break;
                    }
                    case 1: //comment header
                      DTSCHeader["tracks"][trackData[sNum].name]["commentheader"] = std::string(oggPage.getFullPayload()+offset, (*it));
                      break;
                    case 2: { //setup header, also the point to start writing the header
                      DTSCHeader["tracks"][trackData[sNum].name]["codec"] = "theora";
                      DTSCHeader["tracks"][trackData[sNum].name]["trackid"] = (long long)trackData[sNum].dtscID;
                      DTSCHeader["tracks"][trackData[sNum].name]["type"] = "video";
                      DTSCHeader["tracks"][trackData[sNum].name]["init"] = std::string(oggPage.getFullPayload()+offset, (*it));
                      headerSeen--;
                      trackData[sNum].parsedHeaders = true;
                      trackData[sNum].lastGran = 0;
                      break;
                    }
                  }
                }
                break;
              }
              case VORBIS: {
                vorbis::header vHead;
                if (vHead.read(oggPage.getFullPayload()+offset, (*it))){ //if the current segment is a Vorbis header part
                  switch (vHead.getHeaderType()){
                    case 1: {
                      DTSCHeader["tracks"][trackData[sNum].name]["channels"] = (long long)vHead.getAudioChannels();
                      DTSCHeader["tracks"][trackData[sNum].name]["idheader"] = std::string(oggPage.getFullPayload()+offset, (*it));
                      trackData[sNum].channels = vHead.getAudioChannels();
                      trackData[sNum].blockSize[0] = 1 << vHead.getBlockSize0();
                      trackData[sNum].blockSize[1] = 1 << vHead.getBlockSize1();
                      break;
                    }
                    case 3: {
                      DTSCHeader["tracks"][trackData[sNum].name]["commentheader"] = std::string(oggPage.getFullPayload()+offset, (*it));
                      break;
                    }
                    case 5: {
                      DTSCHeader["tracks"][trackData[sNum].name]["codec"] = "vorbis";
                      DTSCHeader["tracks"][trackData[sNum].name]["trackid"] = (long long)trackData[sNum].dtscID;
                      DTSCHeader["tracks"][trackData[sNum].name]["type"] = "audio";
                      DTSCHeader["tracks"][trackData[sNum].name]["init"] = std::string(oggPage.getFullPayload()+offset, (*it));
                      //saving modes into deque
                      trackData[sNum].vModes = vHead.readModeDeque(trackData[sNum].channels);
                      headerSeen--;
                      trackData[sNum].parsedHeaders = true;
                      break;
                    }
                    default: {
                      std::cerr << "Unsupported header type for vorbis" << std::endl;
                    }
                  }
                }else{
                  std::cerr << "Unknown header" << std::endl;
                }
                break;
              }
              default:
                std::cerr << "Cannot handle this codec" << std::endl;
                break;
            }
          }
          offset += (*it);
        }
      }else{
        std::cerr << "Error! Unknown bitstream number " << oggPage.getBitstreamSerialNumber() << std::endl;
      }
      //write header here
      if (headerSeen == 0 && headerWritten == false){
        std::cout << DTSCHeader.toNetPacked();
        headerWritten = true;
      }
      //write section
      if (oggPage.typeEOS()){ //ending page
        //remove from trackData
        trackData.erase(sNum);
      }
    }
  }
  std::cerr << "DTSC file created successfully" << std::endl;
  return 0;
}
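// A worked example of the timestamp step above (values assumed, not from the source): a Theora
// identification header with FRN/FRD = 30000/1001 (NTSC, ~29.97 fps) yields
// mspft = (1001 * 1000) / 30000 ~= 33.37 ms per frame, so successive video packets get "time"
// values roughly 33 ms apart.
#include <cstdio>
int exampleTheoraFrameStep(){
  double frn = 30000, frd = 1001;             //assumed frame rate numerator/denominator
  double mspft = (frd * 1000) / frn;          //same formula as OGG2DTSC uses
  std::printf("ms per frame: %.2f\n", mspft); //prints ~33.37
  return 0;
}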
/// This takes a "totals" request, and fills in the response data.
///
/// \api
/// `"totals"` requests take the form of:
/// ~~~~~~~~~~~~~~~{.js}
/// {
///   //array of streamnames to accumulate. Empty means all.
///   "streams": ["streama", "streamb", "streamc"],
///   //array of protocols to accumulate. Empty means all.
///   "protocols": ["HLS", "HSS"],
///   //list of requested data fields. Empty means all.
///   "fields": ["clients", "downbps", "upbps"],
///   //unix timestamp of data start. Negative means X seconds ago. Empty means earliest available.
///   "start": 1234567,
///   //unix timestamp of data end. Negative means X seconds ago. Empty means latest available (usually 'now').
///   "end": 1234567
/// }
/// ~~~~~~~~~~~~~~~
/// and are responded to as:
/// ~~~~~~~~~~~~~~~{.js}
/// {
///   //unix timestamp of start of data. Always present, always absolute.
///   "start": 1234567,
///   //unix timestamp of end of data. Always present, always absolute.
///   "end": 1234567,
///   //array of actually represented data fields.
///   "fields": [...],
///   //time between datapoints. Here: 10 datapoints spaced 5 seconds apart, followed by 10 datapoints spaced 1 second apart.
///   "interval": [[10, 5], [10, 1]],
///   //the data for the times as mentioned in the "interval" field, in the order they appear in the "fields" field.
///   "data": [[x, y, z], [x, y, z], [x, y, z]]
/// }
/// ~~~~~~~~~~~~~~~
void Controller::fillTotals(JSON::Value & req, JSON::Value & rep){
  //first, figure out the timestamps wanted
  long long int reqStart = 0;
  long long int reqEnd = 0;
  if (req.isMember("start")){
    reqStart = req["start"].asInt();
  }
  if (req.isMember("end")){
    reqEnd = req["end"].asInt();
  }
  //make relative timestamps absolute: negative means "X seconds ago", zero means the default range
  if (reqStart < 0){
    reqStart += Util::epoch();
  }
  if (reqStart == 0){
    reqStart = Util::epoch() - STAT_CUTOFF;
  }
  if (reqEnd <= 0){
    reqEnd += Util::epoch();
  }
  //at this point, reqStart and reqEnd are absolute timestamps.
  unsigned int fields = 0;
  //next, figure out the fields wanted
  if (req.isMember("fields") && req["fields"].size()){
    for (JSON::ArrIter it = req["fields"].ArrBegin(); it != req["fields"].ArrEnd(); it++){
      if ((*it).asStringRef() == "clients"){fields |= STAT_TOT_CLIENTS;}
      if ((*it).asStringRef() == "downbps"){fields |= STAT_TOT_BPS_DOWN;}
      if ((*it).asStringRef() == "upbps"){fields |= STAT_TOT_BPS_UP;}
    }
  }
  //select all, if none selected
  if (!fields){fields = STAT_TOT_ALL;}
  //figure out what streams are wanted
  std::set<std::string> streams;
  if (req.isMember("streams") && req["streams"].size()){
    for (JSON::ArrIter it = req["streams"].ArrBegin(); it != req["streams"].ArrEnd(); it++){
      streams.insert((*it).asStringRef());
    }
  }
  //figure out what protocols are wanted
  std::set<std::string> protos;
  if (req.isMember("protocols") && req["protocols"].size()){
    for (JSON::ArrIter it = req["protocols"].ArrBegin(); it != req["protocols"].ArrEnd(); it++){
      protos.insert((*it).asStringRef());
    }
  }
  //output the selected fields
  rep["fields"].null();
  if (fields & STAT_TOT_CLIENTS){rep["fields"].append("clients");}
  if (fields & STAT_TOT_BPS_DOWN){rep["fields"].append("downbps");}
  if (fields & STAT_TOT_BPS_UP){rep["fields"].append("upbps");}
  //start data collection
  std::map<long long unsigned int, totalsData> totalsCount;
  //start with current connections
  if (curConns.size()){
    for (std::map<unsigned long, statStorage>::iterator it = curConns.begin(); it != curConns.end(); it++){
      //data present and wanted? insert it!
      if (it->second.log.size() > 1
          && (it->second.log.rbegin()->first >= (unsigned long long)reqStart || it->second.log.begin()->first <= (unsigned long long)reqEnd)
          && (!streams.size() || streams.count(it->second.streamName))
          && (!protos.size() || protos.count(it->second.connector))){
        //keep track of the previous and current, starting at position 2 so there's always a delta down/up value.
        std::map<unsigned long long, statLog>::iterator pi = it->second.log.begin();
        for (std::map<unsigned long long, statLog>::iterator li = ++(it->second.log.begin()); li != it->second.log.end(); li++){
          if (li->first < (unsigned long long)reqStart || pi->first > (unsigned long long)reqEnd){
            continue;
          }
          unsigned int diff = li->first - pi->first;
          unsigned int ddown = (li->second.down - pi->second.down) / diff;
          unsigned int dup = (li->second.up - pi->second.up) / diff;
          for (long long unsigned int t = pi->first; t < li->first; t++){
            if (t >= (unsigned long long)reqStart && t <= (unsigned long long)reqEnd){
              totalsCount[t].add(ddown, dup);
            }
          }
          pi = li; //set previous iterator to log iterator
        }
      }
    }
  }
  //look at history
  if (oldConns.size()){
    for (std::map<unsigned long long int, statStorage>::iterator it = oldConns.begin(); it != oldConns.end(); it++){
      //data present and wanted? insert it!
      if (it->second.log.size() > 1
          && (it->second.log.rbegin()->first >= (unsigned long long)reqStart || it->second.log.begin()->first <= (unsigned long long)reqEnd)
          && (!streams.size() || streams.count(it->second.streamName))
          && (!protos.size() || protos.count(it->second.connector))){
        //keep track of the previous and current, starting at position 2 so there's always a delta down/up value.
        std::map<unsigned long long, statLog>::iterator pi = it->second.log.begin();
        for (std::map<unsigned long long, statLog>::iterator li = ++(it->second.log.begin()); li != it->second.log.end(); li++){
          if (li->first < (unsigned long long)reqStart || pi->first > (unsigned long long)reqEnd){
            continue;
          }
          unsigned int diff = li->first - pi->first;
          unsigned int ddown = (li->second.down - pi->second.down) / diff;
          unsigned int dup = (li->second.up - pi->second.up) / diff;
          for (long long unsigned int t = pi->first; t < li->first; t++){
            if (t >= (unsigned long long)reqStart && t <= (unsigned long long)reqEnd){
              totalsCount[t].add(ddown, dup);
            }
          }
          pi = li; //set previous iterator to log iterator
        }
      }
    }
  }
  //output the data itself
  if (!totalsCount.size()){
    //Oh noes! No data. We'll just reply with a bunch of nulls.
    rep["start"].null();
    rep["end"].null();
    rep["data"].null();
    rep["interval"].null();
    return;
  }
  //yay! We have data!
  rep["start"] = (long long)totalsCount.begin()->first;
  rep["end"] = (long long)totalsCount.rbegin()->first;
  rep["data"].null();
  rep["interval"].null();
  long long prevT = 0;
  JSON::Value i;
  for (std::map<long long unsigned int, totalsData>::iterator it = totalsCount.begin(); it != totalsCount.end(); it++){
    JSON::Value d;
    if (fields & STAT_TOT_CLIENTS){d.append(it->second.clients);}
    if (fields & STAT_TOT_BPS_DOWN){d.append(it->second.downbps);}
    if (fields & STAT_TOT_BPS_UP){d.append(it->second.upbps);}
    rep["data"].append(d);
    if (prevT){
      if (i.size() < 2){
        i.append(1ll);
        i.append((long long)(it->first - prevT));
      }else{
        if (i[1u].asInt() != (long long)(it->first - prevT)){
          rep["interval"].append(i);
          i[0u] = 1ll;
          i[1u] = (long long)(it->first - prevT);
        }else{
          i[0u] = i[0u].asInt() + 1;
        }
      }
    }
    prevT = it->first;
  }
  if (i.size() > 1){
    rep["interval"].append(i);
    i.null();
  }
  //all done! return is by reference, so no need to return anything here.
}
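// A minimal client-side sketch (an assumption, not part of the controller) showing how the
// run-length encoded "interval" field pairs up with "data": each [count, step] entry means
// `count` further datapoints, each `step` seconds after the previous timestamp, starting from
// the absolute "start" value in the response.
#include <vector>
#include <utility>
std::vector<long long> exampleExpandIntervals(long long start, const std::vector<std::pair<long long, long long> > & interval){
  std::vector<long long> times;
  long long t = start;
  times.push_back(t); //the first datapoint sits at "start"
  for (size_t i = 0; i < interval.size(); ++i){
    for (long long n = 0; n < interval[i].first; ++n){
      t += interval[i].second; //advance by this run's step size
      times.push_back(t);
    }
  }
  return times; //one timestamp per entry in the "data" array
}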
int DTSCMerge(int argc, char ** argv){
  Util::Config conf = Util::Config(argv[0], PACKAGE_VERSION);
  conf.addOption("output", JSON::fromString("{\"arg_num\":1, \"arg\":\"string\", \"help\":\"Filename of the output file.\"}"));
  conf.addOption("input", JSON::fromString("{\"arg_num\":2, \"arg\":\"string\", \"help\":\"Filename of the first input file.\"}"));
  conf.addOption("[additional_inputs ...]", JSON::fromString("{\"arg_num\":3, \"default\":\"\", \"arg\":\"string\", \"help\":\"Filenames of any number of additional inputs.\"}"));
  conf.parseArgs(argc, argv);

  DTSC::File outFile;
  JSON::Value meta;
  DTSC::Meta newMeta;
  std::map<std::string, std::map<int, int> > trackMapping;

  bool fullSort = true;
  std::map<std::string, DTSC::File> inFiles;
  std::map<std::string, DTSC::Meta> metaData;
  std::string outFileName = argv[1];
  std::string tmpFileName;
  for (int i = 2; i < argc; i++){
    tmpFileName = argv[i];
    if (tmpFileName == outFileName){
      fullSort = false;
    }else{
      DTSC::File F(tmpFileName);
      if (!F.getMeta().isFixed()){
        std::cerr << tmpFileName << " has not been run through DTSCFix yet." << std::endl;
        return 1;
      }
      inFiles[tmpFileName] = F;
    }
  }

  if (fullSort){
    outFile = DTSC::File(outFileName, true);
  }else{
    outFile = DTSC::File(outFileName);
    if (!outFile.getMeta().isFixed()){
      std::cerr << outFileName << " has not been run through DTSCFix yet." << std::endl;
      return 1;
    }
    meta = outFile.getMeta().toJSON();
    newMeta = DTSC::Meta(meta);
    if (meta.isMember("tracks") && meta["tracks"].size() > 0){
      for (JSON::ObjIter trackIt = meta["tracks"].ObjBegin(); trackIt != meta["tracks"].ObjEnd(); trackIt++){
        int nxtMap = getNextFree(trackMapping);
        trackMapping[argv[1]].insert(std::pair<int,int>(trackIt->second["trackid"].asInt(), nxtMap));
        newMeta.tracks[nxtMap].trackID = nxtMap;
      }
    }
  }

  std::multimap<int, keyframeInfo> allSorted;
  for (std::map<std::string, DTSC::File>::iterator it = inFiles.begin(); it != inFiles.end(); it++){
    DTSC::Meta tmpMeta(it->second.getMeta());
    for (std::map<int, DTSC::Track>::iterator trackIt = tmpMeta.tracks.begin(); trackIt != tmpMeta.tracks.end(); trackIt++){
      long long int oldID = trackIt->first;
      long long int mappedID = getNextFree(trackMapping);
      trackMapping[it->first].insert(std::pair<int,int>(oldID, mappedID));
      for (std::deque<DTSC::Key>::iterator keyIt = trackIt->second.keys.begin(); keyIt != trackIt->second.keys.end(); keyIt++){
        keyframeInfo tmpInfo;
        tmpInfo.fileName = it->first;
        tmpInfo.trackID = oldID;
        tmpInfo.keyTime = keyIt->getTime();
        tmpInfo.keyBPos = keyIt->getBpos();
        tmpInfo.keyNum = keyIt->getNumber();
        tmpInfo.keyLen = keyIt->getLength();
        if ((keyIt + 1) != trackIt->second.keys.end()){
          tmpInfo.endBPos = (keyIt + 1)->getBpos();
        }else{
          tmpInfo.endBPos = it->second.getBytePosEOF();
        }
        allSorted.insert(std::pair<int,keyframeInfo>(keyIt->getTime(), tmpInfo));
      }
      newMeta.tracks[mappedID] = trackIt->second;
      newMeta.tracks[mappedID].trackID = mappedID;
      newMeta.tracks[mappedID].reset();
    }
  }

  if (fullSort){
    meta.null();
    meta["moreheader"] = 0ll;
    std::string tmpWrite = meta.toPacked();
    outFile.writeHeader(tmpWrite, true);
  }

  std::set<int> trackSelector;
  for (std::multimap<int,keyframeInfo>::iterator sortIt = allSorted.begin(); sortIt != allSorted.end(); sortIt++){
    trackSelector.clear();
    trackSelector.insert(sortIt->second.trackID);
    inFiles[sortIt->second.fileName].selectTracks(trackSelector);
    inFiles[sortIt->second.fileName].seek_time(sortIt->second.keyTime);
    inFiles[sortIt->second.fileName].seekNext();
    while (inFiles[sortIt->second.fileName].getPacket() && inFiles[sortIt->second.fileName].getBytePos() <= sortIt->second.endBPos && !inFiles[sortIt->second.fileName].reachedEOF()){
      if (inFiles[sortIt->second.fileName].getPacket().getTrackId() == sortIt->second.trackID){
        JSON::Value tmp = inFiles[sortIt->second.fileName].getPacket().toJSON();
        tmp["trackid"] = trackMapping[sortIt->second.fileName][sortIt->second.trackID];
        outFile.writePacket(tmp);
      }
      inFiles[sortIt->second.fileName].seekNext();
    }
  }

  if (fullSort || (meta.isMember("merged") && meta["merged"])){
    newMeta.merged = 1;
  }else{
    newMeta.merged = 0;
  }

  std::string writeMeta = newMeta.toJSON().toPacked();
  meta["moreheader"] = outFile.addHeader(writeMeta);
  writeMeta = meta.toPacked();
  outFile.writeHeader(writeMeta);
  return 0;
}
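// getNextFree() is used above but not shown in this excerpt. A minimal sketch of what such a
// helper might look like, assuming it returns the lowest positive track ID that no input file
// has been mapped to yet (this is an illustration, not the project's actual implementation):
#include <map>
#include <string>
int exampleGetNextFree(const std::map<std::string, std::map<int, int> > & trackMapping){
  int candidate = 1;
  bool taken = true;
  while (taken){
    taken = false;
    //scan every file's old-ID -> new-ID mapping for the candidate
    for (std::map<std::string, std::map<int, int> >::const_iterator f = trackMapping.begin(); f != trackMapping.end(); f++){
      for (std::map<int, int>::const_iterator t = f->second.begin(); t != f->second.end(); t++){
        if (t->second == candidate){ taken = true; }
      }
    }
    if (taken){ candidate++; }
  }
  return candidate; //lowest mapped-to ID not yet in use
}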
///\brief Gets and parses one RTMP chunk at a time.
///\param inputBuffer A buffer filled with chunk data.
void parseChunk(Socket::Buffer & inputBuffer){
  //for DTSC conversion
  static JSON::Value meta_out;
  static std::stringstream prebuffer; //temporary buffer before sending real data
  static bool sending = false;
  static unsigned int counter = 0;
  //for chunk parsing
  static RTMPStream::Chunk next;
  static FLV::Tag F;
  static AMF::Object amfdata("empty", AMF::AMF0_DDV_CONTAINER);
  static AMF::Object amfelem("empty", AMF::AMF0_DDV_CONTAINER);
  static AMF::Object3 amf3data("empty", AMF::AMF3_DDV_CONTAINER);
  static AMF::Object3 amf3elem("empty", AMF::AMF3_DDV_CONTAINER);

  while (next.Parse(inputBuffer)){

    //send ACK if we received a whole window
    if ((RTMPStream::rec_cnt - RTMPStream::rec_window_at > RTMPStream::rec_window_size)){
      RTMPStream::rec_window_at = RTMPStream::rec_cnt;
      Socket.Send(RTMPStream::SendCTL(3, RTMPStream::rec_cnt)); //send ack (msg 3)
    }

    switch (next.msg_type_id){
      case 0: //does not exist
#if DEBUG >= 2
        fprintf(stderr, "UNKN: Received a zero-type message. Possible data corruption? Aborting!\n");
#endif
        while (inputBuffer.size()){
          inputBuffer.get().clear();
        }
        ss.close();
        Socket.close();
        break; //happens when connection breaks unexpectedly
      case 1: //set chunk size
        RTMPStream::chunk_rec_max = ntohl(*(int*)next.data.c_str());
#if DEBUG >= 5
        fprintf(stderr, "CTRL: Set chunk size: %i\n", RTMPStream::chunk_rec_max);
#endif
        break;
      case 2: //abort message - we ignore this one
#if DEBUG >= 5
        fprintf(stderr, "CTRL: Abort message\n");
#endif
        //4 bytes of stream id to drop
        break;
      case 3: //ack
#if DEBUG >= 8
        fprintf(stderr, "CTRL: Acknowledgement\n");
#endif
        RTMPStream::snd_window_at = ntohl(*(int*)next.data.c_str());
        RTMPStream::snd_window_at = RTMPStream::snd_cnt;
        break;
      case 4: {
        //2 bytes event type, rest = event data
        //types:
        //0 = stream begin, 4 bytes ID
        //1 = stream EOF, 4 bytes ID
        //2 = stream dry, 4 bytes ID
        //3 = setbufferlen, 4 bytes ID, 4 bytes length
        //4 = streamisrecorded, 4 bytes ID
        //6 = pingrequest, 4 bytes data
        //7 = pingresponse, 4 bytes data
        //we don't need to process this
#if DEBUG >= 5
        short int ucmtype = ntohs(*(short int*)next.data.c_str());
        switch (ucmtype){
          case 0:
            fprintf(stderr, "CTRL: UCM StreamBegin %i\n", ntohl(*((int*)(next.data.c_str()+2))));
            break;
          case 1:
            fprintf(stderr, "CTRL: UCM StreamEOF %i\n", ntohl(*((int*)(next.data.c_str()+2))));
            break;
          case 2:
            fprintf(stderr, "CTRL: UCM StreamDry %i\n", ntohl(*((int*)(next.data.c_str()+2))));
            break;
          case 3:
            fprintf(stderr, "CTRL: UCM SetBufferLength %i %i\n", ntohl(*((int*)(next.data.c_str()+2))), ntohl(*((int*)(next.data.c_str()+6))));
            break;
          case 4:
            fprintf(stderr, "CTRL: UCM StreamIsRecorded %i\n", ntohl(*((int*)(next.data.c_str()+2))));
            break;
          case 6:
            fprintf(stderr, "CTRL: UCM PingRequest %i\n", ntohl(*((int*)(next.data.c_str()+2))));
            break;
          case 7:
            fprintf(stderr, "CTRL: UCM PingResponse %i\n", ntohl(*((int*)(next.data.c_str()+2))));
            break;
          default:
            fprintf(stderr, "CTRL: UCM Unknown (%hi)\n", ucmtype);
            break;
        }
#endif
      }
        break;
      case 5: //window size of other end
#if DEBUG >= 5
        fprintf(stderr, "CTRL: Window size\n");
#endif
        RTMPStream::rec_window_size = ntohl(*(int*)next.data.c_str());
        RTMPStream::rec_window_at = RTMPStream::rec_cnt;
        Socket.Send(RTMPStream::SendCTL(3, RTMPStream::rec_cnt)); //send ack (msg 3)
        break;
      case 6:
#if DEBUG >= 5
        fprintf(stderr, "CTRL: Set peer bandwidth\n");
#endif
        //4 bytes window size, 1 byte limit type (ignored)
        RTMPStream::snd_window_size = ntohl(*(int*)next.data.c_str());
        Socket.Send(RTMPStream::SendCTL(5, RTMPStream::snd_window_size)); //send window acknowledgement size (msg 5)
        break;
      case 8: //audio data
      case 9: //video data
      case 18: //meta data
        if (ss.connected()){
          if (streamReset){
            //reset push data to empty, in case stream properties change
            meta_out.null();
            prebuffer.str("");
            sending = false;
            counter = 0;
            streamReset = false;
          }
          F.ChunkLoader(next);
          JSON::Value pack_out = F.toJSON(meta_out);
          if (!pack_out.isNull()){
            if (!sending){
              counter++;
              if (counter > 8){
                sending = true;
                ss.SendNow(meta_out.toNetPacked());
                ss.SendNow(prebuffer.str().c_str(), prebuffer.str().size()); //write buffer
                prebuffer.str(""); //clear buffer
                ss.SendNow(pack_out.toNetPacked());
              }else{
                prebuffer << pack_out.toNetPacked();
              }
            }else{
              ss.SendNow(pack_out.toNetPacked());
            }
          }
        }else{
#if DEBUG >= 5
          fprintf(stderr, "Received useless media data\n");
#endif
          Socket.close();
        }
        break;
      case 15:
#if DEBUG >= 5
        fprintf(stderr, "Received AMF3 data message\n");
#endif
        break;
      case 16:
#if DEBUG >= 5
        fprintf(stderr, "Received AMF3 shared object\n");
#endif
        break;
      case 17: {
#if DEBUG >= 5
        fprintf(stderr, "Received AMF3 command message\n");
#endif
        if (next.data[0] != 0){
          next.data = next.data.substr(1);
          amf3data = AMF::parse3(next.data);
#if DEBUG >= 5
          amf3data.Print();
#endif
        }else{
#if DEBUG >= 5
          fprintf(stderr, "Received AMF3-0 command message\n");
#endif
          next.data = next.data.substr(1);
          amfdata = AMF::parse(next.data);
          parseAMFCommand(amfdata, 17, next.msg_stream_id);
        } //parsing AMF0-style
      }
        break;
      case 19:
#if DEBUG >= 5
        fprintf(stderr, "Received AMF0 shared object\n");
#endif
        break;
      case 20: { //AMF0 command message
        amfdata = AMF::parse(next.data);
        parseAMFCommand(amfdata, 20, next.msg_stream_id);
      }
        break;
      case 22:
#if DEBUG >= 5
        fprintf(stderr, "Received aggregate message\n");
#endif
        break;
      default:
#if DEBUG >= 1
        fprintf(stderr, "Unknown chunk received! Probably protocol corruption, stopping parsing of incoming data.\n");
#endif
        stopParsing = true;
        break;
    }
  }
} //parseChunk
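// A standalone sketch (illustrative, not from the source) of the prebuffering pattern used in the
// media-data case above: the first 8 packets are held back, and only once enough packets have
// arrived are the metadata, the buffered packets, and the current packet flushed downstream.
// The "ExampleSink" type and its send() method are assumptions made for this example.
#include <sstream>
#include <string>
struct ExampleSink {
  void send(const std::string & data){ /* deliver downstream */ }
};
void examplePrebuffer(ExampleSink & out, const std::string & metaPacked, const std::string & packet,
                      std::stringstream & prebuffer, bool & sending, unsigned int & counter){
  if (sending){
    out.send(packet); //already streaming: pass packets straight through
    return;
  }
  if (++counter > 8){ //enough packets seen: flush metadata and buffered packets first
    sending = true;
    out.send(metaPacked);
    out.send(prebuffer.str());
    prebuffer.str("");
    out.send(packet);
  }else{
    prebuffer << packet; //still warming up: keep the packet for later
  }
}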