///\brief Converts DTSC from stdin to FLV on stdout.
///\return The return code for the converter.
int DTSC2FLV(){
  FLV::Tag FLV_out; //Temporary storage for outgoing FLV data.
  DTSC::Stream Strm;
  std::string inBuffer;
  char charBuffer[1024 * 10];
  unsigned int charCount;
  bool doneheader = false;
  int videoID = -1, audioID = -1;
  while (std::cin.good()){
    if (Strm.parsePacket(inBuffer)){
      if ( !doneheader){
        //find the first audio and video tracks
        for (JSON::ObjIter objIt = Strm.metadata["tracks"].ObjBegin(); objIt != Strm.metadata["tracks"].ObjEnd(); objIt++){
          if (videoID == -1 && objIt->second["type"].asString() == "video"){
            videoID = objIt->second["trackid"].asInt();
          }
          if (audioID == -1 && objIt->second["type"].asString() == "audio"){
            audioID = objIt->second["trackid"].asInt();
          }
        }
        doneheader = true;
        std::cout.write(FLV::Header, 13);
        FLV_out.DTSCMetaInit(Strm, Strm.getTrackById(videoID), Strm.getTrackById(audioID));
        std::cout.write(FLV_out.data, FLV_out.len);
        if (videoID != -1 && Strm.getTrackById(videoID).isMember("init")){
          FLV_out.DTSCVideoInit(Strm.getTrackById(videoID));
          std::cout.write(FLV_out.data, FLV_out.len);
        }
        if (audioID != -1 && Strm.getTrackById(audioID).isMember("init")){
          FLV_out.DTSCAudioInit(Strm.getTrackById(audioID));
          std::cout.write(FLV_out.data, FLV_out.len);
        }
      }
      if (FLV_out.DTSCLoader(Strm)){
        std::cout.write(FLV_out.data, FLV_out.len);
      }
    }else{
      std::cin.read(charBuffer, 1024 * 10);
      charCount = std::cin.gcount();
      inBuffer.append(charBuffer, charCount);
    }
  }
  std::cerr << "Done!" << std::endl;
  return 0;
} //DTSC2FLV
///\brief Converts DTSC from stdin to FLV on stdout.
///\return The return code for the converter.
int DTSC2FLV(){
  FLV::Tag FLV_out; //Temporary storage for outgoing FLV data.
  DTSC::Stream Strm;
  std::string inBuffer;
  char charBuffer[1024 * 10];
  unsigned int charCount;
  bool doneheader = false;
  while (std::cin.good()){
    if (Strm.parsePacket(inBuffer)){
      if ( !doneheader){
        doneheader = true;
        std::cout.write(FLV::Header, 13);
        FLV_out.DTSCMetaInit(Strm);
        std::cout.write(FLV_out.data, FLV_out.len);
        if (Strm.metadata.isMember("video") && Strm.metadata["video"].isMember("init")){
          FLV_out.DTSCVideoInit(Strm);
          std::cout.write(FLV_out.data, FLV_out.len);
        }
        if (Strm.metadata.isMember("audio") && Strm.metadata["audio"].isMember("init")){
          FLV_out.DTSCAudioInit(Strm);
          std::cout.write(FLV_out.data, FLV_out.len);
        }
      }
      if (FLV_out.DTSCLoader(Strm)){
        std::cout.write(FLV_out.data, FLV_out.len);
      }
    }else{
      std::cin.read(charBuffer, 1024 * 10);
      charCount = std::cin.gcount();
      inBuffer.append(charBuffer, charCount);
    }
  }
  std::cerr << "Done!" << std::endl;
  return 0;
} //DTSC2FLV
///\brief Main function for the TS Connector
///\param conn A socket describing the connection to the client.
///\param streamName The stream to connect to.
///\return The exit code of the connector.
int tsConnector(Socket::Connection conn, std::string streamName, std::string trackIDs){
  std::string ToPack;
  TS::Packet PackData;
  std::string DTMIData;
  int PacketNumber = 0;
  long long unsigned int TimeStamp = 0;
  int ThisNaluSize;
  char VideoCounter = 0;
  char AudioCounter = 0;
  bool WritePesHeader;
  bool IsKeyFrame;
  bool FirstKeyFrame = true;
  bool FirstIDRInKeyFrame;
  MP4::AVCC avccbox;
  bool haveAvcc = false;
  DTSC::Stream Strm;
  bool inited = false;
  Socket::Connection ss;
  while (conn.connected()){
    if ( !inited){
      ss = Util::Stream::getStream(streamName);
      if ( !ss.connected()){
#if DEBUG >= 1
        fprintf(stderr, "Could not connect to server!\n");
#endif
        conn.close();
        break;
      }
      if (trackIDs == ""){
        //no track ids given? Find the first video and first audio track (if available) and use those!
        int videoID = -1;
        int audioID = -1;
        Strm.waitForMeta(ss);
        if (Strm.metadata.isMember("tracks")){
          for (JSON::ObjIter trackIt = Strm.metadata["tracks"].ObjBegin(); trackIt != Strm.metadata["tracks"].ObjEnd(); trackIt++){
            if (audioID == -1 && trackIt->second["type"].asString() == "audio"){
              audioID = trackIt->second["trackid"].asInt();
              if (trackIDs != ""){
                trackIDs += " " + trackIt->second["trackid"].asString();
              }else{
                trackIDs = trackIt->second["trackid"].asString();
              }
            }
            if (videoID == -1 && trackIt->second["type"].asString() == "video"){
              videoID = trackIt->second["trackid"].asInt();
              if (trackIDs != ""){
                trackIDs += " " + trackIt->second["trackid"].asString();
              }else{
                trackIDs = trackIt->second["trackid"].asString();
              }
            }
          } //for iterator
        } //if isMember("tracks")
      } //if trackIDs == ""
      std::string cmd = "t " + trackIDs + "\ns 0\np\n";
      ss.SendNow(cmd);
      inited = true;
    }
    if (ss.spool()){
      while (Strm.parsePacket(ss.Received())){
        std::stringstream TSBuf;
        Socket::Buffer ToPack;
        //write PAT and PMT TS packets
        if (PacketNumber == 0){
          PackData.DefaultPAT();
          TSBuf.write(PackData.ToString(), 188);
          PackData.DefaultPMT();
          TSBuf.write(PackData.ToString(), 188);
          PacketNumber += 2;
        }
        int PIDno = 0;
        char * ContCounter = 0;
        if (Strm.lastType() == DTSC::VIDEO){
          if ( !haveAvcc){
            avccbox.setPayload(Strm.getTrackById(Strm.getPacket()["trackid"].asInt())["init"].asString());
            haveAvcc = true;
          }
          IsKeyFrame = Strm.getPacket().isMember("keyframe");
          if (IsKeyFrame){
            TimeStamp = (Strm.getPacket()["time"].asInt() * 27000);
          }
          ToPack.append(avccbox.asAnnexB());
          while (Strm.lastData().size() > 4){
            ThisNaluSize = (Strm.lastData()[0] << 24) + (Strm.lastData()[1] << 16) + (Strm.lastData()[2] << 8) + Strm.lastData()[3];
            Strm.lastData().replace(0, 4, TS::NalHeader, 4);
            if (ThisNaluSize + 4 == Strm.lastData().size()){
              ToPack.append(Strm.lastData());
              break;
            }else{
              ToPack.append(Strm.lastData().c_str(), ThisNaluSize + 4);
              Strm.lastData().erase(0, ThisNaluSize + 4);
            }
          }
          ToPack.prepend(TS::Packet::getPESVideoLeadIn(0ul, Strm.getPacket()["time"].asInt() * 90));
          PIDno = 0x100 - 1 + Strm.getPacket()["trackid"].asInt();
          ContCounter = &VideoCounter;
        }else if (Strm.lastType() == DTSC::AUDIO){
          ToPack.append(TS::GetAudioHeader(Strm.lastData().size(), Strm.getTrackById(Strm.getPacket()["trackid"].asInt())["init"].asString()));
          ToPack.append(Strm.lastData());
          ToPack.prepend(TS::Packet::getPESAudioLeadIn(ToPack.bytes(1073741824ul), Strm.getPacket()["time"].asInt() * 90));
          PIDno = 0x100 - 1 + Strm.getPacket()["trackid"].asInt();
          ContCounter = &AudioCounter;
          IsKeyFrame = false;
        }
        //initial packet
        PackData.Clear();
        PackData.PID(PIDno);
        PackData.ContinuityCounter(( *ContCounter)++);
        PackData.UnitStart(1);
        if (IsKeyFrame){
          PackData.RandomAccess(1);
          PackData.PCR(TimeStamp);
        }
        unsigned int toSend = PackData.AddStuffing(ToPack.bytes(184));
        std::string gonnaSend = ToPack.remove(toSend);
        PackData.FillFree(gonnaSend);
        TSBuf.write(PackData.ToString(), 188);
        PacketNumber++;
        //rest of packets
        while (ToPack.size()){
          PackData.Clear();
          PackData.PID(PIDno);
          PackData.ContinuityCounter(( *ContCounter)++);
          toSend = PackData.AddStuffing(ToPack.bytes(184));
          gonnaSend = ToPack.remove(toSend);
          PackData.FillFree(gonnaSend);
          TSBuf.write(PackData.ToString(), 188);
          PacketNumber++;
        }
        TSBuf.flush();
        if (TSBuf.str().size()){
          conn.SendNow(TSBuf.str().c_str(), TSBuf.str().size());
          TSBuf.str("");
        }
        TSBuf.str("");
        PacketNumber = 0;
      }
    }else{
      Util::sleep(1000);
      conn.spool();
    }
  }
  return 0;
}
/// Main function for Connector_HTTP_Dynamic
int Connector_HTTP_Dynamic(Socket::Connection conn){
  std::deque<std::string> FlashBuf;
  std::vector<int> Timestamps;
  int FlashBufSize = 0;
  long long int FlashBufTime = 0;
  DTSC::Stream Strm; //Incoming stream buffer.
  HTTP::Parser HTTP_R, HTTP_S; //HTTP Receiver and HTTP Sender.
  bool ready4data = false; //Set to true when streaming is to begin.
  bool pending_manifest = false;
  bool receive_marks = false; //when set to true, this stream will ignore keyframes and instead use pause marks
  bool inited = false;
  Socket::Connection ss( -1);
  std::string streamname;
  std::string recBuffer = "";
  bool wantsVideo = false;
  bool wantsAudio = false;
  std::string Quality;
  int Segment = -1;
  long long int ReqFragment = -1;
  int temp;
  std::string tempStr;
  int Flash_RequestPending = 0;
  unsigned int lastStats = 0;
  conn.setBlocking(false); //do not block on conn.spool() when no data is available
  while (conn.connected()){
    if (conn.spool() || conn.Received().size()){
      //make sure it ends in a \n
      if ( *(conn.Received().get().rbegin()) != '\n'){
        std::string tmp = conn.Received().get();
        conn.Received().get().clear();
        if (conn.Received().size()){
          conn.Received().get().insert(0, tmp);
        }else{
          conn.Received().append(tmp);
        }
      }
      if (HTTP_R.Read(conn.Received().get())){
#if DEBUG >= 4
        std::cout << "Received request: " << HTTP_R.getUrl() << std::endl;
#endif
        conn.setHost(HTTP_R.GetHeader("X-Origin"));
        if (HTTP_R.url.find("Manifest") == std::string::npos){
          streamname = HTTP_R.url.substr(8, HTTP_R.url.find("/", 8) - 12);
          if ( !ss){
            ss = Util::Stream::getStream(streamname);
            if ( !ss.connected()){
#if DEBUG >= 1
              fprintf(stderr, "Could not connect to server!\n");
#endif
              ss.close();
              HTTP_S.Clean();
              HTTP_S.SetBody("No such stream " + streamname + " is available on the system. Please try again.\n");
              conn.SendNow(HTTP_S.BuildResponse("404", "Not found"));
              ready4data = false;
              continue;
            }
            ss.setBlocking(false);
            inited = true;
          }
          Quality = HTTP_R.url.substr(HTTP_R.url.find("/Q(", 8) + 3);
          Quality = Quality.substr(0, Quality.find(")"));
          tempStr = HTTP_R.url.substr(HTTP_R.url.find(")/") + 2);
          wantsAudio = false;
          wantsVideo = false;
          if (tempStr[0] == 'A'){
            wantsAudio = true;
          }
          if (tempStr[0] == 'V'){
            wantsVideo = true;
          }
          tempStr = tempStr.substr(tempStr.find("(") + 1);
          ReqFragment = atoll(tempStr.substr(0, tempStr.find(")")).c_str());
#if DEBUG >= 4
          printf("Quality: %s, Frag %lld\n", Quality.c_str(), (ReqFragment / 10000));
#endif
          std::stringstream sstream;
          sstream << "s " << (ReqFragment / 10000) << "\no \n";
          ss.SendNow(sstream.str().c_str());
          Flash_RequestPending++;
        }else{
          streamname = HTTP_R.url.substr(8, HTTP_R.url.find("/", 8) - 12);
          if ( !Strm.metadata.isNull()){
            HTTP_S.Clean();
            HTTP_S.SetHeader("Content-Type", "text/xml");
            HTTP_S.SetHeader("Cache-Control", "no-cache");
            if (Strm.metadata.isMember("length")){
              receive_marks = true;
            }
            std::string manifest = BuildManifest(streamname, Strm.metadata);
            HTTP_S.SetBody(manifest);
            conn.SendNow(HTTP_S.BuildResponse("200", "OK"));
#if DEBUG >= 3
            printf("Sent manifest\n");
#endif
            pending_manifest = false;
          }else{
            pending_manifest = true;
          }
        }
        ready4data = true;
        HTTP_R.Clean(); //clean for any possible next requests
      }
    }else{
      if (Flash_RequestPending){
        usleep(1000); //sleep 1ms
      }else{
        usleep(10000); //sleep 10ms
      }
    }
    if (ready4data){
      if ( !inited){
        //we are ready, connect the socket!
        ss = Util::Stream::getStream(streamname);
        if ( !ss.connected()){
#if DEBUG >= 1
          fprintf(stderr, "Could not connect to server!\n");
#endif
          ss.close();
          HTTP_S.Clean();
          HTTP_S.SetBody("No such stream " + streamname + " is available on the system. Please try again.\n");
          conn.SendNow(HTTP_S.BuildResponse("404", "Not found"));
          ready4data = false;
          continue;
        }
        ss.setBlocking(false);
#if DEBUG >= 3
        fprintf(stderr, "Everything connected, starting to send video data...\n");
#endif
        inited = true;
      }
      unsigned int now = Util::epoch();
      if (now != lastStats){
        lastStats = now;
        ss.SendNow(conn.getStats("HTTP_Smooth").c_str());
      }
      if (ss.spool()){
        while (Strm.parsePacket(ss.Received())){
          if (Strm.getPacket(0).isMember("time")){
            if ( !Strm.metadata.isMember("firsttime")){
              Strm.metadata["firsttime"] = Strm.getPacket(0)["time"];
            }else{
              if ( !Strm.metadata.isMember("length") || Strm.metadata["length"].asInt() == 0){
                Strm.getPacket(0)["time"] = Strm.getPacket(0)["time"].asInt() - Strm.metadata["firsttime"].asInt();
              }
            }
            Strm.metadata["lasttime"] = Strm.getPacket(0)["time"];
          }
          if (pending_manifest){
            HTTP_S.Clean();
            HTTP_S.SetHeader("Content-Type", "text/xml");
            HTTP_S.SetHeader("Cache-Control", "no-cache");
            if (Strm.metadata.isMember("length")){
              receive_marks = true;
            }
            std::string manifest = BuildManifest(streamname, Strm.metadata);
            HTTP_S.SetBody(manifest);
            conn.SendNow(HTTP_S.BuildResponse("200", "OK"));
#if DEBUG >= 3
            printf("Sent manifest\n");
#endif
            pending_manifest = false;
          }
          if ( !receive_marks && Strm.metadata.isMember("length")){
            receive_marks = true;
          }
          if (Strm.lastType() == DTSC::PAUSEMARK){
            Timestamps.push_back(Strm.getPacket(0)["time"].asInt());
          }
          if ((Strm.getPacket(0).isMember("keyframe") && !receive_marks) || Strm.lastType() == DTSC::PAUSEMARK){
#if DEBUG >= 4
            fprintf(stderr, "Received a %s fragment of %i bytes.\n", Strm.getPacket(0)["datatype"].asString().c_str(), FlashBufSize);
#endif
            if (Flash_RequestPending > 0 && FlashBufSize){
#if DEBUG >= 3
              fprintf(stderr, "Sending a fragment...");
#endif
              //static std::string btstrp;
              //btstrp = GenerateBootstrap(streamname, Strm.metadata, ReqFragment, FlashBufTime, Strm.getPacket(0)["time"]);
              HTTP_S.Clean();
              HTTP_S.SetHeader("Content-Type", "video/mp4");
              HTTP_S.SetBody("");
              int myDuration;
              MP4::MFHD mfhd_box;
              for (int i = 0; i < Strm.metadata["keytime"].size(); i++){
                if (Strm.metadata["keytime"][i].asInt() >= (ReqFragment / 10000)){
                  mfhd_box.setSequenceNumber(i + 1);
                  if (i + 1 < (int)Strm.metadata["keytime"].size()){
                    //not the last keyframe: duration runs until the next keytime
                    myDuration = Strm.metadata["keytime"][i + 1].asInt() - Strm.metadata["keytime"][i].asInt();
                  }else{
                    //last keyframe: duration runs until the end of the stream
                    myDuration = Strm.metadata["lastms"].asInt() - Strm.metadata["keytime"][i].asInt();
                  }
                  myDuration = myDuration * 10000;
                  break;
                }
              }
              MP4::TFHD tfhd_box;
              tfhd_box.setFlags(MP4::tfhdSampleFlag);
              tfhd_box.setTrackID(1);
              tfhd_box.setDefaultSampleFlags(0x000000C0 | MP4::noIPicture | MP4::noDisposable | MP4::noKeySample);
              MP4::TRUN trun_box;
              //maybe reinsert dataOffset
              trun_box.setFlags(MP4::trundataOffset | MP4::trunfirstSampleFlags | MP4::trunsampleDuration | MP4::trunsampleSize);
              trun_box.setDataOffset(42);
              trun_box.setFirstSampleFlags(0x00000040 | MP4::isIPicture | MP4::noDisposable | MP4::isKeySample);
              for (int i = 0; i < FlashBuf.size(); i++){
                MP4::trunSampleInformation trunSample;
                trunSample.sampleSize = FlashBuf[i].size();
                //trunSample.sampleDuration = (Timestamps[i+1]-Timestamps[i]) * 10000;
                trunSample.sampleDuration = (((double)myDuration / FlashBuf.size()) * i) - (((double)myDuration / FlashBuf.size()) * (i - 1));
                trun_box.setSampleInformation(trunSample, i);
              }
              MP4::SDTP sdtp_box;
              sdtp_box.setVersion(0);
              sdtp_box.setValue(0x24, 4);
              for (int i = 1; i < FlashBuf.size(); i++){
                sdtp_box.setValue(0x14, 4 + i);
              }
              MP4::TRAF traf_box;
              traf_box.setContent(tfhd_box, 0);
              traf_box.setContent(trun_box, 1);
              traf_box.setContent(sdtp_box, 2);
              MP4::MOOF moof_box;
              moof_box.setContent(mfhd_box, 0);
              moof_box.setContent(traf_box, 1);
              //setting the offsets!
              trun_box.setDataOffset(moof_box.boxedSize() + 8);
              traf_box.setContent(trun_box, 1);
              moof_box.setContent(traf_box, 1);
              //std::cerr << "\t[encoded] = " << ((MP4::TRUN&)(((MP4::TRAF&)(moof_box.getContent(1))).getContent(1))).getDataOffset() << std::endl;
              HTTP_S.SetHeader("Content-Length", FlashBufSize + 8 + moof_box.boxedSize()); //32+33+btstrp.size());
              conn.SendNow(HTTP_S.BuildResponse("200", "OK"));
              conn.SendNow(moof_box.asBox(), moof_box.boxedSize());
              unsigned long size = htonl(FlashBufSize + 8);
              conn.SendNow((char*) &size, 4);
              conn.SendNow("mdat", 4);
              while (FlashBuf.size() > 0){
                conn.SendNow(FlashBuf.front());
                FlashBuf.pop_front();
              }
              Flash_RequestPending--;
#if DEBUG >= 3
              fprintf(stderr, "Done\n");
#endif
            }
            FlashBuf.clear();
            FlashBufSize = 0;
          }
          if ((wantsAudio && Strm.lastType() == DTSC::AUDIO) || (wantsVideo && Strm.lastType() == DTSC::VIDEO)){
            FlashBuf.push_back(Strm.lastData());
            FlashBufSize += Strm.lastData().size();
            Timestamps.push_back(Strm.getPacket(0)["time"].asInt());
          }
        }
        if (pending_manifest && !Strm.metadata.isNull()){
          HTTP_S.Clean();
          HTTP_S.SetHeader("Content-Type", "text/xml");
          HTTP_S.SetHeader("Cache-Control", "no-cache");
          if (Strm.metadata.isMember("length")){
            receive_marks = true;
          }
          std::string manifest = BuildManifest(streamname, Strm.metadata);
          HTTP_S.SetBody(manifest);
          conn.SendNow(HTTP_S.BuildResponse("200", "OK"));
#if DEBUG >= 3
          printf("Sent manifest\n");
#endif
          pending_manifest = false;
        }
      }
      if ( !ss.connected()){
        break;
      }
    }
  }
  conn.close();
  ss.SendNow(conn.getStats("HTTP_Smooth").c_str());
  ss.close();
  return 0;
} //Connector_HTTP_Smooth main function
///\brief Main function for the HTTP Dynamic Connector
///\param conn A socket describing the connection to the client.
///\return The exit code of the connector.
int dynamicConnector(Socket::Connection conn){
  std::deque<std::string> FlashBuf;
  int FlashBufSize = 0;
  long long int FlashBufTime = 0;
  FLV::Tag tmp; //temporary tag
  DTSC::Stream Strm; //Incoming stream buffer.
  HTTP::Parser HTTP_R, HTTP_S; //HTTP Receiver and HTTP Sender.
  Socket::Connection ss( -1);
  std::string streamname;
  std::string recBuffer = "";
  std::string Quality;
  int Segment = -1;
  int ReqFragment = -1;
  unsigned int lastStats = 0;
  conn.setBlocking(false); //do not block on conn.spool() when no data is available
  while (conn.connected()){
    if (conn.spool() || conn.Received().size()){
      //make sure it ends in a \n
      if ( *(conn.Received().get().rbegin()) != '\n'){
        std::string tmp = conn.Received().get();
        conn.Received().get().clear();
        if (conn.Received().size()){
          conn.Received().get().insert(0, tmp);
        }else{
          conn.Received().append(tmp);
        }
      }
      if (HTTP_R.Read(conn.Received().get())){
#if DEBUG >= 5
        std::cout << "Received request: " << HTTP_R.getUrl() << std::endl;
#endif
        conn.setHost(HTTP_R.GetHeader("X-Origin"));
        streamname = HTTP_R.GetHeader("X-Stream");
        if ( !ss){
          ss = Util::Stream::getStream(streamname);
          if ( !ss.connected()){
            HTTP_S.Clean();
            HTTP_S.SetBody("No such stream is available on the system. Please try again.\n");
            conn.SendNow(HTTP_S.BuildResponse("404", "Not found"));
            continue;
          }
          ss.setBlocking(false);
          //make sure metadata is received
          while ( !Strm.metadata && ss.connected()){
            if (ss.spool()){
              while (Strm.parsePacket(ss.Received())){
                //do nothing
              }
            }
          }
        }
        if (HTTP_R.url.find(".abst") != std::string::npos){
          HTTP_S.Clean();
          HTTP_S.SetBody(dynamicBootstrap(streamname, Strm.metadata));
          HTTP_S.SetHeader("Content-Type", "binary/octet");
          HTTP_S.SetHeader("Cache-Control", "no-cache");
          conn.SendNow(HTTP_S.BuildResponse("200", "OK"));
          HTTP_R.Clean(); //clean for any possible next requests
          continue;
        }
        if (HTTP_R.url.find("f4m") == std::string::npos){
          Quality = HTTP_R.url.substr(HTTP_R.url.find("/", 10) + 1);
          Quality = Quality.substr(0, Quality.find("Seg"));
          int temp;
          temp = HTTP_R.url.find("Seg") + 3;
          Segment = atoi(HTTP_R.url.substr(temp, HTTP_R.url.find("-", temp) - temp).c_str());
          temp = HTTP_R.url.find("Frag") + 4;
          ReqFragment = atoi(HTTP_R.url.substr(temp).c_str());
#if DEBUG >= 5
          printf("Quality: %s, Seg %d Frag %d\n", Quality.c_str(), Segment, ReqFragment);
#endif
          if (Strm.metadata.isMember("live")){
            int seekable = Strm.canSeekFrame(ReqFragment);
            if (seekable == 0){
              //if the fragment in question is available, check whether the next one is available too
              seekable = Strm.canSeekFrame(ReqFragment + 1);
            }
            if (seekable < 0){
              HTTP_S.Clean();
              HTTP_S.SetBody("The requested fragment is no longer kept in memory on the server and cannot be served.\n");
              conn.SendNow(HTTP_S.BuildResponse("412", "Fragment out of range"));
              HTTP_R.Clean(); //clean for any possible next requests
              std::cout << "Fragment @ F" << ReqFragment << " too old (F" << Strm.metadata["keynum"][0u].asInt() << " - " << Strm.metadata["keynum"][Strm.metadata["keynum"].size() - 1].asInt() << ")" << std::endl;
              continue;
            }
            if (seekable > 0){
              HTTP_S.Clean();
              HTTP_S.SetBody("Proxy, re-request this in a second or two.\n");
              conn.SendNow(HTTP_S.BuildResponse("208", "Ask again later"));
              HTTP_R.Clean(); //clean for any possible next requests
              std::cout << "Fragment @ F" << ReqFragment << " not available yet (F" << Strm.metadata["keynum"][0u].asInt() << " - " << Strm.metadata["keynum"][Strm.metadata["keynum"].size() - 1].asInt() << ")" << std::endl;
              continue;
            }
          }
          std::stringstream sstream;
          sstream << "f " << ReqFragment << "\no \n";
          ss.SendNow(sstream.str().c_str());
        }else{
          HTTP_S.Clean();
          HTTP_S.SetHeader("Content-Type", "text/xml");
          HTTP_S.SetHeader("Cache-Control", "no-cache");
          std::string manifest = dynamicIndex(streamname, Strm.metadata);
          HTTP_S.SetBody(manifest);
          conn.SendNow(HTTP_S.BuildResponse("200", "OK"));
        }
        HTTP_R.Clean(); //clean for any possible next requests
      }
    }else{
      Util::sleep(1);
    }
    if (ss.connected()){
      unsigned int now = Util::epoch();
      if (now != lastStats){
        lastStats = now;
        ss.SendNow(conn.getStats("HTTP_Dynamic").c_str());
      }
      if (ss.spool()){
        while (Strm.parsePacket(ss.Received())){
          if (Strm.lastType() == DTSC::PAUSEMARK){
            if (FlashBufSize){
              HTTP_S.Clean();
              HTTP_S.SetHeader("Content-Type", "video/mp4");
              HTTP_S.SetBody("");
              std::string new_strap = dynamicBootstrap(streamname, Strm.metadata, ReqFragment);
              HTTP_S.SetHeader("Content-Length", FlashBufSize + 8 + new_strap.size()); //32+33+btstrp.size());
              conn.SendNow(HTTP_S.BuildResponse("200", "OK"));
              conn.SendNow(new_strap);
              unsigned long size = htonl(FlashBufSize + 8);
              conn.SendNow((char*) &size, 4);
              conn.SendNow("mdat", 4);
              while (FlashBuf.size() > 0){
                conn.SendNow(FlashBuf.front());
                FlashBuf.pop_front();
              }
            }
            FlashBuf.clear();
            FlashBufSize = 0;
          }
          if (Strm.lastType() == DTSC::VIDEO || Strm.lastType() == DTSC::AUDIO){
            if (FlashBufSize == 0){
              //fill buffer with init data, if needed.
              if (Strm.metadata.isMember("audio") && Strm.metadata["audio"].isMember("init")){
                tmp.DTSCAudioInit(Strm);
                tmp.tagTime(Strm.getPacket(0)["time"].asInt());
                FlashBuf.push_back(std::string(tmp.data, tmp.len));
                FlashBufSize += tmp.len;
              }
              if (Strm.metadata.isMember("video") && Strm.metadata["video"].isMember("init")){
                tmp.DTSCVideoInit(Strm);
                tmp.tagTime(Strm.getPacket(0)["time"].asInt());
                FlashBuf.push_back(std::string(tmp.data, tmp.len));
                FlashBufSize += tmp.len;
              }
              FlashBufTime = Strm.getPacket(0)["time"].asInt();
            }
            tmp.DTSCLoader(Strm);
            FlashBuf.push_back(std::string(tmp.data, tmp.len));
            FlashBufSize += tmp.len;
          }
        }
      }
      if ( !ss.connected()){
        break;
      }
    }
  }
  conn.close();
  ss.SendNow(conn.getStats("HTTP_Dynamic").c_str());
  ss.close();
  return 0;
} //Connector_HTTP_Dynamic main function
/// Main Connector_RTMP function
int Connector_RTMP::Connector_RTMP(Socket::Connection conn){
  Socket = conn;
  Socket.setBlocking(false);
  FLV::Tag tag, init_tag;
  DTSC::Stream Strm;
  while ( !Socket.Received().available(1537) && Socket.connected()){
    Socket.spool();
    Util::sleep(5);
  }
  RTMPStream::handshake_in = Socket.Received().remove(1537);
  RTMPStream::rec_cnt += 1537;
  if (RTMPStream::doHandshake()){
    Socket.SendNow(RTMPStream::handshake_out);
    while ( !Socket.Received().available(1536) && Socket.connected()){
      Socket.spool();
      Util::sleep(5);
    }
    Socket.Received().remove(1536);
    RTMPStream::rec_cnt += 1536;
#if DEBUG >= 4
    fprintf(stderr, "Handshake success!\n");
#endif
  }else{
#if DEBUG >= 1
    fprintf(stderr, "Handshake fail!\n");
#endif
    return 0;
  }
  unsigned int lastStats = 0;
  bool firsttime = true;
  while (Socket.connected()){
    if (Socket.spool() || firsttime){
      parseChunk(Socket.Received());
      firsttime = false;
    }else{
      Util::sleep(1); //sleep 1ms to prevent high CPU usage
    }
    if (ready4data){
      if ( !inited){
        //we are ready, connect the socket!
        SS = Util::Stream::getStream(streamname);
        if ( !SS.connected()){
#if DEBUG >= 1
          fprintf(stderr, "Could not connect to server!\n");
#endif
          Socket.close(); //disconnect user
          break;
        }
        SS.setBlocking(false);
#if DEBUG >= 3
        fprintf(stderr, "Everything connected, starting to send video data...\n");
#endif
        SS.SendNow("p\n");
        inited = true;
      }
      if (inited && !nostats){
        long long int now = Util::epoch();
        if (now != lastStats){
          lastStats = now;
          SS.SendNow(Socket.getStats("RTMP").c_str());
        }
      }
      if (SS.spool()){
        while (Strm.parsePacket(SS.Received())){
          if (play_trans != -1){
            //send a status reply
            AMF::Object amfreply("container", AMF::AMF0_DDV_CONTAINER);
            amfreply.addContent(AMF::Object("", "onStatus")); //status reply
            amfreply.addContent(AMF::Object("", (double)play_trans)); //same transaction ID
            amfreply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
            amfreply.addContent(AMF::Object("")); //info
            amfreply.getContentP(3)->addContent(AMF::Object("level", "status"));
            amfreply.getContentP(3)->addContent(AMF::Object("code", "NetStream.Play.Reset"));
            amfreply.getContentP(3)->addContent(AMF::Object("description", "Playing and resetting..."));
            amfreply.getContentP(3)->addContent(AMF::Object("details", "DDV"));
            amfreply.getContentP(3)->addContent(AMF::Object("clientid", (double)1337));
            sendCommand(amfreply, play_msgtype, play_streamid);
            //send streamisrecorded if stream, well, is recorded.
            if (Strm.metadata.isMember("length") && Strm.metadata["length"].asInt() > 0){
              Socket.Send(RTMPStream::SendUSR(4, 1)); //send UCM StreamIsRecorded (4), stream 1
            }
            //send streambegin
            Socket.Send(RTMPStream::SendUSR(0, 1)); //send UCM StreamBegin (0), stream 1
            //and more reply
            amfreply = AMF::Object("container", AMF::AMF0_DDV_CONTAINER);
            amfreply.addContent(AMF::Object("", "onStatus")); //status reply
            amfreply.addContent(AMF::Object("", (double)play_trans)); //same transaction ID
            amfreply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
            amfreply.addContent(AMF::Object("")); //info
            amfreply.getContentP(3)->addContent(AMF::Object("level", "status"));
            amfreply.getContentP(3)->addContent(AMF::Object("code", "NetStream.Play.Start"));
            amfreply.getContentP(3)->addContent(AMF::Object("description", "Playing!"));
            amfreply.getContentP(3)->addContent(AMF::Object("details", "DDV"));
            amfreply.getContentP(3)->addContent(AMF::Object("clientid", (double)1337));
            sendCommand(amfreply, play_msgtype, play_streamid);
            RTMPStream::chunk_snd_max = 102400; //100KiB
            Socket.Send(RTMPStream::SendCTL(1, RTMPStream::chunk_snd_max)); //send chunk size max (msg 1)
            //send dunno?
            Socket.Send(RTMPStream::SendUSR(32, 1)); //send UCM no clue?, stream 1
            play_trans = -1;
          }
          //send init data if needed
          if ( !stream_inited){
            init_tag.DTSCMetaInit(Strm);
            Socket.SendNow(RTMPStream::SendMedia(init_tag));
            if (Strm.metadata.isMember("audio") && Strm.metadata["audio"].isMember("init")){
              init_tag.DTSCAudioInit(Strm);
              Socket.SendNow(RTMPStream::SendMedia(init_tag));
            }
            if (Strm.metadata.isMember("video") && Strm.metadata["video"].isMember("init")){
              init_tag.DTSCVideoInit(Strm);
              Socket.SendNow(RTMPStream::SendMedia(init_tag));
            }
            stream_inited = true;
          }
          //send a tag
          tag.DTSCLoader(Strm);
          Socket.SendNow(RTMPStream::SendMedia(tag));
#if DEBUG >= 8
          fprintf(stderr, "Sent tag to %i: [%u] %s\n", Socket.getSocket(), tag.tagTime(), tag.tagType().c_str());
#endif
        }
      }
    }
  }
  Socket.close();
  SS.SendNow(Socket.getStats("RTMP").c_str());
  SS.close();
#if DEBUG >= 1
  if (FLV::Parse_Error){
    fprintf(stderr, "FLV Parse Error: %s\n", FLV::Error_Str.c_str());
  }
  fprintf(stderr, "User %i disconnected.\n", conn.getSocket());
  if (inited){
    fprintf(stderr, "Status was: inited\n");
  }else{
    if (ready4data){
      fprintf(stderr, "Status was: ready4data\n");
    }else{
      fprintf(stderr, "Status was: connected\n");
    }
  }
#endif
  return 0;
} //Connector_RTMP
///\brief Main function for the TS Connector
///\param conn A socket describing the connection to the client.
///\param streamName The stream to connect to.
///\return The exit code of the connector.
int tsConnector(Socket::Connection conn, std::string streamName){
  std::string ToPack;
  TS::Packet PackData;
  std::string DTMIData;
  int PacketNumber = 0;
  long long unsigned int TimeStamp = 0;
  int ThisNaluSize;
  char VideoCounter = 0;
  char AudioCounter = 0;
  bool WritePesHeader;
  bool IsKeyFrame;
  bool FirstKeyFrame = true;
  bool FirstIDRInKeyFrame;
  MP4::AVCC avccbox;
  bool haveAvcc = false;
  DTSC::Stream Strm;
  bool inited = false;
  Socket::Connection ss;
  while (conn.connected()){
    if ( !inited){
      ss = Util::Stream::getStream(streamName);
      if ( !ss.connected()){
#if DEBUG >= 1
        fprintf(stderr, "Could not connect to server!\n");
#endif
        conn.close();
        break;
      }
      ss.SendNow("p\n");
      inited = true;
    }
    if (ss.spool()){
      while (Strm.parsePacket(ss.Received())){
        if ( !haveAvcc){
          avccbox.setPayload(Strm.metadata["video"]["init"].asString());
          haveAvcc = true;
        }
        std::stringstream TSBuf;
        Socket::Buffer ToPack;
        //write PAT and PMT TS packets
        if (PacketNumber == 0){
          PackData.DefaultPAT();
          TSBuf.write(PackData.ToString(), 188);
          PackData.DefaultPMT();
          TSBuf.write(PackData.ToString(), 188);
          PacketNumber += 2;
        }
        int PIDno = 0;
        char * ContCounter = 0;
        if (Strm.lastType() == DTSC::VIDEO){
          IsKeyFrame = Strm.getPacket(0).isMember("keyframe");
          if (IsKeyFrame){
            TimeStamp = (Strm.getPacket(0)["time"].asInt() * 27000);
          }
          ToPack.append(avccbox.asAnnexB());
          while (Strm.lastData().size()){
            ThisNaluSize = (Strm.lastData()[0] << 24) + (Strm.lastData()[1] << 16) + (Strm.lastData()[2] << 8) + Strm.lastData()[3];
            Strm.lastData().replace(0, 4, TS::NalHeader, 4);
            if (ThisNaluSize + 4 == Strm.lastData().size()){
              ToPack.append(Strm.lastData());
              break;
            }else{
              ToPack.append(Strm.lastData().c_str(), ThisNaluSize + 4);
              Strm.lastData().erase(0, ThisNaluSize + 4);
            }
          }
          ToPack.prepend(TS::Packet::getPESVideoLeadIn(0ul, Strm.getPacket(0)["time"].asInt() * 90));
          PIDno = 0x100;
          ContCounter = &VideoCounter;
        }else if (Strm.lastType() == DTSC::AUDIO){
          ToPack.append(TS::GetAudioHeader(Strm.lastData().size(), Strm.metadata["audio"]["init"].asString()));
          ToPack.append(Strm.lastData());
          ToPack.prepend(TS::Packet::getPESAudioLeadIn(ToPack.bytes(1073741824ul), Strm.getPacket(0)["time"].asInt() * 90));
          PIDno = 0x101;
          ContCounter = &AudioCounter;
        }
        //initial packet
        PackData.Clear();
        PackData.PID(PIDno);
        PackData.ContinuityCounter(( *ContCounter)++);
        PackData.UnitStart(1);
        if (IsKeyFrame){
          PackData.RandomAccess(1);
          PackData.PCR(TimeStamp);
        }
        unsigned int toSend = PackData.AddStuffing(ToPack.bytes(184));
        std::string gonnaSend = ToPack.remove(toSend);
        PackData.FillFree(gonnaSend);
        TSBuf.write(PackData.ToString(), 188);
        PacketNumber++;
        //rest of packets
        while (ToPack.size()){
          PackData.Clear();
          PackData.PID(PIDno);
          PackData.ContinuityCounter(( *ContCounter)++);
          toSend = PackData.AddStuffing(ToPack.bytes(184));
          gonnaSend = ToPack.remove(toSend);
          PackData.FillFree(gonnaSend);
          TSBuf.write(PackData.ToString(), 188);
          PacketNumber++;
        }
        TSBuf.flush();
        if (TSBuf.str().size()){
          conn.SendNow(TSBuf.str().c_str(), TSBuf.str().size());
          TSBuf.str("");
        }
        TSBuf.str("");
        PacketNumber = 0;
      }
    }
  }
  return 0;
}
///\brief Main function for the RTMP Connector
///\param conn A socket describing the connection to the client.
///\return The exit code of the connector.
int rtmpConnector(Socket::Connection conn){
  Socket = conn;
  Socket.setBlocking(false);
  FLV::Tag tag, init_tag;
  DTSC::Stream Strm;
  while ( !Socket.Received().available(1537) && Socket.connected()){
    Socket.spool();
    Util::sleep(5);
  }
  RTMPStream::handshake_in = Socket.Received().remove(1537);
  RTMPStream::rec_cnt += 1537;
  if (RTMPStream::doHandshake()){
    Socket.SendNow(RTMPStream::handshake_out);
    while ( !Socket.Received().available(1536) && Socket.connected()){
      Socket.spool();
      Util::sleep(5);
    }
    Socket.Received().remove(1536);
    RTMPStream::rec_cnt += 1536;
#if DEBUG >= 5
    fprintf(stderr, "Handshake success!\n");
#endif
  }else{
#if DEBUG >= 5
    fprintf(stderr, "Handshake fail!\n");
#endif
    return 0;
  }
  unsigned int lastStats = 0;
  bool firsttime = true;
  while (Socket.connected()){
    if (Socket.spool() || firsttime){
      parseChunk(Socket.Received());
      firsttime = false;
    }else{
      Util::sleep(1); //sleep 1ms to prevent high CPU usage
    }
    if (ready4data){
      if ( !inited){
        //we are ready, connect the socket!
        ss = Util::Stream::getStream(streamName);
        if ( !ss.connected()){
#if DEBUG >= 1
          fprintf(stderr, "Could not connect to server!\n");
#endif
          Socket.close(); //disconnect user
          break;
        }
        ss.setBlocking(false);
        Strm.waitForMeta(ss);
        //find first audio and video tracks
        for (JSON::ObjIter objIt = Strm.metadata["tracks"].ObjBegin(); objIt != Strm.metadata["tracks"].ObjEnd(); objIt++){
          if (videoID == -1 && objIt->second["type"].asStringRef() == "video"){
            videoID = objIt->second["trackid"].asInt();
          }
          if (audioID == -1 && objIt->second["type"].asStringRef() == "audio"){
            audioID = objIt->second["trackid"].asInt();
          }
        }
        //select the tracks and play
        std::stringstream cmd;
        cmd << "t";
        if (videoID != -1){
          cmd << " " << videoID;
        }
        if (audioID != -1){
          cmd << " " << audioID;
        }
        cmd << "\np\n";
        ss.SendNow(cmd.str().c_str());
        inited = true;
      }
      if (inited && !noStats){
        long long int now = Util::epoch();
        if (now != lastStats){
          lastStats = now;
          ss.SendNow(Socket.getStats("RTMP"));
        }
      }
      if (ss.spool()){
        while (Strm.parsePacket(ss.Received())){
          if (playTransaction != -1){
            //send a status reply
            AMF::Object amfreply("container", AMF::AMF0_DDV_CONTAINER);
            amfreply.addContent(AMF::Object("", "onStatus")); //status reply
            amfreply.addContent(AMF::Object("", (double)playTransaction)); //same transaction ID
            amfreply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
            amfreply.addContent(AMF::Object("")); //info
            amfreply.getContentP(3)->addContent(AMF::Object("level", "status"));
            amfreply.getContentP(3)->addContent(AMF::Object("code", "NetStream.Play.Reset"));
            amfreply.getContentP(3)->addContent(AMF::Object("description", "Playing and resetting..."));
            amfreply.getContentP(3)->addContent(AMF::Object("details", "DDV"));
            amfreply.getContentP(3)->addContent(AMF::Object("clientid", (double)1337));
            sendCommand(amfreply, playMessageType, playStreamId);
            //send streamisrecorded if stream, well, is recorded.
            if (Strm.metadata.isMember("length") && Strm.metadata["length"].asInt() > 0){
              Socket.Send(RTMPStream::SendUSR(4, 1)); //send UCM StreamIsRecorded (4), stream 1
            }
            //send streambegin
            Socket.Send(RTMPStream::SendUSR(0, 1)); //send UCM StreamBegin (0), stream 1
            //and more reply
            amfreply = AMF::Object("container", AMF::AMF0_DDV_CONTAINER);
            amfreply.addContent(AMF::Object("", "onStatus")); //status reply
            amfreply.addContent(AMF::Object("", (double)playTransaction)); //same transaction ID
            amfreply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
            amfreply.addContent(AMF::Object("")); //info
            amfreply.getContentP(3)->addContent(AMF::Object("level", "status"));
            amfreply.getContentP(3)->addContent(AMF::Object("code", "NetStream.Play.Start"));
            amfreply.getContentP(3)->addContent(AMF::Object("description", "Playing!"));
            amfreply.getContentP(3)->addContent(AMF::Object("details", "DDV"));
            amfreply.getContentP(3)->addContent(AMF::Object("clientid", (double)1337));
            sendCommand(amfreply, playMessageType, playStreamId);
            RTMPStream::chunk_snd_max = 102400; //100KiB
            Socket.Send(RTMPStream::SendCTL(1, RTMPStream::chunk_snd_max)); //send chunk size max (msg 1)
            //send dunno?
            Socket.Send(RTMPStream::SendUSR(32, 1)); //send UCM no clue?, stream 1
            playTransaction = -1;
          }
          //send init data if needed
          if ( !streamInited){
            init_tag.DTSCMetaInit(Strm, Strm.getTrackById(videoID), Strm.getTrackById(audioID));
            Socket.SendNow(RTMPStream::SendMedia(init_tag));
            if (audioID != -1 && Strm.getTrackById(audioID).isMember("init")){
              init_tag.DTSCAudioInit(Strm.getTrackById(audioID));
              Socket.SendNow(RTMPStream::SendMedia(init_tag));
            }
            if (videoID != -1 && Strm.getTrackById(videoID).isMember("init")){
              init_tag.DTSCVideoInit(Strm.getTrackById(videoID));
              Socket.SendNow(RTMPStream::SendMedia(init_tag));
            }
            streamInited = true;
          }
          //send a tag
          tag.DTSCLoader(Strm);
          Socket.SendNow(RTMPStream::SendMedia(tag));
#if DEBUG >= 8
          fprintf(stderr, "Sent tag to %i: [%u] %s\n", Socket.getSocket(), tag.tagTime(), tag.tagType().c_str());
#endif
        }
      }
    }
  }
  Socket.close();
  ss.SendNow(Socket.getStats("RTMP").c_str());
  ss.close();
  return 0;
} //Connector_RTMP