///\brief Converts DTSC from stdin to FLV on stdout.
///\return The return code for the converter.
int DTSC2FLV(){
  FLV::Tag FLV_out; // Temporary storage for outgoing FLV data.
  DTSC::Stream Strm;
  std::string inBuffer;
  char charBuffer[1024 * 10];
  unsigned int charCount;
  bool doneheader = false;
  int videoID = -1, audioID = -1;
  while (std::cin.good()){
    if (Strm.parsePacket(inBuffer)){
      if ( !doneheader){
        //find first audio and video tracks
        for (JSON::ObjIter objIt = Strm.metadata["tracks"].ObjBegin(); objIt != Strm.metadata["tracks"].ObjEnd(); objIt++){
          if (videoID == -1 && objIt->second["type"].asString() == "video"){
            videoID = objIt->second["trackid"].asInt();
          }
          if (audioID == -1 && objIt->second["type"].asString() == "audio"){
            audioID = objIt->second["trackid"].asInt();
          }
        }
        doneheader = true;
        std::cout.write(FLV::Header, 13);
        FLV_out.DTSCMetaInit(Strm, Strm.getTrackById(videoID), Strm.getTrackById(audioID));
        std::cout.write(FLV_out.data, FLV_out.len);
        if (videoID != -1 && Strm.getTrackById(videoID).isMember("init")){
          FLV_out.DTSCVideoInit(Strm.getTrackById(videoID));
          std::cout.write(FLV_out.data, FLV_out.len);
        }
        if (audioID != -1 && Strm.getTrackById(audioID).isMember("init")){
          FLV_out.DTSCAudioInit(Strm.getTrackById(audioID));
          std::cout.write(FLV_out.data, FLV_out.len);
        }
      }
      if (FLV_out.DTSCLoader(Strm)){
        std::cout.write(FLV_out.data, FLV_out.len);
      }
    }else{
      std::cin.read(charBuffer, 1024 * 10);
      charCount = std::cin.gcount();
      inBuffer.append(charBuffer, charCount);
    }
  }
  std::cerr << "Done!" << std::endl;
  return 0;
} //DTSC2FLV
///\brief Converts DTSC from stdin to FLV on stdout.
///\return The return code for the converter.
int DTSC2FLV(){
  FLV::Tag FLV_out; // Temporary storage for outgoing FLV data.
  DTSC::Stream Strm;
  std::string inBuffer;
  char charBuffer[1024 * 10];
  unsigned int charCount;
  bool doneheader = false;
  while (std::cin.good()){
    if (Strm.parsePacket(inBuffer)){
      if ( !doneheader){
        doneheader = true;
        std::cout.write(FLV::Header, 13);
        FLV_out.DTSCMetaInit(Strm);
        std::cout.write(FLV_out.data, FLV_out.len);
        if (Strm.metadata.isMember("video") && Strm.metadata["video"].isMember("init")){
          FLV_out.DTSCVideoInit(Strm);
          std::cout.write(FLV_out.data, FLV_out.len);
        }
        if (Strm.metadata.isMember("audio") && Strm.metadata["audio"].isMember("init")){
          FLV_out.DTSCAudioInit(Strm);
          std::cout.write(FLV_out.data, FLV_out.len);
        }
      }
      if (FLV_out.DTSCLoader(Strm)){
        std::cout.write(FLV_out.data, FLV_out.len);
      }
    }else{
      std::cin.read(charBuffer, 1024 * 10);
      charCount = std::cin.gcount();
      inBuffer.append(charBuffer, charCount);
    }
  }
  std::cerr << "Done!" << std::endl;
  return 0;
} //DTSC2FLV
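// Illustrative sketch (not part of the original converter, helper name is hypothetical):
// the 13 bytes written via FLV::Header above correspond to the standard FLV file header,
// that is the signature "FLV", version 1, a flags byte marking which of audio/video are
// present, the 4-byte header size (always 9), followed by the 4-byte PreviousTagSize0
// field (0). A minimal builder for that header, assuming both audio and video, could be:
static void buildFlvFileHeader(char out[13]){
  out[0] = 'F'; out[1] = 'L'; out[2] = 'V'; //signature
  out[3] = 0x01; //version 1
  out[4] = 0x05; //flags: audio present (0x04) + video present (0x01)
  out[5] = 0; out[6] = 0; out[7] = 0; out[8] = 9; //header size, big endian, always 9
  out[9] = 0; out[10] = 0; out[11] = 0; out[12] = 0; //PreviousTagSize0
}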
///\brief Main function for the TS Connector ///\param conn A socket describing the connection the client. ///\param streamName The stream to connect to. ///\return The exit code of the connector. int tsConnector(Socket::Connection conn, std::string streamName, std::string trackIDs){ std::string ToPack; TS::Packet PackData; std::string DTMIData; int PacketNumber = 0; long long unsigned int TimeStamp = 0; int ThisNaluSize; char VideoCounter = 0; char AudioCounter = 0; bool WritePesHeader; bool IsKeyFrame; bool FirstKeyFrame = true; bool FirstIDRInKeyFrame; MP4::AVCC avccbox; bool haveAvcc = false; DTSC::Stream Strm; bool inited = false; Socket::Connection ss; while (conn.connected()){ if ( !inited){ ss = Util::Stream::getStream(streamName); if ( !ss.connected()){ #if DEBUG >= 1 fprintf(stderr, "Could not connect to server!\n"); #endif conn.close(); break; } if(trackIDs == ""){ // no track ids given? Find the first video and first audio track (if available) and use those! int videoID = -1; int audioID = -1; Strm.waitForMeta(ss); if (Strm.metadata.isMember("tracks")){ for (JSON::ObjIter trackIt = Strm.metadata["tracks"].ObjBegin(); trackIt != Strm.metadata["tracks"].ObjEnd(); trackIt++){ if (audioID == -1 && trackIt->second["type"].asString() == "audio"){ audioID = trackIt->second["trackid"].asInt(); if( trackIDs != ""){ trackIDs += " " + trackIt->second["trackid"].asString(); }else{ trackIDs = trackIt->second["trackid"].asString(); } } if (videoID == -1 && trackIt->second["type"].asString() == "video"){ videoID = trackIt->second["trackid"].asInt(); if( trackIDs != ""){ trackIDs += " " + trackIt->second["trackid"].asString(); }else{ trackIDs = trackIt->second["trackid"].asString(); } } } // for iterator } // if isMember("tracks") } // if trackIDs == "" std::string cmd = "t " + trackIDs + "\ns 0\np\n"; ss.SendNow( cmd ); inited = true; } if (ss.spool()){ while (Strm.parsePacket(ss.Received())){ std::stringstream TSBuf; Socket::Buffer ToPack; //write PAT and PMT TS packets if (PacketNumber == 0){ PackData.DefaultPAT(); TSBuf.write(PackData.ToString(), 188); PackData.DefaultPMT(); TSBuf.write(PackData.ToString(), 188); PacketNumber += 2; } int PIDno = 0; char * ContCounter = 0; if (Strm.lastType() == DTSC::VIDEO){ if ( !haveAvcc){ avccbox.setPayload(Strm.getTrackById(Strm.getPacket()["trackid"].asInt())["init"].asString()); haveAvcc = true; } IsKeyFrame = Strm.getPacket().isMember("keyframe"); if (IsKeyFrame){ TimeStamp = (Strm.getPacket()["time"].asInt() * 27000); } ToPack.append(avccbox.asAnnexB()); while (Strm.lastData().size() > 4){ ThisNaluSize = (Strm.lastData()[0] << 24) + (Strm.lastData()[1] << 16) + (Strm.lastData()[2] << 8) + Strm.lastData()[3]; Strm.lastData().replace(0, 4, TS::NalHeader, 4); if (ThisNaluSize + 4 == Strm.lastData().size()){ ToPack.append(Strm.lastData()); break; }else{ ToPack.append(Strm.lastData().c_str(), ThisNaluSize + 4); Strm.lastData().erase(0, ThisNaluSize + 4); } } ToPack.prepend(TS::Packet::getPESVideoLeadIn(0ul, Strm.getPacket()["time"].asInt() * 90)); PIDno = 0x100 - 1 + Strm.getPacket()["trackid"].asInt(); ContCounter = &VideoCounter; }else if (Strm.lastType() == DTSC::AUDIO){ ToPack.append(TS::GetAudioHeader(Strm.lastData().size(), Strm.getTrackById(Strm.getPacket()["trackid"].asInt())["init"].asString())); ToPack.append(Strm.lastData()); ToPack.prepend(TS::Packet::getPESAudioLeadIn(ToPack.bytes(1073741824ul), Strm.getPacket()["time"].asInt() * 90)); PIDno = 0x100 - 1 + Strm.getPacket()["trackid"].asInt(); ContCounter = &AudioCounter; IsKeyFrame = false; 
} //initial packet PackData.Clear(); PackData.PID(PIDno); PackData.ContinuityCounter(( *ContCounter)++); PackData.UnitStart(1); if (IsKeyFrame){ PackData.RandomAccess(1); PackData.PCR(TimeStamp); } unsigned int toSend = PackData.AddStuffing(ToPack.bytes(184)); std::string gonnaSend = ToPack.remove(toSend); PackData.FillFree(gonnaSend); TSBuf.write(PackData.ToString(), 188); PacketNumber++; //rest of packets while (ToPack.size()){ PackData.Clear(); PackData.PID(PIDno); PackData.ContinuityCounter(( *ContCounter)++); toSend = PackData.AddStuffing(ToPack.bytes(184)); gonnaSend = ToPack.remove(toSend); PackData.FillFree(gonnaSend); TSBuf.write(PackData.ToString(), 188); PacketNumber++; } TSBuf.flush(); if (TSBuf.str().size()){ conn.SendNow(TSBuf.str().c_str(), TSBuf.str().size()); TSBuf.str(""); } TSBuf.str(""); PacketNumber = 0; } }else{ Util::sleep(1000); conn.spool(); } } return 0; }
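// Illustrative sketch (hypothetical helpers, not in the original source): the muxing loop
// above converts DTSC timestamps (milliseconds) into the two MPEG-TS clock domains, 90 kHz
// for the PES presentation timestamp (time * 90) and 27 MHz for the PCR (time * 27000),
// and slices each PES payload into 188-byte TS packets carrying at most 184 payload bytes
// each (the remaining 4 bytes are TS header).
static long long unsigned int msToPTS(long long unsigned int ms){
  return ms * 90; //90 kHz PES presentation timestamp
}
static long long unsigned int msToPCR(long long unsigned int ms){
  return ms * 27000; //27 MHz program clock reference
}
static unsigned int tsPacketsNeeded(unsigned int payloadBytes){
  return (payloadBytes + 183) / 184; //184 usable payload bytes per 188-byte TS packet
}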
/// Main function for Connector_HTTP_Dynamic int Connector_HTTP_Dynamic(Socket::Connection conn){ std::deque<std::string> FlashBuf; std::vector<int> Timestamps; int FlashBufSize = 0; long long int FlashBufTime = 0; DTSC::Stream Strm; //Incoming stream buffer. HTTP::Parser HTTP_R, HTTP_S; //HTTP Receiver en HTTP Sender. bool ready4data = false; //Set to true when streaming is to begin. bool pending_manifest = false; bool receive_marks = false; //when set to true, this stream will ignore keyframes and instead use pause marks bool inited = false; Socket::Connection ss( -1); std::string streamname; std::string recBuffer = ""; bool wantsVideo = false; bool wantsAudio = false; std::string Quality; int Segment = -1; long long int ReqFragment = -1; int temp; std::string tempStr; int Flash_RequestPending = 0; unsigned int lastStats = 0; conn.setBlocking(false); //do not block on conn.spool() when no data is available while (conn.connected()){ if (conn.spool() || conn.Received().size()){ //make sure it ends in a \n if ( *(conn.Received().get().rbegin()) != '\n'){ std::string tmp = conn.Received().get(); conn.Received().get().clear(); if (conn.Received().size()){ conn.Received().get().insert(0, tmp); }else{ conn.Received().append(tmp); } } if (HTTP_R.Read(conn.Received().get())){ #if DEBUG >= 4 std::cout << "Received request: " << HTTP_R.getUrl() << std::endl; #endif conn.setHost(HTTP_R.GetHeader("X-Origin")); if (HTTP_R.url.find("Manifest") == std::string::npos){ streamname = HTTP_R.url.substr(8, HTTP_R.url.find("/", 8) - 12); if ( !ss){ ss = Util::Stream::getStream(streamname); if ( !ss.connected()){ #if DEBUG >= 1 fprintf(stderr, "Could not connect to server!\n"); #endif ss.close(); HTTP_S.Clean(); HTTP_S.SetBody("No such stream " + streamname + " is available on the system. Please try again.\n"); conn.SendNow(HTTP_S.BuildResponse("404", "Not found")); ready4data = false; continue; } ss.setBlocking(false); inited = true; } Quality = HTTP_R.url.substr(HTTP_R.url.find("/Q(", 8) + 3); Quality = Quality.substr(0, Quality.find(")")); tempStr = HTTP_R.url.substr(HTTP_R.url.find(")/") + 2); wantsAudio = false; wantsVideo = false; if (tempStr[0] == 'A'){ wantsAudio = true; } if (tempStr[0] == 'V'){ wantsVideo = true; } tempStr = tempStr.substr(tempStr.find("(") + 1); ReqFragment = atoll(tempStr.substr(0, tempStr.find(")")).c_str()); #if DEBUG >= 4 printf("Quality: %s, Frag %d\n", Quality.c_str(), (ReqFragment / 10000)); #endif std::stringstream sstream; sstream << "s " << (ReqFragment / 10000) << "\no \n"; ss.SendNow(sstream.str().c_str()); Flash_RequestPending++; }else{ streamname = HTTP_R.url.substr(8, HTTP_R.url.find("/", 8) - 12); if ( !Strm.metadata.isNull()){ HTTP_S.Clean(); HTTP_S.SetHeader("Content-Type", "text/xml"); HTTP_S.SetHeader("Cache-Control", "no-cache"); if (Strm.metadata.isMember("length")){ receive_marks = true; } std::string manifest = BuildManifest(streamname, Strm.metadata); HTTP_S.SetBody(manifest); conn.SendNow(HTTP_S.BuildResponse("200", "OK")); #if DEBUG >= 3 printf("Sent manifest\n"); #endif pending_manifest = false; }else{ pending_manifest = true; } } ready4data = true; HTTP_R.Clean(); //clean for any possible next requests } }else{ if (Flash_RequestPending){ usleep(1000); //sleep 1ms }else{ usleep(10000); //sleep 10ms } } if (ready4data){ if ( !inited){ //we are ready, connect the socket! 
ss = Util::Stream::getStream(streamname); if ( !ss.connected()){ #if DEBUG >= 1 fprintf(stderr, "Could not connect to server!\n"); #endif ss.close(); HTTP_S.Clean(); HTTP_S.SetBody("No such stream " + streamname + " is available on the system. Please try again.\n"); conn.SendNow(HTTP_S.BuildResponse("404", "Not found")); ready4data = false; continue; } ss.setBlocking(false); #if DEBUG >= 3 fprintf(stderr, "Everything connected, starting to send video data...\n"); #endif inited = true; } unsigned int now = Util::epoch(); if (now != lastStats){ lastStats = now; ss.SendNow(conn.getStats("HTTP_Smooth").c_str()); } if (ss.spool()){ while (Strm.parsePacket(ss.Received())){ if (Strm.getPacket(0).isMember("time")){ if ( !Strm.metadata.isMember("firsttime")){ Strm.metadata["firsttime"] = Strm.getPacket(0)["time"]; }else{ if ( !Strm.metadata.isMember("length") || Strm.metadata["length"].asInt() == 0){ Strm.getPacket(0)["time"] = Strm.getPacket(0)["time"].asInt() - Strm.metadata["firsttime"].asInt(); } } Strm.metadata["lasttime"] = Strm.getPacket(0)["time"]; } if (pending_manifest){ HTTP_S.Clean(); HTTP_S.SetHeader("Content-Type", "text/xml"); HTTP_S.SetHeader("Cache-Control", "no-cache"); if (Strm.metadata.isMember("length")){ receive_marks = true; } std::string manifest = BuildManifest(streamname, Strm.metadata); HTTP_S.SetBody(manifest); conn.SendNow(HTTP_S.BuildResponse("200", "OK")); #if DEBUG >= 3 printf("Sent manifest\n"); #endif pending_manifest = false; } if ( !receive_marks && Strm.metadata.isMember("length")){ receive_marks = true; } if (Strm.lastType() == DTSC::PAUSEMARK){ Timestamps.push_back(Strm.getPacket(0)["time"].asInt()); } if ((Strm.getPacket(0).isMember("keyframe") && !receive_marks) || Strm.lastType() == DTSC::PAUSEMARK){ #if DEBUG >= 4 fprintf(stderr, "Received a %s fragment of %i bytes.\n", Strm.getPacket(0)["datatype"].asString().c_str(), FlashBufSize); #endif if (Flash_RequestPending > 0 && FlashBufSize){ #if DEBUG >= 3 fprintf(stderr, "Sending a fragment..."); #endif //static std::string btstrp; //btstrp = GenerateBootstrap(streamname, Strm.metadata, ReqFragment, FlashBufTime, Strm.getPacket(0)["time"]); HTTP_S.Clean(); HTTP_S.SetHeader("Content-Type", "video/mp4"); HTTP_S.SetBody(""); int myDuration; MP4::MFHD mfhd_box; for (int i = 0; i < Strm.metadata["keytime"].size(); i++){ if (Strm.metadata["keytime"][i].asInt() >= (ReqFragment / 10000)){ mfhd_box.setSequenceNumber(i + 1); if (i != Strm.metadata["keytime"].size()){ myDuration = Strm.metadata["keytime"][i + 1].asInt() - Strm.metadata["keytime"][i].asInt(); }else{ myDuration = Strm.metadata["lastms"].asInt() - Strm.metadata["keytime"][i].asInt(); } myDuration = myDuration * 10000; break; } } MP4::TFHD tfhd_box; tfhd_box.setFlags(MP4::tfhdSampleFlag); tfhd_box.setTrackID(1); tfhd_box.setDefaultSampleFlags(0x000000C0 | MP4::noIPicture | MP4::noDisposable | MP4::noKeySample); MP4::TRUN trun_box; //maybe reinsert dataOffset trun_box.setFlags(MP4::trundataOffset | MP4::trunfirstSampleFlags | MP4::trunsampleDuration | MP4::trunsampleSize); trun_box.setDataOffset(42); trun_box.setFirstSampleFlags(0x00000040 | MP4::isIPicture | MP4::noDisposable | MP4::isKeySample); for (int i = 0; i < FlashBuf.size(); i++){ MP4::trunSampleInformation trunSample; trunSample.sampleSize = FlashBuf[i].size(); //trunSample.sampleDuration = (Timestamps[i+1]-Timestamps[i]) * 10000; trunSample.sampleDuration = (((double)myDuration / FlashBuf.size()) * i) - (((double)myDuration / FlashBuf.size()) * (i - 1)); trun_box.setSampleInformation(trunSample, 
i); } MP4::SDTP sdtp_box; sdtp_box.setVersion(0); sdtp_box.setValue(0x24, 4); for (int i = 1; i < FlashBuf.size(); i++){ sdtp_box.setValue(0x14, 4 + i); } MP4::TRAF traf_box; traf_box.setContent(tfhd_box, 0); traf_box.setContent(trun_box, 1); traf_box.setContent(sdtp_box, 2); MP4::MOOF moof_box; moof_box.setContent(mfhd_box, 0); moof_box.setContent(traf_box, 1); //setting tha offsets! trun_box.setDataOffset(moof_box.boxedSize() + 8); traf_box.setContent(trun_box, 1); moof_box.setContent(traf_box, 1); //std::cerr << "\t[encoded] = " << ((MP4::TRUN&)(((MP4::TRAF&)(moof_box.getContent(1))).getContent(1))).getDataOffset() << std::endl; HTTP_S.SetHeader("Content-Length", FlashBufSize + 8 + moof_box.boxedSize()); //32+33+btstrp.size()); conn.SendNow(HTTP_S.BuildResponse("200", "OK")); conn.SendNow(moof_box.asBox(), moof_box.boxedSize()); unsigned long size = htonl(FlashBufSize+8); conn.SendNow((char*) &size, 4); conn.SendNow("mdat", 4); while (FlashBuf.size() > 0){ conn.SendNow(FlashBuf.front()); FlashBuf.pop_front(); } Flash_RequestPending--; #if DEBUG >= 3 fprintf(stderr, "Done\n"); #endif } FlashBuf.clear(); FlashBufSize = 0; } if ((wantsAudio && Strm.lastType() == DTSC::AUDIO) || (wantsVideo && Strm.lastType() == DTSC::VIDEO)){ FlashBuf.push_back(Strm.lastData()); FlashBufSize += Strm.lastData().size(); Timestamps.push_back(Strm.getPacket(0)["time"].asInt()); } } if (pending_manifest && !Strm.metadata.isNull()){ HTTP_S.Clean(); HTTP_S.SetHeader("Content-Type", "text/xml"); HTTP_S.SetHeader("Cache-Control", "no-cache"); if (Strm.metadata.isMember("length")){ receive_marks = true; } std::string manifest = BuildManifest(streamname, Strm.metadata); HTTP_S.SetBody(manifest); conn.SendNow(HTTP_S.BuildResponse("200", "OK")); #if DEBUG >= 3 printf("Sent manifest\n"); #endif pending_manifest = false; } } if ( !ss.connected()){ break; } } } conn.close(); ss.SendNow(conn.getStats("HTTP_Smooth").c_str()); ss.close(); return 0; } //Connector_HTTP_Smooth main function
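// Illustrative sketch (hypothetical helper, not in the original source): the fragment reply
// above is framed as an MP4 'moof' box followed by an 'mdat' box. The mdat header is the
// total box size (payload plus 8 header bytes) as a 32-bit big-endian integer followed by
// the four characters "mdat", which is also why the trun data offset is set to
// moof_box.boxedSize() + 8. Assuming std::string and htonl are available, as in the code above:
static std::string buildMdatHeader(unsigned long payloadSize){
  unsigned int size = htonl(payloadSize + 8); //box size includes the 8 header bytes
  std::string header((char*) &size, 4);
  header.append("mdat", 4);
  return header;
}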
///\brief Main function for the HTTP Dynamic Connector ///\param conn A socket describing the connection to the client. ///\return The exit code of the connector. int dynamicConnector(Socket::Connection conn){ std::deque<std::string> FlashBuf; int FlashBufSize = 0; long long int FlashBufTime = 0; FLV::Tag tmp; //temporary tag DTSC::Stream Strm; //Incoming stream buffer. HTTP::Parser HTTP_R, HTTP_S; //HTTP Receiver and HTTP Sender. Socket::Connection ss( -1); std::string streamname; std::string recBuffer = ""; std::string Quality; int Segment = -1; int ReqFragment = -1; unsigned int lastStats = 0; conn.setBlocking(false); //do not block on conn.spool() when no data is available while (conn.connected()){ if (conn.spool() || conn.Received().size()){ //make sure it ends in a \n if ( *(conn.Received().get().rbegin()) != '\n'){ std::string tmp = conn.Received().get(); conn.Received().get().clear(); if (conn.Received().size()){ conn.Received().get().insert(0, tmp); }else{ conn.Received().append(tmp); } } if (HTTP_R.Read(conn.Received().get())){ #if DEBUG >= 5 std::cout << "Received request: " << HTTP_R.getUrl() << std::endl; #endif conn.setHost(HTTP_R.GetHeader("X-Origin")); streamname = HTTP_R.GetHeader("X-Stream"); if ( !ss){ ss = Util::Stream::getStream(streamname); if ( !ss.connected()){ HTTP_S.Clean(); HTTP_S.SetBody("No such stream is available on the system. Please try again.\n"); conn.SendNow(HTTP_S.BuildResponse("404", "Not found")); continue; } ss.setBlocking(false); //make sure metadata is received while ( !Strm.metadata && ss.connected()){ if (ss.spool()){ while (Strm.parsePacket(ss.Received())){ //do nothing } } } } if (HTTP_R.url.find(".abst") != std::string::npos){ HTTP_S.Clean(); HTTP_S.SetBody(dynamicBootstrap(streamname, Strm.metadata)); HTTP_S.SetHeader("Content-Type", "binary/octet"); HTTP_S.SetHeader("Cache-Control", "no-cache"); conn.SendNow(HTTP_S.BuildResponse("200", "OK")); HTTP_R.Clean(); //clean for any possible next requests continue; } if (HTTP_R.url.find("f4m") == std::string::npos){ Quality = HTTP_R.url.substr(HTTP_R.url.find("/", 10) + 1); Quality = Quality.substr(0, Quality.find("Seg")); int temp; temp = HTTP_R.url.find("Seg") + 3; Segment = atoi(HTTP_R.url.substr(temp, HTTP_R.url.find("-", temp) - temp).c_str()); temp = HTTP_R.url.find("Frag") + 4; ReqFragment = atoi(HTTP_R.url.substr(temp).c_str()); #if DEBUG >= 5 printf("Quality: %s, Seg %d Frag %d\n", Quality.c_str(), Segment, ReqFragment); #endif if (Strm.metadata.isMember("live")){ int seekable = Strm.canSeekFrame(ReqFragment); if (seekable == 0){ // if the fragment in question is available, check if the next is available too seekable = Strm.canSeekFrame(ReqFragment + 1); } if (seekable < 0){ HTTP_S.Clean(); HTTP_S.SetBody("The requested fragment is no longer kept in memory on the server and cannot be served.\n"); conn.SendNow(HTTP_S.BuildResponse("412", "Fragment out of range")); HTTP_R.Clean(); //clean for any possible next requests std::cout << "Fragment @ F" << ReqFragment << " too old (F" << Strm.metadata["keynum"][0u].asInt() << " - " << Strm.metadata["keynum"][Strm.metadata["keynum"].size() - 1].asInt() << ")" << std::endl; continue; } if (seekable > 0){ HTTP_S.Clean(); HTTP_S.SetBody("Proxy, re-request this in a second or two.\n"); conn.SendNow(HTTP_S.BuildResponse("208", "Ask again later")); HTTP_R.Clean(); //clean for any possible next requests std::cout << "Fragment @ F" << ReqFragment << " not available yet (F" << Strm.metadata["keynum"][0u].asInt() << " - " << 
Strm.metadata["keynum"][Strm.metadata["keynum"].size() - 1].asInt() << ")" << std::endl; continue; } } std::stringstream sstream; sstream << "f " << ReqFragment << "\no \n"; ss.SendNow(sstream.str().c_str()); }else{ HTTP_S.Clean(); HTTP_S.SetHeader("Content-Type", "text/xml"); HTTP_S.SetHeader("Cache-Control", "no-cache"); std::string manifest = dynamicIndex(streamname, Strm.metadata); HTTP_S.SetBody(manifest); conn.SendNow(HTTP_S.BuildResponse("200", "OK")); } HTTP_R.Clean(); //clean for any possible next requests } }else{ Util::sleep(1); } if (ss.connected()){ unsigned int now = Util::epoch(); if (now != lastStats){ lastStats = now; ss.SendNow(conn.getStats("HTTP_Dynamic").c_str()); } if (ss.spool()){ while (Strm.parsePacket(ss.Received())){ if (Strm.lastType() == DTSC::PAUSEMARK){ if (FlashBufSize){ HTTP_S.Clean(); HTTP_S.SetHeader("Content-Type", "video/mp4"); HTTP_S.SetBody(""); std::string new_strap = dynamicBootstrap(streamname, Strm.metadata, ReqFragment); HTTP_S.SetHeader("Content-Length", FlashBufSize + 8 + new_strap.size()); //32+33+btstrp.size()); conn.SendNow(HTTP_S.BuildResponse("200", "OK")); conn.SendNow(new_strap); unsigned long size = htonl(FlashBufSize+8); conn.SendNow((char*) &size, 4); conn.SendNow("mdat", 4); while (FlashBuf.size() > 0){ conn.SendNow(FlashBuf.front()); FlashBuf.pop_front(); } } FlashBuf.clear(); FlashBufSize = 0; } if (Strm.lastType() == DTSC::VIDEO || Strm.lastType() == DTSC::AUDIO){ if (FlashBufSize == 0){ //fill buffer with init data, if needed. if (Strm.metadata.isMember("audio") && Strm.metadata["audio"].isMember("init")){ tmp.DTSCAudioInit(Strm); tmp.tagTime(Strm.getPacket(0)["time"].asInt()); FlashBuf.push_back(std::string(tmp.data, tmp.len)); FlashBufSize += tmp.len; } if (Strm.metadata.isMember("video") && Strm.metadata["video"].isMember("init")){ tmp.DTSCVideoInit(Strm); tmp.tagTime(Strm.getPacket(0)["time"].asInt()); FlashBuf.push_back(std::string(tmp.data, tmp.len)); FlashBufSize += tmp.len; } FlashBufTime = Strm.getPacket(0)["time"].asInt(); } tmp.DTSCLoader(Strm); FlashBuf.push_back(std::string(tmp.data, tmp.len)); FlashBufSize += tmp.len; } } } if ( !ss.connected()){ break; } } } conn.close(); ss.SendNow(conn.getStats("HTTP_Dynamic").c_str()); ss.close(); return 0; } //Connector_HTTP_Dynamic main function
/// Main Connector_RTMP function int Connector_RTMP::Connector_RTMP(Socket::Connection conn){ Socket = conn; Socket.setBlocking(false); FLV::Tag tag, init_tag; DTSC::Stream Strm; while (!Socket.Received().available(1537) && Socket.connected()){Socket.spool(); Util::sleep(5);} RTMPStream::handshake_in = Socket.Received().remove(1537); RTMPStream::rec_cnt += 1537; if (RTMPStream::doHandshake()){ Socket.SendNow(RTMPStream::handshake_out); while (!Socket.Received().available(1536) && Socket.connected()){Socket.spool(); Util::sleep(5);} Socket.Received().remove(1536); RTMPStream::rec_cnt += 1536; #if DEBUG >= 4 fprintf(stderr, "Handshake succcess!\n"); #endif }else{ #if DEBUG >= 1 fprintf(stderr, "Handshake fail!\n"); #endif return 0; } unsigned int lastStats = 0; bool firsttime = true; while (Socket.connected()){ if (Socket.spool() || firsttime){ parseChunk(Socket.Received()); firsttime = false; }else{ Util::sleep(1);//sleep 1ms to prevent high CPU usage } if (ready4data){ if (!inited){ //we are ready, connect the socket! SS = Util::Stream::getStream(streamname); if (!SS.connected()){ #if DEBUG >= 1 fprintf(stderr, "Could not connect to server!\n"); #endif Socket.close();//disconnect user break; } SS.setBlocking(false); #if DEBUG >= 3 fprintf(stderr, "Everything connected, starting to send video data...\n"); #endif SS.SendNow("p\n"); inited = true; } if (inited && !nostats){ long long int now = Util::epoch(); if (now != lastStats){ lastStats = now; SS.SendNow(Socket.getStats("RTMP").c_str()); } } if (SS.spool()){ while (Strm.parsePacket(SS.Received())){ if (play_trans != -1){ //send a status reply AMF::Object amfreply("container", AMF::AMF0_DDV_CONTAINER); amfreply.addContent(AMF::Object("", "onStatus"));//status reply amfreply.addContent(AMF::Object("", (double)play_trans));//same transaction ID amfreply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL));//null - command info amfreply.addContent(AMF::Object(""));//info amfreply.getContentP(3)->addContent(AMF::Object("level", "status")); amfreply.getContentP(3)->addContent(AMF::Object("code", "NetStream.Play.Reset")); amfreply.getContentP(3)->addContent(AMF::Object("description", "Playing and resetting...")); amfreply.getContentP(3)->addContent(AMF::Object("details", "DDV")); amfreply.getContentP(3)->addContent(AMF::Object("clientid", (double)1337)); sendCommand(amfreply, play_msgtype, play_streamid); //send streamisrecorded if stream, well, is recorded. 
if (Strm.metadata.isMember("length") && Strm.metadata["length"].asInt() > 0){ Socket.Send(RTMPStream::SendUSR(4, 1));//send UCM StreamIsRecorded (4), stream 1 } //send streambegin Socket.Send(RTMPStream::SendUSR(0, 1));//send UCM StreamBegin (0), stream 1 //and more reply amfreply = AMF::Object("container", AMF::AMF0_DDV_CONTAINER); amfreply.addContent(AMF::Object("", "onStatus"));//status reply amfreply.addContent(AMF::Object("", (double)play_trans));//same transaction ID amfreply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL));//null - command info amfreply.addContent(AMF::Object(""));//info amfreply.getContentP(3)->addContent(AMF::Object("level", "status")); amfreply.getContentP(3)->addContent(AMF::Object("code", "NetStream.Play.Start")); amfreply.getContentP(3)->addContent(AMF::Object("description", "Playing!")); amfreply.getContentP(3)->addContent(AMF::Object("details", "DDV")); amfreply.getContentP(3)->addContent(AMF::Object("clientid", (double)1337)); sendCommand(amfreply, play_msgtype, play_streamid); RTMPStream::chunk_snd_max = 102400;//100KiB Socket.Send(RTMPStream::SendCTL(1, RTMPStream::chunk_snd_max));//send chunk size max (msg 1) //send dunno? Socket.Send(RTMPStream::SendUSR(32, 1));//send UCM no clue?, stream 1 play_trans = -1; } //sent init data if needed if (!stream_inited){ init_tag.DTSCMetaInit(Strm); Socket.SendNow(RTMPStream::SendMedia(init_tag)); if (Strm.metadata.isMember("audio") && Strm.metadata["audio"].isMember("init")){ init_tag.DTSCAudioInit(Strm); Socket.SendNow(RTMPStream::SendMedia(init_tag)); } if (Strm.metadata.isMember("video") && Strm.metadata["video"].isMember("init")){ init_tag.DTSCVideoInit(Strm); Socket.SendNow(RTMPStream::SendMedia(init_tag)); } stream_inited = true; } //sent a tag tag.DTSCLoader(Strm); Socket.SendNow(RTMPStream::SendMedia(tag)); #if DEBUG >= 8 fprintf(stderr, "Sent tag to %i: [%u] %s\n", Socket.getSocket(), tag.tagTime(), tag.tagType().c_str()); #endif } } } } Socket.close(); SS.SendNow(Socket.getStats("RTMP").c_str()); SS.close(); #if DEBUG >= 1 if (FLV::Parse_Error){fprintf(stderr, "FLV Parse Error: %s\n", FLV::Error_Str.c_str());} fprintf(stderr, "User %i disconnected.\n", conn.getSocket()); if (inited){ fprintf(stderr, "Status was: inited\n"); }else{ if (ready4data){ fprintf(stderr, "Status was: ready4data\n"); }else{ fprintf(stderr, "Status was: connected\n"); } } #endif return 0; }//Connector_RTMP
///\brief Main function for the TS Connector
///\param conn A socket describing the connection to the client.
///\param streamName The stream to connect to.
///\return The exit code of the connector.
int tsConnector(Socket::Connection conn, std::string streamName){
  std::string ToPack;
  TS::Packet PackData;
  std::string DTMIData;
  int PacketNumber = 0;
  long long unsigned int TimeStamp = 0;
  int ThisNaluSize;
  char VideoCounter = 0;
  char AudioCounter = 0;
  bool WritePesHeader;
  bool IsKeyFrame;
  bool FirstKeyFrame = true;
  bool FirstIDRInKeyFrame;
  MP4::AVCC avccbox;
  bool haveAvcc = false;
  DTSC::Stream Strm;
  bool inited = false;
  Socket::Connection ss;
  while (conn.connected()){
    if ( !inited){
      ss = Util::Stream::getStream(streamName);
      if ( !ss.connected()){
#if DEBUG >= 1
        fprintf(stderr, "Could not connect to server!\n");
#endif
        conn.close();
        break;
      }
      ss.SendNow("p\n");
      inited = true;
    }
    if (ss.spool()){
      while (Strm.parsePacket(ss.Received())){
        if ( !haveAvcc){
          avccbox.setPayload(Strm.metadata["video"]["init"].asString());
          haveAvcc = true;
        }
        std::stringstream TSBuf;
        Socket::Buffer ToPack;
        //write PAT and PMT TS packets
        if (PacketNumber == 0){
          PackData.DefaultPAT();
          TSBuf.write(PackData.ToString(), 188);
          PackData.DefaultPMT();
          TSBuf.write(PackData.ToString(), 188);
          PacketNumber += 2;
        }
        int PIDno = 0;
        char * ContCounter = 0;
        if (Strm.lastType() == DTSC::VIDEO){
          IsKeyFrame = Strm.getPacket(0).isMember("keyframe");
          if (IsKeyFrame){
            TimeStamp = (Strm.getPacket(0)["time"].asInt() * 27000);
          }
          ToPack.append(avccbox.asAnnexB());
          while (Strm.lastData().size()){
            ThisNaluSize = (Strm.lastData()[0] << 24) + (Strm.lastData()[1] << 16) + (Strm.lastData()[2] << 8) + Strm.lastData()[3];
            Strm.lastData().replace(0, 4, TS::NalHeader, 4);
            if (ThisNaluSize + 4 == Strm.lastData().size()){
              ToPack.append(Strm.lastData());
              break;
            }else{
              ToPack.append(Strm.lastData().c_str(), ThisNaluSize + 4);
              Strm.lastData().erase(0, ThisNaluSize + 4);
            }
          }
          ToPack.prepend(TS::Packet::getPESVideoLeadIn(0ul, Strm.getPacket(0)["time"].asInt() * 90));
          PIDno = 0x100;
          ContCounter = &VideoCounter;
        }else if (Strm.lastType() == DTSC::AUDIO){
          ToPack.append(TS::GetAudioHeader(Strm.lastData().size(), Strm.metadata["audio"]["init"].asString()));
          ToPack.append(Strm.lastData());
          ToPack.prepend(TS::Packet::getPESAudioLeadIn(ToPack.bytes(1073741824ul), Strm.getPacket(0)["time"].asInt() * 90));
          PIDno = 0x101;
          ContCounter = &AudioCounter;
        }
        //initial packet
        PackData.Clear();
        PackData.PID(PIDno);
        PackData.ContinuityCounter(( *ContCounter)++);
        PackData.UnitStart(1);
        if (IsKeyFrame){
          PackData.RandomAccess(1);
          PackData.PCR(TimeStamp);
        }
        unsigned int toSend = PackData.AddStuffing(ToPack.bytes(184));
        std::string gonnaSend = ToPack.remove(toSend);
        PackData.FillFree(gonnaSend);
        TSBuf.write(PackData.ToString(), 188);
        PacketNumber++;
        //rest of packets
        while (ToPack.size()){
          PackData.Clear();
          PackData.PID(PIDno);
          PackData.ContinuityCounter(( *ContCounter)++);
          toSend = PackData.AddStuffing(ToPack.bytes(184));
          gonnaSend = ToPack.remove(toSend);
          PackData.FillFree(gonnaSend);
          TSBuf.write(PackData.ToString(), 188);
          PacketNumber++;
        }
        TSBuf.flush();
        if (TSBuf.str().size()){
          conn.SendNow(TSBuf.str().c_str(), TSBuf.str().size());
          TSBuf.str("");
        }
        TSBuf.str("");
        PacketNumber = 0;
      }
    }
  }
  return 0;
}
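// Illustrative sketch (hypothetical helper, not in the original source): the NAL unit loop
// above converts H.264 payloads from length-prefixed (MP4/AVCC) form to Annex B form by
// reading each 4-byte big-endian NAL unit size and overwriting it with the 4-byte start code
// 0x00 0x00 0x00 0x01 (TS::NalHeader in the code above). Assuming std::string is available,
// the same transformation in isolation looks like this:
static void lengthPrefixedToAnnexB(std::string & data){
  const char startCode[4] = {0x00, 0x00, 0x00, 0x01};
  size_t pos = 0;
  while (pos + 4 <= data.size()){
    unsigned int naluSize = ((unsigned char)data[pos] << 24) | ((unsigned char)data[pos + 1] << 16)
                          | ((unsigned char)data[pos + 2] << 8) | (unsigned char)data[pos + 3];
    data.replace(pos, 4, startCode, 4); //replace the length field with a start code
    pos += 4 + naluSize; //skip over the start code and the NAL unit payload
  }
}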
///\brief Parses a single AMF command message, and sends a direct response through sendCommand(). ///\param amfData The received request. ///\param messageType The type of message. ///\param streamId The ID of the AMF stream. void parseAMFCommand(AMF::Object & amfData, int messageType, int streamId){ #if DEBUG >= 5 fprintf(stderr, "Received command: %s\n", amfData.Print().c_str()); #endif #if DEBUG >= 8 fprintf(stderr, "AMF0 command: %s\n", amfData.getContentP(0)->StrValue().c_str()); #endif if (amfData.getContentP(0)->StrValue() == "connect"){ double objencoding = 0; if (amfData.getContentP(2)->getContentP("objectEncoding")){ objencoding = amfData.getContentP(2)->getContentP("objectEncoding")->NumValue(); } #if DEBUG >= 6 int tmpint; if (amfData.getContentP(2)->getContentP("videoCodecs")){ tmpint = (int)amfData.getContentP(2)->getContentP("videoCodecs")->NumValue(); if (tmpint & 0x04){ fprintf(stderr, "Sorensen video support detected\n"); } if (tmpint & 0x80){ fprintf(stderr, "H264 video support detected\n"); } } if (amfData.getContentP(2)->getContentP("audioCodecs")){ tmpint = (int)amfData.getContentP(2)->getContentP("audioCodecs")->NumValue(); if (tmpint & 0x04){ fprintf(stderr, "MP3 audio support detected\n"); } if (tmpint & 0x400){ fprintf(stderr, "AAC audio support detected\n"); } } #endif app_name = amfData.getContentP(2)->getContentP("tcUrl")->StrValue(); app_name = app_name.substr(app_name.find('/', 7) + 1); RTMPStream::chunk_snd_max = 4096; Socket.Send(RTMPStream::SendCTL(1, RTMPStream::chunk_snd_max)); //send chunk size max (msg 1) Socket.Send(RTMPStream::SendCTL(5, RTMPStream::snd_window_size)); //send window acknowledgement size (msg 5) Socket.Send(RTMPStream::SendCTL(6, RTMPStream::rec_window_size)); //send rec window acknowledgement size (msg 6) Socket.Send(RTMPStream::SendUSR(0, 1)); //send UCM StreamBegin (0), stream 1 //send a _result reply AMF::Object amfReply("container", AMF::AMF0_DDV_CONTAINER); amfReply.addContent(AMF::Object("", "_result")); //result success amfReply.addContent(amfData.getContent(1)); //same transaction ID amfReply.addContent(AMF::Object("")); //server properties amfReply.getContentP(2)->addContent(AMF::Object("fmsVer", "FMS/3,5,5,2004")); amfReply.getContentP(2)->addContent(AMF::Object("capabilities", (double)31)); amfReply.getContentP(2)->addContent(AMF::Object("mode", (double)1)); amfReply.addContent(AMF::Object("")); //info amfReply.getContentP(3)->addContent(AMF::Object("level", "status")); amfReply.getContentP(3)->addContent(AMF::Object("code", "NetConnection.Connect.Success")); amfReply.getContentP(3)->addContent(AMF::Object("description", "Connection succeeded.")); amfReply.getContentP(3)->addContent(AMF::Object("clientid", 1337)); amfReply.getContentP(3)->addContent(AMF::Object("objectEncoding", objencoding)); //amfReply.getContentP(3)->addContent(AMF::Object("data", AMF::AMF0_ECMA_ARRAY)); //amfReply.getContentP(3)->getContentP(4)->addContent(AMF::Object("version", "3,5,4,1004")); sendCommand(amfReply, messageType, streamId); //send onBWDone packet - no clue what it is, but real server sends it... 
//amfReply = AMF::Object("container", AMF::AMF0_DDV_CONTAINER); //amfReply.addContent(AMF::Object("", "onBWDone"));//result //amfReply.addContent(amfData.getContent(1));//same transaction ID //amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL));//null //sendCommand(amfReply, messageType, streamId); return; } //connect if (amfData.getContentP(0)->StrValue() == "createStream"){ //send a _result reply AMF::Object amfReply("container", AMF::AMF0_DDV_CONTAINER); amfReply.addContent(AMF::Object("", "_result")); //result success amfReply.addContent(amfData.getContent(1)); //same transaction ID amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info amfReply.addContent(AMF::Object("", (double)1)); //stream ID - we use 1 sendCommand(amfReply, messageType, streamId); Socket.Send(RTMPStream::SendUSR(0, 1)); //send UCM StreamBegin (0), stream 1 return; } //createStream if ((amfData.getContentP(0)->StrValue() == "closeStream") || (amfData.getContentP(0)->StrValue() == "deleteStream")){ if (ss.connected()){ ss.close(); } return; } if ((amfData.getContentP(0)->StrValue() == "FCUnpublish") || (amfData.getContentP(0)->StrValue() == "releaseStream")){ // ignored return; } if ((amfData.getContentP(0)->StrValue() == "FCPublish")){ //send a FCPublic reply AMF::Object amfReply("container", AMF::AMF0_DDV_CONTAINER); amfReply.addContent(AMF::Object("", "onFCPublish")); //status reply amfReply.addContent(AMF::Object("", 0, AMF::AMF0_NUMBER)); //same transaction ID amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info amfReply.addContent(AMF::Object("")); //info amfReply.getContentP(3)->addContent(AMF::Object("code", "NetStream.Publish.Start")); amfReply.getContentP(3)->addContent(AMF::Object("description", "Please followup with publish command...")); sendCommand(amfReply, messageType, streamId); return; } //FCPublish if (amfData.getContentP(0)->StrValue() == "releaseStream"){ //send a _result reply AMF::Object amfReply("container", AMF::AMF0_DDV_CONTAINER); amfReply.addContent(AMF::Object("", "_result")); //result success amfReply.addContent(amfData.getContent(1)); //same transaction ID amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info amfReply.addContent(AMF::Object("", AMF::AMF0_UNDEFINED)); //stream ID? 
sendCommand(amfReply, messageType, streamId); return; }//releaseStream if ((amfData.getContentP(0)->StrValue() == "getStreamLength") || (amfData.getContentP(0)->StrValue() == "getMovLen")){ //send a _result reply AMF::Object amfReply("container", AMF::AMF0_DDV_CONTAINER); amfReply.addContent(AMF::Object("", "_result")); //result success amfReply.addContent(amfData.getContent(1)); //same transaction ID amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info amfReply.addContent(AMF::Object("", (double)0)); //zero length sendCommand(amfReply, messageType, streamId); return; } //getStreamLength if ((amfData.getContentP(0)->StrValue() == "publish")){ if (amfData.getContentP(3)){ streamName = amfData.getContentP(3)->StrValue(); /// \todo implement push for MistPlayer or restrict and change to getLive ss = Util::Stream::getStream(streamName); if ( !ss.connected()){ #if DEBUG >= 1 fprintf(stderr, "Could not connect to server!\n"); #endif Socket.close(); //disconnect user return; } DTSC::Stream Strm; Strm.waitForMeta(ss); ss.Send("P "); ss.Send(Socket.getHost().c_str()); ss.Send(" "); ss.Send(app_name); ss.SendNow("\n"); streamReset = true; noStats = true; } //send a _result reply AMF::Object amfReply("container", AMF::AMF0_DDV_CONTAINER); amfReply.addContent(AMF::Object("", "_result")); //result success amfReply.addContent(amfData.getContent(1)); //same transaction ID amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info amfReply.addContent(AMF::Object("", 1, AMF::AMF0_BOOL)); //publish success? sendCommand(amfReply, messageType, streamId); Socket.Send(RTMPStream::SendUSR(0, 1)); //send UCM StreamBegin (0), stream 1 //send a status reply amfReply = AMF::Object("container", AMF::AMF0_DDV_CONTAINER); amfReply.addContent(AMF::Object("", "onStatus")); //status reply amfReply.addContent(AMF::Object("", 0, AMF::AMF0_NUMBER)); //same transaction ID amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info amfReply.addContent(AMF::Object("")); //info amfReply.getContentP(3)->addContent(AMF::Object("level", "status")); amfReply.getContentP(3)->addContent(AMF::Object("code", "NetStream.Publish.Start")); amfReply.getContentP(3)->addContent(AMF::Object("description", "Stream is now published!")); amfReply.getContentP(3)->addContent(AMF::Object("clientid", (double)1337)); sendCommand(amfReply, messageType, streamId); return; } //publish if (amfData.getContentP(0)->StrValue() == "checkBandwidth"){ //send a _result reply AMF::Object amfReply("container", AMF::AMF0_DDV_CONTAINER); amfReply.addContent(AMF::Object("", "_result")); //result success amfReply.addContent(amfData.getContent(1)); //same transaction ID amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info sendCommand(amfReply, messageType, streamId); return; } //checkBandwidth if ((amfData.getContentP(0)->StrValue() == "play") || (amfData.getContentP(0)->StrValue() == "play2")){ //set reply number and stream name, actual reply is sent up in the ss.spool() handler playTransaction = amfData.getContentP(1)->NumValue(); playMessageType = messageType; playStreamId = streamId; streamName = amfData.getContentP(3)->StrValue(); Connector_RTMP::ready4data = true; //start sending video data! 
return; } //play if ((amfData.getContentP(0)->StrValue() == "seek")){ //set reply number and stream name, actual reply is sent up in the ss.spool() handler playTransaction = amfData.getContentP(1)->NumValue(); playMessageType = messageType; playStreamId = streamId; streamInited = false; AMF::Object amfReply("container", AMF::AMF0_DDV_CONTAINER); amfReply.addContent(AMF::Object("", "onStatus")); //status reply amfReply.addContent(amfData.getContent(1)); //same transaction ID amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info amfReply.addContent(AMF::Object("")); //info amfReply.getContentP(3)->addContent(AMF::Object("level", "status")); amfReply.getContentP(3)->addContent(AMF::Object("code", "NetStream.Seek.Notify")); amfReply.getContentP(3)->addContent(AMF::Object("description", "Seeking to the specified time")); amfReply.getContentP(3)->addContent(AMF::Object("details", "DDV")); amfReply.getContentP(3)->addContent(AMF::Object("clientid", (double)1337)); sendCommand(amfReply, playMessageType, playStreamId); ss.Send("s "); ss.Send(JSON::Value((long long int)amfData.getContentP(3)->NumValue()).asString().c_str()); ss.SendNow("\n"); return; } //seek if ((amfData.getContentP(0)->StrValue() == "pauseRaw") || (amfData.getContentP(0)->StrValue() == "pause")){ if (amfData.getContentP(3)->NumValue()){ ss.Send("q\n"); //quit playing //send a status reply AMF::Object amfReply("container", AMF::AMF0_DDV_CONTAINER); amfReply.addContent(AMF::Object("", "onStatus")); //status reply amfReply.addContent(amfData.getContent(1)); //same transaction ID amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info amfReply.addContent(AMF::Object("")); //info amfReply.getContentP(3)->addContent(AMF::Object("level", "status")); amfReply.getContentP(3)->addContent(AMF::Object("code", "NetStream.Pause.Notify")); amfReply.getContentP(3)->addContent(AMF::Object("description", "Pausing playback")); amfReply.getContentP(3)->addContent(AMF::Object("details", "DDV")); amfReply.getContentP(3)->addContent(AMF::Object("clientid", (double)1337)); sendCommand(amfReply, playMessageType, playStreamId); }else{ ss.SendNow("p\n"); //start playing //send a status reply AMF::Object amfReply("container", AMF::AMF0_DDV_CONTAINER); amfReply.addContent(AMF::Object("", "onStatus")); //status reply amfReply.addContent(amfData.getContent(1)); //same transaction ID amfReply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info amfReply.addContent(AMF::Object("")); //info amfReply.getContentP(3)->addContent(AMF::Object("level", "status")); amfReply.getContentP(3)->addContent(AMF::Object("code", "NetStream.Unpause.Notify")); amfReply.getContentP(3)->addContent(AMF::Object("description", "Resuming playback")); amfReply.getContentP(3)->addContent(AMF::Object("details", "DDV")); amfReply.getContentP(3)->addContent(AMF::Object("clientid", (double)1337)); sendCommand(amfReply, playMessageType, playStreamId); } return; } //pause #if DEBUG >= 2 fprintf(stderr, "AMF0 command not processed!\n%s\n", amfData.Print().c_str()); #endif } //parseAMFCommand
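// Illustrative sketch (hypothetical helper, not in the original source): most replies built
// above share the same AMF0 skeleton, namely a command name, the caller's transaction ID, a
// null command-info object, and then a payload object. Using the same AMF::Object API as the
// code above, a small builder for the recurring onStatus replies could look like this:
static AMF::Object makeStatusReply(double transactionId, std::string code, std::string description){
  AMF::Object reply("container", AMF::AMF0_DDV_CONTAINER);
  reply.addContent(AMF::Object("", "onStatus")); //status reply
  reply.addContent(AMF::Object("", transactionId)); //same transaction ID as the request
  reply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info
  reply.addContent(AMF::Object("")); //info object
  reply.getContentP(3)->addContent(AMF::Object("level", "status"));
  reply.getContentP(3)->addContent(AMF::Object("code", code));
  reply.getContentP(3)->addContent(AMF::Object("description", description));
  return reply;
}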
///\brief Main function for the RTMP Connector ///\param conn A socket describing the connection to the client. ///\return The exit code of the connector. int rtmpConnector(Socket::Connection conn){ Socket = conn; Socket.setBlocking(false); FLV::Tag tag, init_tag; DTSC::Stream Strm; while ( !Socket.Received().available(1537) && Socket.connected()){ Socket.spool(); Util::sleep(5); } RTMPStream::handshake_in = Socket.Received().remove(1537); RTMPStream::rec_cnt += 1537; if (RTMPStream::doHandshake()){ Socket.SendNow(RTMPStream::handshake_out); while ( !Socket.Received().available(1536) && Socket.connected()){ Socket.spool(); Util::sleep(5); } Socket.Received().remove(1536); RTMPStream::rec_cnt += 1536; #if DEBUG >= 5 fprintf(stderr, "Handshake success!\n"); #endif }else{ #if DEBUG >= 5 fprintf(stderr, "Handshake fail!\n"); #endif return 0; } unsigned int lastStats = 0; bool firsttime = true; while (Socket.connected()){ if (Socket.spool() || firsttime){ parseChunk(Socket.Received()); firsttime = false; }else{ Util::sleep(1); //sleep 1ms to prevent high CPU usage } if (ready4data){ if ( !inited){ //we are ready, connect the socket! ss = Util::Stream::getStream(streamName); if ( !ss.connected()){ #if DEBUG >= 1 fprintf(stderr, "Could not connect to server!\n"); #endif Socket.close(); //disconnect user break; } ss.setBlocking(false); Strm.waitForMeta(ss); //find first audio and video tracks for (JSON::ObjIter objIt = Strm.metadata["tracks"].ObjBegin(); objIt != Strm.metadata["tracks"].ObjEnd(); objIt++){ if (videoID == -1 && objIt->second["type"].asStringRef() == "video"){ videoID = objIt->second["trackid"].asInt(); } if (audioID == -1 && objIt->second["type"].asStringRef() == "audio"){ audioID = objIt->second["trackid"].asInt(); } } //select the tracks and play std::stringstream cmd; cmd << "t"; if (videoID != -1){ cmd << " " << videoID; } if (audioID != -1){ cmd << " " << audioID; } cmd << "\np\n"; ss.SendNow(cmd.str().c_str()); inited = true; } if (inited && !noStats){ long long int now = Util::epoch(); if (now != lastStats){ lastStats = now; ss.SendNow(Socket.getStats("RTMP")); } } if (ss.spool()){ while (Strm.parsePacket(ss.Received())){ if (playTransaction != -1){ //send a status reply AMF::Object amfreply("container", AMF::AMF0_DDV_CONTAINER); amfreply.addContent(AMF::Object("", "onStatus")); //status reply amfreply.addContent(AMF::Object("", (double)playTransaction)); //same transaction ID amfreply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info amfreply.addContent(AMF::Object("")); //info amfreply.getContentP(3)->addContent(AMF::Object("level", "status")); amfreply.getContentP(3)->addContent(AMF::Object("code", "NetStream.Play.Reset")); amfreply.getContentP(3)->addContent(AMF::Object("description", "Playing and resetting...")); amfreply.getContentP(3)->addContent(AMF::Object("details", "DDV")); amfreply.getContentP(3)->addContent(AMF::Object("clientid", (double)1337)); sendCommand(amfreply, playMessageType, playStreamId); //send streamisrecorded if stream, well, is recorded. 
if (Strm.metadata.isMember("length") && Strm.metadata["length"].asInt() > 0){ Socket.Send(RTMPStream::SendUSR(4, 1)); //send UCM StreamIsRecorded (4), stream 1 } //send streambegin Socket.Send(RTMPStream::SendUSR(0, 1)); //send UCM StreamBegin (0), stream 1 //and more reply amfreply = AMF::Object("container", AMF::AMF0_DDV_CONTAINER); amfreply.addContent(AMF::Object("", "onStatus")); //status reply amfreply.addContent(AMF::Object("", (double)playTransaction)); //same transaction ID amfreply.addContent(AMF::Object("", (double)0, AMF::AMF0_NULL)); //null - command info amfreply.addContent(AMF::Object("")); //info amfreply.getContentP(3)->addContent(AMF::Object("level", "status")); amfreply.getContentP(3)->addContent(AMF::Object("code", "NetStream.Play.Start")); amfreply.getContentP(3)->addContent(AMF::Object("description", "Playing!")); amfreply.getContentP(3)->addContent(AMF::Object("details", "DDV")); amfreply.getContentP(3)->addContent(AMF::Object("clientid", (double)1337)); sendCommand(amfreply, playMessageType, playStreamId); RTMPStream::chunk_snd_max = 102400; //100KiB Socket.Send(RTMPStream::SendCTL(1, RTMPStream::chunk_snd_max)); //send chunk size max (msg 1) //send dunno? Socket.Send(RTMPStream::SendUSR(32, 1)); //send UCM no clue?, stream 1 playTransaction = -1; } //sent init data if needed if ( !streamInited){ init_tag.DTSCMetaInit(Strm, Strm.getTrackById(videoID), Strm.getTrackById(audioID)); Socket.SendNow(RTMPStream::SendMedia(init_tag)); if (audioID != -1 && Strm.getTrackById(audioID).isMember("init")){ init_tag.DTSCAudioInit(Strm.getTrackById(audioID)); Socket.SendNow(RTMPStream::SendMedia(init_tag)); } if (videoID != -1 && Strm.getTrackById(videoID).isMember("init")){ init_tag.DTSCVideoInit(Strm.getTrackById(videoID)); Socket.SendNow(RTMPStream::SendMedia(init_tag)); } streamInited = true; } //sent a tag tag.DTSCLoader(Strm); Socket.SendNow(RTMPStream::SendMedia(tag)); #if DEBUG >= 8 fprintf(stderr, "Sent tag to %i: [%u] %s\n", Socket.getSocket(), tag.tagTime(), tag.tagType().c_str()); #endif } } } } Socket.close(); ss.SendNow(Socket.getStats("RTMP").c_str()); ss.close(); return 0; } //Connector_RTMP