int main( int argc, char **argv )
{
    // IP-camera streaming demo entry point: initializes board peripherals,
    // the video/audio encoders and the AVI muxer, then builds a live555 RTSP
    // server with two sessions ("vs1" main stream, "vs2" sub stream) and
    // enters the event loop (doEventLoop() never returns).

    //int ret = 0;
    PTZControlInit();

    demo_setting * ext_gSettings = NULL;
    // Allocate the "global" settings
    ext_gSettings = (demo_setting*)malloc( sizeof( demo_setting ) );
    if ( NULL == ext_gSettings ) {
        printf( "main::out of memory!\n" );
        return -1;
    }

    sig_init();
    atexit(appExit);

    //init the setting struct
    Settings_Initialize( ext_gSettings );
    read_Parse(ext_gSettings);
    //printf("video type = %d \n", ext_gSettings->video_types);
    //...do your job

    //close the led
    setled_off();
    //init dma memory
    akuio_pmem_init();
    encode_init();
    printf("encode_init ok\n");

    //open camera
    camera_open(ext_gSettings->width, ext_gSettings->height);
    printf("camera_open ok\n");

    //encode_open: fill the encoder input parameters from the settings
    T_ENC_INPUT encInput;
    encInput.width = ext_gSettings->width;                  // encoded image width, must be divisible by 4
    encInput.height = ext_gSettings->height;                // encoded image height, must be divisible by 2
    encInput.kbpsmode = ext_gSettings->kbpsmode;
    encInput.qpHdr = ext_gSettings->qpHdr;                  // initial QP value
    encInput.iqpHdr = ext_gSettings->iqpHdr;                // initial QP value
    encInput.bitPerSecond = ext_gSettings->bitPerSecond;    // target bps
    encInput.minQp = ext_gSettings->minQp;
    encInput.maxQp = ext_gSettings->maxQp;
    encInput.framePerSecond = ext_gSettings->framePerSecond;
    // NOTE(review): 'video_tytes' [sic] is presumably the field name declared
    // in T_ENC_INPUT — confirm against the project header before "fixing" it.
    encInput.video_tytes = ext_gSettings->video_types;
    encode_open(&encInput);
    printf("encode_open ok\n");

    //set mux
    mux_input.rec_path = ext_gSettings->rec_path;
    mux_input.m_MediaRecType = MEDIALIB_REC_AVI_NORMAL;
    if (ext_gSettings->bhasAudio)
    {
        bHasAudio = 1;
        //mux_input.m_bCaptureAudio = 1;
    }
    else
    {
        bHasAudio = 0;
        //mux_input.m_bCaptureAudio = 0;
    }
    // NOTE(review): muxer audio capture is forced on regardless of bhasAudio
    // (the per-setting assignments above are commented out) — confirm intended.
    mux_input.m_bCaptureAudio = 1;
    //mux video: sub-stream format selects the recorded video type
    if(parse.format2 == 0)
    {
        mux_input.m_eVideoType = MEDIALIB_VIDEO_H264;
    }
    else if(parse.format2 == 1)
    {
        mux_input.m_eVideoType = MEDIALIB_VIDEO_MJPEG;
    }
    mux_input.m_nWidth = parse.width2;
    mux_input.m_nHeight = parse.height2;
    //mux audio
    mux_input.m_eAudioType = MEDIALIB_AUDIO_AAC;
    mux_input.m_nSampleRate = 8000;
    //mux_input.abitsrate = ext_gSettings->abitsrate;
    printf("mux_open ok\n");

    //if (ext_gSettings->bhasAudio)
    // NOTE(review): the condition above is commented out, so the audio
    // pipeline below always runs; the braces only delimit a scope block.
    {
        T_AUDIO_INPUT audioInput;
        audioInput.enc_type = (AUDIO_ENCODE_TYPE_CC)ext_gSettings->audioType;
        audioInput.nBitsRate = ext_gSettings->abitsrate;
        audioInput.nBitsPerSample = 16;
        audioInput.nChannels = 1;
        audioInput.nSampleRate = ext_gSettings->aSamplerate;
        audio_open(&audioInput);
        printf("audio_open ok\n");
        audio_start();
    }

    //start ftp server
    //startFTPSrv();

    Init_photograph();
    //PTZControlInit();
    //start video process
    video_process_start();
    InitMotionDetect();
    DemuxForLiveSetCallBack();

    // Build the live555 scheduler/usage-environment used by the RTSP server.
    TaskScheduler* scheduler = BasicTaskScheduler::createNew();
    env = BasicUsageEnvironment::createNew(*scheduler);

    UserAuthenticationDatabase* authDB = NULL;
#ifdef ACCESS_CONTROL
    // To implement client access control to the RTSP server, do the following:
    authDB = new UserAuthenticationDatabase;
    authDB->addUserRecord("username1", "password1"); // replace these with real strings
    // Repeat the above with each <username>, <password> that you wish to allow
    // access to the server.
#endif

    // Create the RTSP server:
    RTSPServer* rtspServer = AKRTSPServer::createNew(*env, RTSPPORT, authDB);
    if (rtspServer == NULL)
    {
        *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
        appExit();
        exit(1);
    }

    char const* descriptionString = "Session streamed by \"testOnDemandRTSPServer\"";

    // Set up each of the possible streams that can be served by the
    // RTSP server.  Each such stream is implemented using a
    // "ServerMediaSession" object, plus one or more
    // "ServerMediaSubsession" objects for each audio/video substream.
    int vsIndex = 0;
    VIDEO_MODE vm[2] = {VIDEO_MODE_VGA,VIDEO_MODE_VGA};
    const char* streamName1 = "vs1";
    const char* streamName2 = "vs2";
    ((AKRTSPServer*)rtspServer)->SetStreamName(streamName1, streamName2);

    // Main stream (vs1): MJPEG when video_types == 1, H.264 when == 0.
    if(ext_gSettings->video_types == 1)
    {
        // map configured frame width to the VIDEO_MODE enum
        if(ext_gSettings->width == 640)
        {
            vm[0] = VIDEO_MODE_VGA;
        }
        else if(ext_gSettings->width == 320)
        {
            vm[0] = VIDEO_MODE_QVGA;
        }
        else if(ext_gSettings->width == 720)
        {
            vm[0] = VIDEO_MODE_D1;
        }
        AKIPCMJPEGFramedSource* ipcMJPEGSourcecam = NULL;
        ServerMediaSession* smsMJPEGcam = ServerMediaSession::createNew(*env, streamName1, 0, descriptionString);
        AKIPCMJPEGOnDemandMediaSubsession* subsMJPEGcam = AKIPCMJPEGOnDemandMediaSubsession::createNew(*env,ipcMJPEGSourcecam, ext_gSettings->width, ext_gSettings->height, vsIndex);
        smsMJPEGcam->addSubsession(subsMJPEGcam);
        subsMJPEGcam->getframefunc = video_process_get_buf;
        subsMJPEGcam->setledstart = setled_view_start;
        subsMJPEGcam->setledexit = setled_view_stop;
        if(bHasAudio)
            smsMJPEGcam->addSubsession(AKIPCAACAudioOnDemandMediaSubsession::createNew(*env,True,getAACBuf, vsIndex));
        rtspServer->addServerMediaSession(smsMJPEGcam);
        char* url1 = rtspServer->rtspURL(smsMJPEGcam);
        *env << "using url \"" << url1 <<"\"\n";
        delete[] url1;  // rtspURL() hands ownership of a heap string to the caller
    }
    else if(ext_gSettings->video_types == 0)
    {
        if(ext_gSettings->width == 1280)
        {
            vm[0] = VIDEO_MODE_720P;
        }
        else if(ext_gSettings->width == 640)
        {
            vm[0] = VIDEO_MODE_VGA;
        }
        else if(ext_gSettings->width == 320)
        {
            vm[0] = VIDEO_MODE_QVGA;
        }
        else if(ext_gSettings->width == 720)
        {
            vm[0] = VIDEO_MODE_D1;
        }
        AKIPCH264FramedSource* ipcSourcecam = NULL;
        ServerMediaSession* smscam = ServerMediaSession::createNew(*env, streamName1, 0, descriptionString);
        AKIPCH264OnDemandMediaSubsession* subscam = AKIPCH264OnDemandMediaSubsession::createNew(*env,ipcSourcecam, 0, vsIndex);
        smscam->addSubsession(subscam);
        if(bHasAudio)
            smscam->addSubsession(AKIPCAACAudioOnDemandMediaSubsession::createNew(*env,True,getAACBuf, vsIndex));
        subscam->getframefunc = video_process_get_buf;
        subscam->setledstart = setled_view_start;
        subscam->setledexit = setled_view_stop;
        rtspServer->addServerMediaSession(smscam);
        char* url1 = rtspServer->rtspURL(smscam);
        *env << "using url \"" << url1 <<"\"\n";
        delete[] url1;
    }

    // Sub stream (vs2): format chosen by parse.format2 (0 = H.264, 1 = MJPEG).
    vsIndex = 1;
    if(parse.format2 == 0)//264
    {
        if(parse.width2 == 1280)
        {
            vm[1] = VIDEO_MODE_720P;
        }
        else if(parse.width2 == 640)
        {
            vm[1] = VIDEO_MODE_VGA;
        }
        else if(parse.width2 == 320)
        {
            vm[1] = VIDEO_MODE_QVGA;
        }
        else if(parse.width2 == 720)
        {
            vm[1] = VIDEO_MODE_D1;
        }
        AKIPCH264FramedSource* ipcSourcecam = NULL;
        ServerMediaSession* smscam = ServerMediaSession::createNew(*env, streamName2, 0, descriptionString);
        AKIPCH264OnDemandMediaSubsession* subscam = AKIPCH264OnDemandMediaSubsession::createNew(*env,ipcSourcecam, 0, vsIndex);
        smscam->addSubsession(subscam);
        if(bHasAudio)
            smscam->addSubsession(AKIPCAACAudioOnDemandMediaSubsession::createNew(*env,True,getAACBuf, vsIndex));
        subscam->getframefunc = video_process_get_buf;
        subscam->setledstart = setled_view_start;
        subscam->setledexit = setled_view_stop;
        rtspServer->addServerMediaSession(smscam);
        char* url2 = rtspServer->rtspURL(smscam);
        *env << "using url \"" << url2 <<"\"\n";
        delete[] url2;
    }
    else if(parse.format2 == 1)//mjpeg
    {
        if(parse.width2 == 640)
        {
            vm[1] = VIDEO_MODE_VGA;
        }
        else if(parse.width2 == 320)
        {
            vm[1] = VIDEO_MODE_QVGA;
        }
        else if(parse.width2 == 720)
        {
            vm[1] = VIDEO_MODE_D1;
        }
        AKIPCMJPEGFramedSource* ipcMJPEGSourcecam = NULL;
        ServerMediaSession* smsMJPEGcam = ServerMediaSession::createNew(*env, streamName2, 0, descriptionString);
        AKIPCMJPEGOnDemandMediaSubsession* subsMJPEGcam = AKIPCMJPEGOnDemandMediaSubsession::createNew(*env,ipcMJPEGSourcecam, parse.width2, parse.height2, vsIndex);
        smsMJPEGcam->addSubsession(subsMJPEGcam);
        subsMJPEGcam->getframefunc = video_process_get_buf;
        subsMJPEGcam->setledstart = setled_view_start;
        subsMJPEGcam->setledexit = setled_view_stop;
        if(bHasAudio)
            smsMJPEGcam->addSubsession(AKIPCAACAudioOnDemandMediaSubsession::createNew(*env,True,getAACBuf, vsIndex));
        rtspServer->addServerMediaSession(smsMJPEGcam);
        char* url2 = rtspServer->rtspURL(smsMJPEGcam);
        *env << "using url \"" << url2 <<"\"\n";
        delete[] url2;
    }

#if 0
    if (rtspServer->setUpTunnelingOverHTTP(80) || rtspServer->setUpTunnelingOverHTTP(8000) || rtspServer->setUpTunnelingOverHTTP(8080))
    {
        *env << "\n(We use port " << rtspServer->httpServerPortNum() << " for optional RTSP-over-HTTP tunneling.)\n";
    }
    else
    {
        *env << "\n(RTSP-over-HTTP tunneling is not available.)\n";
    }
#endif

    //printf("streamName:%s,Port:%d\n", streamName1, RTSPPORT);

    // Publish stream names, video modes, RTSP port and frame rates to the
    // network-control server.
    NetCtlSrvPar ncsp;
    memset(&ncsp, 0, sizeof(ncsp));
    getDeviceID(ncsp.strDeviceID);
    printf("device id:**%s**\n", ncsp.strDeviceID);
    strcpy(ncsp.strStreamName1, streamName1);
    strcpy(ncsp.strStreamName2, streamName2);
    ncsp.vm1 = vm[0];
    ncsp.vm2 = vm[1];
    ncsp.nRtspPort = RTSPPORT;
    ncsp.nMainFps = parse.fps1;
    ncsp.nSubFps = parse.fps2;
    //start net command server
    startNetCtlServer(&ncsp);

    printf("[##]start record...\n");
    auto_record_file();
    printf("[##]auto_record_file() called..\n");

    //at last,start rtsp loop
    env->taskScheduler().doEventLoop(); // does not return
    return 0;
}
int main(int argc, char *argv[]) { if (argc < 3) { printf("Usage: #simple_demo ip port\n"); return -1; } struct cap_handle *caphandle = NULL; struct cvt_handle *cvthandle = NULL; struct enc_handle *enchandle = NULL; struct pac_handle *pachandle = NULL; struct net_handle *nethandle = NULL; struct cap_param capp; struct cvt_param cvtp; struct enc_param encp; struct pac_param pacp; struct net_param netp; // set paraments U32 vfmt = V4L2_PIX_FMT_YUYV; U32 ofmt = V4L2_PIX_FMT_YUV420; capp.dev_name = "/dev/video0"; capp.width = WIDTH; capp.height = HEIGHT; capp.pixfmt = vfmt; capp.rate = FRAMERATE; cvtp.inwidth = WIDTH; cvtp.inheight = HEIGHT; cvtp.inpixfmt = vfmt; cvtp.outwidth = WIDTH; cvtp.outheight = HEIGHT; cvtp.outpixfmt = ofmt; encp.src_picwidth = WIDTH; encp.src_picheight = HEIGHT; encp.enc_picwidth = WIDTH; encp.enc_picheight = HEIGHT; encp.chroma_interleave = 0; encp.fps = FRAMERATE; encp.gop = 30; encp.bitrate = 800; pacp.max_pkt_len = 1400; pacp.ssrc = 10; netp.type = UDP; netp.serip = argv[1]; netp.serport = atoi(argv[2]); caphandle = capture_open(capp); if (!caphandle) { printf("--- Open capture failed\n"); return -1; } cvthandle = convert_open(cvtp); if (!cvthandle) { printf("--- Open convert failed\n"); return -1; } enchandle = encode_open(encp); if (!enchandle) { printf("--- Open encode failed\n"); return -1; } pachandle = pack_open(pacp); if (!pachandle) { printf("--- Open pack failed\n"); return -1; } nethandle = net_open(netp); if (!nethandle) { printf("--- Open network failed\n"); return -1; } // start stream loop int ret; void *cap_buf, *cvt_buf, *hd_buf, *enc_buf, *pac_buf; int cap_len, cvt_len, hd_len, enc_len, pac_len; enum pic_t ptype; unsigned long framecount = 0; capture_start(caphandle); // !!! need to start capture stream! 
while (1) { ret = capture_get_data(caphandle, &cap_buf, &cap_len); if (ret != 0) { if (ret < 0) // error { printf("--- capture_get_data failed\n"); break; } else // again { usleep(10000); continue; } } if (cap_len <= 0) { printf("!!! No capture data\n"); continue; } // else ret = convert_do(cvthandle, cap_buf, cap_len, &cvt_buf, &cvt_len); if (ret < 0) { printf("--- convert_do failed\n"); break; } if (cvt_len <= 0) { printf("!!! No convert data\n"); continue; } // else // fetch h264 headers first! while ((ret = encode_get_headers(enchandle, &hd_buf, &hd_len, &ptype)) == 1) { //fwrite(hd_buf, 1, hd_len, dumpfile); pack_put(pachandle, hd_buf, hd_len); while (pack_get(pachandle, &pac_buf, &pac_len) == 1) { ret = net_send(nethandle, pac_buf, pac_len); if (ret != pac_len) { printf("send pack data failed, size: %d, err: %s\n", pac_len, strerror(errno)); } } } ret = encode_do(enchandle, cvt_buf, cvt_len, &enc_buf, &enc_len, &ptype); if (ret < 0) { printf("--- encode_do failed\n"); break; } if (enc_len <= 0) { printf("!!! No encode data\n"); continue; } // else //fwrite(enc_buf, 1, enc_len, dumpfile); // RTP pack and send pack_put(pachandle, enc_buf, enc_len); while (pack_get(pachandle, &pac_buf, &pac_len) == 1) { ret = net_send(nethandle, pac_buf, pac_len); if (ret != pac_len) { printf("send pack failed, size: %d, err: %s\n", pac_len, strerror(errno)); } } framecount++; } capture_stop(caphandle); net_close(nethandle); pack_close(pachandle); encode_close(enchandle); convert_close(cvthandle); capture_close(caphandle); return 0; }
int main(int argc, char *argv[]) { struct cap_handle *caphandle = NULL; struct cvt_handle *cvthandle = NULL; struct enc_handle *enchandle = NULL; struct pac_handle *pachandle = NULL; struct net_handle *nethandle = NULL; struct cap_param capp; struct cvt_param cvtp; struct enc_param encp; struct pac_param pacp; struct net_param netp; int stage = 0b00000011; U32 vfmt = V4L2_PIX_FMT_YUYV; U32 ofmt = V4L2_PIX_FMT_YUV420; // set default values capp.dev_name = "/dev/video0"; capp.width = 640; capp.height = 480; capp.pixfmt = vfmt; capp.rate = 15; cvtp.inwidth = 640; cvtp.inheight = 480; cvtp.inpixfmt = vfmt; cvtp.outwidth = 640; cvtp.outheight = 480; cvtp.outpixfmt = ofmt; encp.src_picwidth = 640; encp.src_picheight = 480; encp.enc_picwidth = 640; encp.enc_picheight = 480; encp.chroma_interleave = 0; encp.fps = 15; encp.gop = 12; encp.bitrate = 1000; pacp.max_pkt_len = 1400; pacp.ssrc = 1234; netp.serip = NULL; netp.serport = -1; netp.type = UDP; char *outfile = NULL; // options int opt = 0; static const char *optString = "?di:o:a:p:w:h:r:f:t:g:s:c:"; opt = getopt(argc, argv, optString); while (opt != -1) { int fmt; switch (opt) { case '?': display_usage(); return 0; case 'd': debug = 1; break; case 's': stage = atoi(optarg); break; case 'i': capp.dev_name = optarg; break; case 'o': outfile = optarg; break; case 'a': netp.serip = optarg; break; case 'p': netp.serport = atoi(optarg); break; case 'c': fmt = atoi(optarg); if (fmt == 1) capp.pixfmt = V4L2_PIX_FMT_YUV420; else capp.pixfmt = V4L2_PIX_FMT_YUYV; break; case 'w': capp.width = cvtp.inwidth = cvtp.outwidth = encp.src_picwidth = encp.enc_picwidth = atoi(optarg); break; case 'h': capp.height = cvtp.inheight = cvtp.outheight = encp.src_picheight = encp.enc_picheight = atoi(optarg); break; case 'r': encp.bitrate = atoi(optarg); break; case 'f': capp.rate = encp.fps = atoi(optarg); break; case 't': encp.chroma_interleave = atoi(optarg); break; case 'g': encp.gop = atoi(optarg); break; default: printf("Unknown option: 
%s\n", optarg); display_usage(); return -1; } opt = getopt(argc, argv, optString); } if (outfile) outfd = fopen(outfile, "wb"); signal(SIGINT, quit_func); caphandle = capture_open(capp); if (!caphandle) { printf("--- Open capture failed\n"); return -1; } if ((stage & 0b00000001) != 0) { cvthandle = convert_open(cvtp); if (!cvthandle) { printf("--- Open convert failed\n"); return -1; } } if ((stage & 0b00000010) != 0) { enchandle = encode_open(encp); if (!enchandle) { printf("--- Open encode failed\n"); return -1; } } if ((stage & 0b00000100) != 0) { pachandle = pack_open(pacp); if (!pachandle) { printf("--- Open pack failed\n"); return -1; } } if ((stage & 0b00001000) != 0) { if (netp.serip == NULL || netp.serport == -1) { printf( "--- Server ip and port must be specified when using network\n"); return -1; } nethandle = net_open(netp); if (!nethandle) { printf("--- Open network failed\n"); return -1; } } // start capture encode loop int ret; void *cap_buf, *cvt_buf, *hd_buf, *enc_buf; char *pac_buf = (char *) malloc(MAX_RTP_SIZE); int cap_len, cvt_len, hd_len, enc_len, pac_len; enum pic_t ptype; struct timeval ctime, ltime; unsigned long fps_counter = 0; int sec, usec; double stat_time = 0; capture_start(caphandle); // !!! need to start capture stream! quit = 0; gettimeofday(<ime, NULL); while (!quit) { if (debug) // print fps { gettimeofday(&ctime, NULL); sec = ctime.tv_sec - ltime.tv_sec; usec = ctime.tv_usec - ltime.tv_usec; if (usec < 0) { sec--; usec = usec + 1000000; } stat_time = (sec * 1000000) + usec; // diff in microsecond if (stat_time >= 1000000) // >= 1s { printf("\n*** FPS: %ld\n", fps_counter); fps_counter = 0; ltime = ctime; } fps_counter++; } ret = capture_get_data(caphandle, &cap_buf, &cap_len); if (ret != 0) { if (ret < 0) // error { printf("--- capture_get_data failed\n"); break; } else // again { usleep(10000); continue; } } if (cap_len <= 0) { printf("!!! 
No capture data\n"); continue; } if (debug) fputc('.', stdout); if ((stage & 0b00000001) == 0) // no convert, capture only { if (outfd) fwrite(cap_buf, 1, cap_len, outfd); continue; } // convert if (capp.pixfmt == V4L2_PIX_FMT_YUV420) // no need to convert { cvt_buf = cap_buf; cvt_len = cap_len; } else // do convert: YUYV => YUV420 { ret = convert_do(cvthandle, cap_buf, cap_len, &cvt_buf, &cvt_len); if (ret < 0) { printf("--- convert_do failed\n"); break; } if (cvt_len <= 0) { printf("!!! No convert data\n"); continue; } } if (debug) fputc('-', stdout); if ((stage & 0b00000010) == 0) // no encode { if (outfd) fwrite(cvt_buf, 1, cvt_len, outfd); continue; } // encode // fetch h264 headers first! while ((ret = encode_get_headers(enchandle, &hd_buf, &hd_len, &ptype)) != 0) { if (debug) fputc('S', stdout); if ((stage & 0b00000100) == 0) // no pack { if (outfd) fwrite(hd_buf, 1, hd_len, outfd); continue; } // pack headers pack_put(pachandle, hd_buf, hd_len); while (pack_get(pachandle, pac_buf, MAX_RTP_SIZE, &pac_len) == 1) { if (debug) fputc('#', stdout); if ((stage & 0b00001000) == 0) // no network { if (outfd) fwrite(pac_buf, 1, pac_len, outfd); continue; } // network ret = net_send(nethandle, pac_buf, pac_len); if (ret != pac_len) { printf("send pack failed, size: %d, err: %s\n", pac_len, strerror(errno)); } if (debug) fputc('>', stdout); } } ret = encode_do(enchandle, cvt_buf, cvt_len, &enc_buf, &enc_len, &ptype); if (ret < 0) { printf("--- encode_do failed\n"); break; } if (enc_len <= 0) { printf("!!! 
No encode data\n"); continue; } if (debug) { char c; switch (ptype) { case PPS: c = 'S'; break; case SPS: c = 'S'; break; case I: c = 'I'; break; case P: c = 'P'; break; case B: c = 'B'; break; default: c = 'N'; break; } fputc(c, stdout); } if ((stage & 0b00000100) == 0) // no pack { if (outfd) fwrite(enc_buf, 1, enc_len, outfd); continue; } // pack pack_put(pachandle, enc_buf, enc_len); while (pack_get(pachandle, pac_buf, MAX_RTP_SIZE, &pac_len) == 1) { if (debug) fputc('#', stdout); if ((stage & 0b00001000) == 0) // no network { if (outfd) fwrite(pac_buf, 1, pac_len, outfd); continue; } // network ret = net_send(nethandle, pac_buf, pac_len); if (ret != pac_len) { printf("send pack failed, size: %d, err: %s\n", pac_len, strerror(errno)); } if (debug) fputc('>', stdout); } } capture_stop(caphandle); free(pac_buf); if ((stage & 0b00001000) != 0) net_close(nethandle); if ((stage & 0b00000100) != 0) pack_close(pachandle); if ((stage & 0b00000010) != 0) encode_close(enchandle); if ((stage & 0b00000001) != 0) convert_close(cvthandle); capture_close(caphandle); if (outfd) fclose(outfd); return 0; }