*/
/*
 * Open the RTSP (or other FFmpeg-readable) source given by javaString and set
 * up the decode pipeline held in file-scope globals:
 *   - demuxer context (pFormatCtx), video/audio stream indices,
 *   - video decoder + RGB24 swscale context (img_convert_ctx) sized to the
 *     native stream dimensions,
 *   - optional audio decoder + swresample context (mono/S16/16kHz out),
 *   - circular buffers and playback state flags.
 *
 * Params:
 *   javaString       - source URL (Java string, copied via GetStringUTFChars).
 *   timebase_seekDen - seek timebase denominator; stored doubled (see below).
 *   timebase_seekNum - seek timebase numerator.
 *   isAudio          - non-zero: also locate/open an audio stream (required
 *                      when set; missing audio is then a hard error).
 *   iRelFlag         - out-parameter (length >= 1): resolution class written
 *                      back to Java (1280->3, 640->4, 320->5; else untouched).
 *
 * Returns 0 on success, -1 on any failure. On failure all partially acquired
 * resources are torn down via .._RtspFromFFMPEG_close().
 *
 * Fixes vs. previous revision:
 *   - stream-scan loops used `i <= nb_streams` (one-past-the-end read);
 *   - audioindex was never reset to -1, so "no audio stream" was undetectable;
 *   - width/height were validated only AFTER being used to size the RGB
 *     buffer (and the check was duplicated);
 *   - the GetStringUTFChars copy leaked on every error path;
 *   - av_malloc() results and swr_init() were unchecked.
 */
JNIEXPORT jint JNICALL Java_com_jiuan_it_ipc_utils_RtspFromFFMPEG_init(
        JNIEnv *env, jobject obj, jstring javaString, jint timebase_seekDen,
        jint timebase_seekNum, jint isAudio, jintArray iRelFlag) {
    /* Reset all pipeline globals so _close() can tear down safely. */
    pFormatCtx = NULL;
    i = 0;
    videoindex = 0;
    audioindex = 0;
    pCodecCtx_video = NULL;
    pCodec_video = NULL;
    pCodecCtx_audio = NULL;
    pCodec_audio = NULL;
    out_buffer_video = NULL;
    out_buffer_audio = NULL;
    img_convert_ctx = NULL;
    aud_convert_ctx = NULL;
    avdic = NULL;
    y_size = 0;
    pFrameYUV = NULL;

    int ret = -1;
    int iArr[1];
    /* Declared (and NULL-initialized) up front so every `goto ErrLab` path
     * can release the UTF copy exactly once — it previously leaked on all
     * error paths. */
    const char *url = NULL;

    LOGD("%s-------%d进入该函数", __FUNCTION__, __LINE__);
    av_log_set_callback(custom_log);
    av_register_all();       /* register demuxers/decoders (pre-4.0 FFmpeg) */
    avformat_network_init(); /* enable network protocols (rtsp://, ...) */
    pFormatCtx = avformat_alloc_context();
    pFrameYUV = av_frame_alloc();
    if (pFrameYUV == NULL) {
        ret = -1;
        goto ErrLab;
    }

    /* Fetch the source URL from Java. */
    url = (*env)->GetStringUTFChars(env, javaString, 0);
    LOGD("接收java中的地址是:%s", url);
    ret = avformat_open_input(&pFormatCtx, url, NULL, NULL);
    if (ret != 0) {
        LOGE("无法打开文件,函数返回值是:%s\n", url);
        ret = -1;
        goto ErrLab;
    }
    LOGD("打开了视频源\n");

    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
        LOGE("找不到流信息\n");
        ret = -1;
        goto ErrLab;
    }
    LOGD("找到流信息\n");

    /* Locate the first video stream.
     * BUG FIX: the loop bound was `i <= nb_streams`, which dereferenced
     * streams[nb_streams] — one past the end of the array. */
    videoindex = -1;
    LOGD("pFormatCtx->nb_streams的值是:%d\n", pFormatCtx->nb_streams);
    for (i = 0; i < pFormatCtx->nb_streams; i++) {
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoindex = i;
            break;
        }
    }
    if (videoindex == -1) {
        LOGE("没有找到视频流\n");
        ret = -1;
        goto ErrLab;
    }
    LOGD("找到视频流,%d\n", videoindex);

    if (isAudio) {
        /* BUG FIX: audioindex was left at 0, so the `== -1` "not found"
         * check below could never trigger and stream 0 (video) would have
         * been opened as audio. Same off-by-one loop bound fixed as above. */
        audioindex = -1;
        for (i = 0; i < pFormatCtx->nb_streams; i++) {
            if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO) {
                audioindex = i;
                break;
            }
        }
        if (audioindex == -1) {
            LOGE("没有找到音频流\n");
            ret = -1;
            goto ErrLab;
        }
        LOGD("找到音频流,%d\n", audioindex);
    }

    /* Video decoder: look up and open the codec for the stream. */
    pCodecCtx_video = pFormatCtx->streams[videoindex]->codec;
    pCodec_video = avcodec_find_decoder(pCodecCtx_video->codec_id);
    if (pCodec_video == NULL) {
        LOGE("找不到解码器\n");
        ret = -1;
        goto ErrLab;
    }
    LOGD("找到解码器\n");
    if (avcodec_open2(pCodecCtx_video, pCodec_video, NULL) < 0) {
        LOGE("打不开解码器\n");
        ret = -1;
        goto ErrLab;
    }
    LOGD("打开解码器\n");

    if (isAudio) {
        /* Audio decoder: same lookup/open sequence. */
        pCodecCtx_audio = pFormatCtx->streams[audioindex]->codec;
        pCodec_audio = avcodec_find_decoder(pCodecCtx_audio->codec_id);
        if (pCodec_audio == NULL) {
            LOGE("找不到解码器\n");
            ret = -1;
            goto ErrLab;
        }
        LOGD("找到解码器\n");
        if (avcodec_open2(pCodecCtx_audio, pCodec_audio, NULL) < 0) {
            LOGE("打不开解码器\n");
            ret = -1;
            goto ErrLab;
        }
        LOGD("打开解码器\n");
    }

    /* BUG FIX: validate the decoder-reported dimensions BEFORE sizing the
     * RGB buffer with them (the old code malloc'd first and then ran the
     * same width/height check twice). */
    if (pCodecCtx_video->width == 0 || pCodecCtx_video->height == 0) {
        LOGE("视频宽高不正常!\n");
        ret = -1;
        goto ErrLab;
    }

    out_buffer_video = (uint8_t *) av_malloc(
            avpicture_get_size(AV_PIX_FMT_RGB24, pCodecCtx_video->width,
                               pCodecCtx_video->height));
    if (out_buffer_video == NULL) {
        LOGE("av_malloc(out_buffer_video) failed\n");
        ret = -1;
        goto ErrLab;
    }

    /* Wrap the RGB buffer in pFrameYUV and build the native-size
     * pix_fmt -> RGB24 conversion context. */
    avpicture_fill((AVPicture *) pFrameYUV, out_buffer_video, AV_PIX_FMT_RGB24,
                   pCodecCtx_video->width, pCodecCtx_video->height);
    img_convert_ctx = sws_getContext(pCodecCtx_video->width,
                                     pCodecCtx_video->height,
                                     pCodecCtx_video->pix_fmt,
                                     pCodecCtx_video->width,
                                     pCodecCtx_video->height,
                                     AV_PIX_FMT_RGB24, SWS_BILINEAR,
                                     NULL, NULL, NULL);
    if (img_convert_ctx == NULL) {
        LOGE("Cannot initialize the conversion context!\n");
        ret = -1;
        goto ErrLab;
    }

    y_size = pCodecCtx_video->width * pCodecCtx_video->height;
    av_dump_format(pFormatCtx, 0, url, 0);

    if (isAudio) {
        out_buffer_audio = (uint8_t *) av_malloc(AVCODEC_MAX_AUDIO_FRAME_SIZE * 2);
        if (out_buffer_audio == NULL) {
            LOGE("av_malloc(out_buffer_audio) failed\n");
            ret = -1;
            goto ErrLab;
        }
        aud_convert_ctx = swr_alloc();
        if (aud_convert_ctx == NULL) {
            LOGE("设置上下文出错");
            ret = -1;
            goto ErrLab;
        }
        /* Resample whatever the decoder outputs to mono / S16 / 16 kHz. */
        aud_convert_ctx = swr_alloc_set_opts(aud_convert_ctx,
                AV_CH_LAYOUT_MONO,                                   /* out channel layout */
                AV_SAMPLE_FMT_S16,                                   /* out sample format  */
                16000,                                               /* out sample rate    */
                av_get_default_channel_layout(pCodecCtx_audio->channels),
                pCodecCtx_audio->sample_fmt,                         /* in sample format   */
                pCodecCtx_audio->sample_rate,                        /* in sample rate     */
                0,                                                   /* log offset         */
                NULL);                                               /* log context        */
        if (swr_init(aud_convert_ctx) < 0) {  /* was unchecked */
            LOGE("swr_init failed\n");
            ret = -1;
            goto ErrLab;
        }
    }

    /* Playback state: ring buffers, flags, timestamps. */
    cbuf_init(&m_cbuf_video);
    cbuf_init(&m_cbuf_audio);
    IsClose = 0;
    m_line_finish = 0;
    iErrFrame_gloab = 0;
    iLast_PTS = 0;
    iStartFlag = 0;
    iPts_Video = AV_NOPTS_VALUE;
    iPts_Audio = AV_NOPTS_VALUE;
    timebase_pri.den = timebase_seekDen * 2; /* e.g. 1000 * 2 */
    timebase_pri.num = timebase_seekNum;     /* e.g. 1 */
    iCurrentWidth = pCodecCtx_video->width;
    iCurrentHeight = pCodecCtx_video->height;

    /* Report a resolution class back to Java (unknown widths write nothing). */
    if (iCurrentWidth == 1280) {
        iArr[0] = 3;
        (*env)->SetIntArrayRegion(env, iRelFlag, 0, 1, iArr);
    } else if (iCurrentWidth == 640) {
        iArr[0] = 4;
        (*env)->SetIntArrayRegion(env, iRelFlag, 0, 1, iArr);
    } else if (iCurrentWidth == 320) {
        iArr[0] = 5;
        (*env)->SetIntArrayRegion(env, iRelFlag, 0, 1, iArr);
    }

    ret = 0;

ErrLab:
    /* BUG FIX: the UTF copy was only released on success; release it on every
     * path that obtained it. */
    if (url != NULL)
        (*env)->ReleaseStringUTFChars(env, javaString, url);
    if (ret == -1)
        Java_com_jiuan_it_ipc_utils_RtspFromFFMPEG_close(env, obj);
    return ret;
}
/*
 * data_init() - zero a struct ts and populate every field with its default.
 * Allocates the PSI table parsers, key/CAMD state, ring buffers and thread
 * attributes; sets config defaults (I/O on stdin/stdout, syslog port, report
 * intervals, etc.). Ownership: caller is responsible for freeing everything
 * allocated here via the matching teardown routine — TODO confirm which.
 */
void data_init(struct ts *ts) {
	memset(ts, 0, sizeof(struct ts));
	// Stream: current / last-seen / temporary parser state for each PSI table
	ts->pat = ts_pat_alloc();
	ts->curpat = ts_pat_alloc();
	ts->genpat = ts_pat_alloc();
	ts->cat = ts_cat_alloc();
	ts->curcat = ts_cat_alloc();
	ts->pmt = ts_pmt_alloc();
	ts->curpmt = ts_pmt_alloc();
	ts->sdt = ts_sdt_alloc();
	ts->cursdt = ts_sdt_alloc();
	ts->emm = ts_privsec_alloc();
	ts->last_emm = ts_privsec_alloc();
	ts->tmp_emm = ts_privsec_alloc();
	ts->ecm = ts_privsec_alloc();
	ts->last_ecm = ts_privsec_alloc();
	ts->tmp_ecm = ts_privsec_alloc();
	// Per-PID bookkeeping maps (filtering, continuity counters, seen PIDs)
	pidmap_clear(&ts->pidmap);
	pidmap_clear(&ts->cc);
	pidmap_clear(&ts->pid_seen);
	// Key: CSA control-word state; record when the key was (last) set
	memset(&ts->key, 0, sizeof(ts->key));
	ts->key.csakey = csa_key_alloc();
	gettimeofday(&ts->key.ts_keyset, NULL);
	// CAMD: card server connection defaults (fd -1 = not connected);
	// user/pass are masked placeholders — real values presumably come from
	// config/CLI parsing elsewhere (TODO confirm).
	memset(&ts->camd, 0, sizeof(ts->camd));
	ts->camd.server_fd = -1;
	ts->camd.server_port = 2233;
	ts->camd.key = &ts->key;
	ts->camd.user = "******";
	ts->camd.pass = "******";
	// Default newcamd login DES key (the well-known 14-byte default)
	strcpy(ts->camd.newcamd.hex_des_key, "0102030405060708091011121314");
	// Default protocol: cs378x (camd35 over TCP)
	camd_proto_cs378x(&ts->camd.ops);
	// Config defaults
	ts->syslog_port = 514;
	ts->ts_discont = 1;
	ts->ecm_cw_log = 1;
	ts->debug_level = 0;
	ts->req_CA_sys = CA_CONAX;
	ts->emm_send = 0;
	ts->pid_filter = 1;
	// EMM/ECM statistics are reported every 60 seconds
	ts->emm_report_interval = 60;
	ts->emm_last_report = time(NULL);
	ts->ecm_report_interval = 60;
	ts->ecm_last_report = time(NULL);
	// Warn if no code word arrives for cw_warn_sec; push the first warning
	// deadline cw_warn_sec into the future so startup gets a grace period
	ts->cw_warn_sec = 60;
	ts->cw_last_warn = time(NULL);
	ts->cw_last_warn = ts->cw_last_warn + ts->cw_warn_sec;
	ts->key.ts = time(NULL);
	// Default I/O: read stdin, write stdout, plain file descriptors
	ts->input.fd = 0; // STDIN
	ts->input.type = FILE_IO;
	ts->output.fd = 1; // STDOUT
	ts->output.type = FILE_IO;
	ts->output.ttl = 1;
	ts->output.tos = -1;
	// Ring buffers sized as multiples of (7 TS packets * CSA batch size)
	ts->decode_buf = cbuf_init((7 * csa_get_batch_size() * 188) * 16, "decode"); // ~658Kb
	ts->write_buf = cbuf_init((7 * csa_get_batch_size() * 188) * 8, "write"); // ~324Kb
	ts->input_buffer = list_new("input");
	// Shrink worker-thread stacks if the platform default exceeds our cap
	pthread_attr_init(&ts->thread_attr);
	size_t stack_size;
	pthread_attr_getstacksize(&ts->thread_attr, &stack_size);
	if (stack_size > THREAD_STACK_SIZE)
		pthread_attr_setstacksize(&ts->thread_attr, THREAD_STACK_SIZE);
}