Example #1
JNIEXPORT jstring JNICALL JNIDEFINE(nativeCallGatway)(JNIEnv* env, jclass clz, jstring jstr_file, jstring jstr_uri) {
	std::string file, uri, result;
	file = convert_jstring(env, jstr_file);
	uri = convert_jstring(env, jstr_uri);

	result = "";
	if (GatewayInterface::Gateway != NULL)
		GatewayInterface::Gateway->RunInterface(file, uri, result);

	return env->NewStringUTF(result.c_str());
}
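
Example #1 relies on two project-local pieces that are not shown in the snippet: the JNIDEFINE() name-mangling macro and a convert_jstring() overload returning std::string. The sketch below is only an assumption about how they could look; the Java package/class in the macro is a placeholder, and the real project code may differ.

#include <jni.h>
#include <string>

// Hypothetical: expands a short native-method name into the full JNI symbol.
// The package/class path here is a placeholder, not the project's real one.
#define JNIDEFINE(name) Java_com_example_gateway_GatewayNative_##name

// Hypothetical sketch of the jstring -> std::string conversion used above.
static std::string convert_jstring(JNIEnv* env, jstring jstr) {
	if (jstr == NULL)
		return std::string();

	const char* utf = env->GetStringUTFChars(jstr, NULL);	// returns NULL on allocation failure
	if (utf == NULL)
		return std::string();

	std::string result(utf);
	env->ReleaseStringUTFChars(jstr, utf);	// always release the UTF-8 buffer
	return result;
}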
Example #2
JNIEXPORT void JNICALL
Java_tv_xormedia_AndCodec_CodecLib_EasyDecoderTest(JNIEnv* env, jobject thiz, jstring input_file, jstring output_file,
	jint width, jint height, jint out_fmt, jint frames, jint is_writefile)
{
	and_log_init("/mnt/sdcard/easy_decoder.log", LOG_INFO);
	and_log_writeline_simple(0, LOG_INFO, "EasyDecoderTest()");
	
	// parse input and output filenames (relative to the app's private files directory)
	char str_in_file[256]	= {0};
	char str_out_file[256]	= {0};
	char str_tmp[256]	= {0};
	int str_len = (int)sizeof(str_tmp);

	convert_jstring(env, str_tmp, &str_len, input_file);
	snprintf(str_in_file, sizeof(str_in_file), "/data/data/tv.xormedia.AndCodec/files/%s", str_tmp);
	str_len = (int)sizeof(str_tmp);	// convert_jstring updates str_len, so reset it before the second call
	convert_jstring(env, str_tmp, &str_len, output_file);
	snprintf(str_out_file, sizeof(str_out_file), "/data/data/tv.xormedia.AndCodec/files/%s", str_tmp);
	
	and_log_writeline_easy(0, LOG_INFO, "in: %s, out %s, frames %d, write_file %d", 
		str_in_file, str_out_file,frames, is_writefile);
	
	avcodec_register_all();
	
	int PIC_W = width;
	int PIC_H = height;
	int ret = 0;
	enum AVPixelFormat out_pix_fmt = AV_PIX_FMT_RGB565LE;

	uint8_t* video_dst_data[4] = {NULL};
	int video_dst_linesize[4];

	AVCodecContext*			dec_ctx		= NULL;
	AVCodec*				dec			= NULL;
	unsigned char*			pbuf		= NULL;
	AVFrame*				frame 		= NULL;
	AVPacket pkt;

	int to_read, readed;
	int fd_in;
	fd_in = and_sysutil_open_file(str_in_file, kANDSysUtilOpenReadOnly);
	if(fd_in < 0) {
		and_log_writeline_easy(0, LOG_ERROR, "failed to open h264(with size) file: %s", str_in_file);
		return;
	}
	
	int written;
	int fd_out;
	fd_out = and_sysutil_create_or_open_file(str_out_file, 0644);
	if(fd_out < 0) {
		and_log_writeline_easy(0, LOG_ERROR, "failed to open rgb565 file: %s", str_out_file);
		and_sysutil_close(fd_in);	// don't leak the input file descriptor
		return;
	}

	do {
		dec = avcodec_find_decoder(AV_CODEC_ID_H264);
		if (dec == NULL) {
			and_log_writeline_easy(0, LOG_ERROR, "H.264 decoder not found.");
			break;
		}

		dec_ctx = avcodec_alloc_context3(dec);
		if (dec_ctx == NULL) {
			and_log_writeline_easy(0, LOG_ERROR, "failed to allocate decoder context.");
			break;
		}
		dec_ctx->width	 = PIC_W;
		dec_ctx->height	 = PIC_H;
		dec_ctx->pix_fmt = AV_PIX_FMT_YUV420P;

		ret = avcodec_open2(dec_ctx, dec, NULL);
		if (ret < 0) {
			and_log_writeline_easy(0, LOG_ERROR, "failed to open video decoder.");
			break;
		}
	
		ret = av_image_alloc(video_dst_data, video_dst_linesize, dec_ctx->width, dec_ctx->height, dec_ctx->pix_fmt, 1);
		struct SwsContext* img_convert_ctx = sws_getContext(dec_ctx->width, dec_ctx->height, dec_ctx->pix_fmt, PIC_W, PIC_H, out_pix_fmt, SWS_BICUBIC, NULL, NULL, NULL);

		AVFrame* rgb_pic = alloc_picture(out_pix_fmt, PIC_W, PIC_H);
		frame = avcodec_alloc_frame();

		av_init_packet(&pkt);
		pkt.data = NULL;
		pkt.size = 0;

		const int frame_len = 65536;	// maximum encoded frame size this test supports
		pbuf = (unsigned char*)malloc(frame_len);
		if (pbuf == NULL)
			break;

		int got_frame = 0;
		int dec_frames = 0; 
		while(1)
		{ 
			to_read = 4;
			readed = and_sysutil_read(fd_in, pbuf, to_read);
			if (readed != to_read) {
				and_log_writeline_easy(0, LOG_INFO, "eof. decoder done! total %d frames", dec_frames);
				break;
			}

			to_read = *((int*)pbuf);	// 4-byte native-endian frame-size prefix
			if (to_read <= 0 || to_read > frame_len) {
				and_log_writeline_easy(0, LOG_ERROR, "invalid h264 frame size %d", to_read);
				break;
			}
			and_log_writeline_easy(0, LOG_DEBUG, "read h264 frame, size %d", to_read);
			readed = and_sysutil_read(fd_in, pbuf, to_read);
			if (readed != to_read) {
				and_log_writeline_easy(0, LOG_INFO, "eof.");
				break;
			}

			pkt.data = pbuf;
			pkt.size = readed;
			and_log_writeline_easy(0, LOG_DEBUG, "encoder frame size:[%d] %d", dec_frames, readed);

			dec_frames++;
			while(1)
			{
				if( pkt.size == 0)
					break;

				ret = avcodec_decode_video2(dec_ctx, frame, &got_frame, &pkt);
				if (ret < 0) 
					break;

				if(pkt.data) {
					pkt.data += ret;
					pkt.size -= ret;
				}

				if (got_frame) {
					and_log_writeline_easy(0, LOG_DEBUG, "got pic.");
					av_image_copy(video_dst_data, video_dst_linesize,(const uint8_t **)(frame->data), frame->linesize,
						dec_ctx->pix_fmt, dec_ctx->width, dec_ctx->height);

					//to out_pix_fmt 
					sws_scale(img_convert_ctx, (uint8_t const**)video_dst_data, video_dst_linesize, 0, dec_ctx->height, rgb_pic->data, rgb_pic->linesize);

					int len = rgb_pic->linesize[0] * PIC_H;
					and_log_writeline_easy(0, LOG_DEBUG, "pic len:%d.", len);
					written = and_sysutil_write(fd_out, (void *)rgb_pic->data[0], len);
					if (written != len) {
						and_log_writeline_easy(0, LOG_ERROR, "failed to write %d - %d", len, written);
						break;
					}

					// pkt.data points into pbuf, which this function frees itself,
					// so there is nothing for libavcodec to release per packet
				}
			}
		}

		pkt.data = NULL;
		pkt.size = 0;
		break;
	}while(0);

	if(pbuf)
		free(pbuf);

	if(dec_ctx) {
		avcodec_close(dec_ctx);
		av_free(dec_ctx);	// avcodec_close() does not free the context itself
	}

	if(frame)
		av_free(frame);

	if(video_dst_data[0])
		av_free(video_dst_data[0]);

	and_sysutil_close(fd_in);
	and_sysutil_close(fd_out);
}
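
Two helpers referenced in Example #2, alloc_picture() and the buffer-based convert_jstring(), are not shown in the snippet either. The sketches below are assumptions inferred from the call sites above (and, for alloc_picture(), from the style of the FFmpeg example programs); they are not the project's actual implementations.

#include <string.h>
#include <jni.h>
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavutil/imgutils.h>
}

// Hypothetical: allocate an AVFrame plus an attached image buffer, using the
// same old FFmpeg API generation as the code above (avcodec_alloc_frame / av_image_alloc).
static AVFrame* alloc_picture(enum AVPixelFormat pix_fmt, int width, int height)
{
	AVFrame* picture = avcodec_alloc_frame();
	if (picture == NULL)
		return NULL;

	// allocate the image and fill the frame's data/linesize arrays
	if (av_image_alloc(picture->data, picture->linesize, width, height, pix_fmt, 1) < 0) {
		av_free(picture);
		return NULL;
	}

	picture->width  = width;
	picture->height = height;
	picture->format = pix_fmt;
	return picture;
}

// Hypothetical: copy a jstring into a caller-supplied buffer. *len carries the
// buffer capacity in and the copied length out, which is why Example #2 resets
// it between the two calls.
static void convert_jstring(JNIEnv* env, char* buf, int* len, jstring jstr)
{
	if (buf == NULL || len == NULL || *len <= 0)
		return;

	const char* utf = env->GetStringUTFChars(jstr, NULL);
	if (utf == NULL) {
		buf[0] = '\0';
		*len = 0;
		return;
	}

	strncpy(buf, utf, (size_t)(*len - 1));
	buf[*len - 1] = '\0';
	*len = (int)strlen(buf);
	env->ReleaseStringUTFChars(jstr, utf);
}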