/*
 * Parse one command buffer from the vibrator control pipe.
 *   "+<ms>$" -> queue a vibrate request for <ms> milliseconds
 *   "-"      -> queue a stop request
 * Anything else is logged and dropped. `count` is unused.
 */
static void process_buf(const char *buf, int count __attribute__((unused)))
{
    int ret;

    switch (buf[0]) {
    case '+': {
        int duration;
        ret = sscanf(&buf[1], "%d$", &duration);
        /* sscanf returns the number of successful conversions (1 here) or
         * EOF on input failure before any conversion.  The old check
         * `if (!ret)` let EOF (-1, truthy) slip through with `duration`
         * uninitialized. */
        if (ret != 1) {
            ALOGW("process_buf: malformed vibrate request");
            return;
        }

        ALOGV("process_buf: vibrate(duration=%d)", duration);
        /* Cast through intptr_t: converting int directly to void * is
         * implementation-defined on targets where pointers are wider
         * than int. */
        thread_queue_add(&work_queue, (void *)(intptr_t)duration, MSG_VIBRATE);
        break;
    }

    case '-': {
        ALOGV("process_buf: stop()");
        thread_queue_add(&work_queue, NULL, MSG_STOP);
        break;
    }

    default: {
        ALOGW("process_buf: unrecognized request '%s'", buf);
    }
    }
}
/* ---- Example no. 2 (scraped-listing separator; original text: "Esempio n. 2", score 0) ---- */
/*
 * Allocate a block_s describing one changed block (chunk cx/cy, local x/y,
 * height z, block type) and enqueue it on saveQueue for the saver thread.
 * Ownership of the allocation transfers to the queue consumer.
 */
void addBlockQue(unsigned char cx, unsigned char cy, unsigned char x, unsigned char y, unsigned short z, unsigned short type)
{
	/* sizeof *block ties the size to the variable's type; also check the
	 * result — the original dereferenced an unchecked malloc. */
	block_s * block = malloc(sizeof *block);
	if (!block)
		return;	/* drop the update on OOM rather than crash */
	block->cx = cx;
	block->cy = cy;
	block->x = x;
	block->y = y;
	block->z = z;
	block->type = type;

	thread_queue_add(saveQueue, block, 0);
}
/* ---- Example no. 3 (scraped-listing separator; original text: "Esempio n. 3", score 0) ---- */
/*
 * Demux-consumer thread: pops AVPacket pointers from instance->queue and
 * routes each to the video or audio queue by stream index.
 * Exits (and wakes both decoder threads with a -1 sentinel) on either a
 * -1 shutdown message or a 10-second timeout with no data.
 */
void *getPacket(void *minstance){
	playInstance *instance = (playInstance *)minstance;
	LOGE("getpacket线程开始\n");
	struct timespec time;
	time.tv_sec=10;/* wait at most 10 s per pop — guards against a bad network */
	time.tv_nsec=0;
	struct threadmsg msg;

	while(1){
		/* clear the message each round so a stale .data can't be mistaken
		 * for a fresh packet after a timeout */
		memset(&msg,0,sizeof(struct threadmsg));
		msg.data=NULL;

		AVPacket pavpacket;
		thread_queue_get(instance->queue,&time,&msg);

		if(msg.msgtype==-1){/* -1 is the normal shutdown sentinel from the producer */
			LOGE("get线程正常退出\n");
			/* propagate shutdown to both decoder threads */
			thread_queue_add(instance->video_queue,NULL,-1);
			thread_queue_add(instance->audio_queue,NULL,-1);
			break;
		}

		if(msg.data ==NULL){/* timed out with no packet: treat as network failure */
			LOGE("get线程超时退出\n");
			thread_queue_add(instance->video_queue,NULL,-1);
			thread_queue_add(instance->audio_queue,NULL,-1);
			instance->timeout_flag = 1;
			break;
		}

		AVPacket *packet_p = msg.data;
		pavpacket = *packet_p;	/* local copy used only to read stream_index */

		/* NOTE(review): packets that match neither stream are never freed
		 * here — looks like a leak for files with extra streams; confirm
		 * who owns packet_p in that case. */
		if(pavpacket.stream_index==instance->vs->videoStream){
			thread_queue_add(instance->video_queue,packet_p,1);
		}else
		if(pavpacket.stream_index==instance->vs->audioStream){
			thread_queue_add(instance->audio_queue,packet_p,1);
		}
	}
	return NULL;
}
/* ---- Example no. 4 (scraped-listing separator; original text: "Esempio n. 4", score 0) ---- */
/*
 * Start the player (JNI entry point).
 * Opens `file` with FFmpeg, finds the video/audio streams, opens decoders,
 * sets up sws (video->RGBA) and swr (audio->S16 mono) converters, spawns the
 * reader/video/audio threads, then loops reading packets into the demux
 * queue until stop or EOF, and finally joins threads and releases resources.
 * Returns 0 on normal completion/stop, -1 on open/decode/timeout failure.
 */
int Java_info_sodapanda_sodaplayer_FFmpegVideoView_openfile(JNIEnv* env,jobject obj,jstring file,jlong ptr){
	playInstance * instance = (playInstance *)ptr;
	/* initialize the three queues: demuxed packets, video packets, audio packets
	 * NOTE(review): these mallocs are unchecked — confirm OOM policy */
	instance->queue = malloc(sizeof(struct threadqueue));
	thread_queue_init(instance->queue);
	instance->video_queue = malloc(sizeof(struct threadqueue));
	thread_queue_init(instance->video_queue);
	instance->audio_queue = malloc(sizeof(struct threadqueue));
	thread_queue_init(instance->audio_queue);

	instance->stop=0;
	instance->timeout_flag = 0;
	instance->vs=av_malloc(sizeof (VideoState));

	LOGE("开始执行openfile\n");
	jboolean isfilenameCopy;
	/* NOTE(review): GetStringUTFChars is never released with
	 * ReleaseStringUTFChars — looks like a per-call leak; confirm */
	const char *filename = (*env)-> GetStringUTFChars(env, file, &isfilenameCopy);
	jclass cls = (*env)->GetObjectClass(env,obj);
	/* cache the Java callback method IDs used below */
	instance->initAdudioTrack = (*env)->GetMethodID(env,cls,"initAdudioTrack","(I)[B");
	instance->onNativeConnected = (*env)->GetMethodID(env,cls,"onNativeConnected","()V");
	instance->finishplay = (*env)->GetMethodID(env,cls,"finishplay","()V");

	(*env)->GetJavaVM(env,&(instance->gJavaVm));
	instance->gJavaobj = (*env)->NewGlobalRef(env,obj);

	//video
	AVFormatContext *pFormatCtx =NULL;
	AVCodecContext *pCodecCtx=NULL;
	AVCodec *pCodec=NULL;
	AVFrame *pFrame =NULL;
	int videoStream;
	AVDictionary *videoOptionsDict= NULL;
	struct SwsContext *sws_ctx =NULL;
	void* buffer;
	jobject bitmap;

	//audio
	AVCodecContext *aCodecCtx=NULL;
	AVCodec *aCodec=NULL;
	int audioStream;
	AVDictionary *audioOptionsDict = NULL;
	AVFrame *audio_frame;
	audio_frame = avcodec_alloc_frame();

	av_register_all();	/* register all codecs/demuxers */
	avformat_network_init();	/* initialize network support */
	pFormatCtx= avformat_alloc_context();
	/* cap stream analysis time; value is 1,000,000 us (1 s) — the original
	 * comment said 10,000 us, which did not match the code */
	pFormatCtx->max_analyze_duration=1000000;
	pFormatCtx->interrupt_callback.callback = call_back;/* interrupt (abort) callback */
	pFormatCtx->interrupt_callback.opaque = instance;/* opaque arg passed to the callback */

	/* start the reader thread early so an open-file timeout can be caught */
	pthread_t rtid;
	pthread_create(&rtid,NULL,getPacket,instance);

	/* open the media file
	 * NOTE(review): the early returns below leak the queues/vs allocations
	 * and leave the reader thread running — confirm cleanup strategy */
	if(avformat_open_input(&pFormatCtx,filename, NULL, NULL)!=0){
		if(instance->stop){
			return 0;
		}
		LOGE("无法打开文件\n");
		return -1; /* cannot open the media file */
	}
	if(instance->stop){
		return 0;
	}

	/* retrieve stream information */
	if(avformat_find_stream_info(pFormatCtx, NULL)<0){
		LOGE("无法找到流信息\n");
		return -1;
	}

	av_dump_format(pFormatCtx, 0, filename, 0);/* log the analyzed stream info */

	videoStream = -1;
	audioStream = -1;

	int i =0;
	for (i=0;i<pFormatCtx->nb_streams;i++){/* locate the first video and audio streams */
		if(videoStream<0 && pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO){
			videoStream = i;
			instance->vs->video_time_base = av_q2d(pFormatCtx->streams[videoStream]->time_base);
			LOGE("videostream is %d\n",videoStream);
		}
		if(audioStream<0 && pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_AUDIO){
			audioStream = i;
			LOGE("audiostream is %d\n",audioStream);
			/* capture source audio parameters for the resampler setup below */
			instance->vs->audio_time_base = av_q2d(pFormatCtx->streams[audioStream]->time_base);
			instance->vs->sample_rate_src = pFormatCtx->streams[i]->codec->sample_rate;
			instance->vs->sample_fmt = pFormatCtx->streams[i]->codec->sample_fmt;
			instance->vs->sample_layout = pFormatCtx->streams[i]->codec->channel_layout;
			if(instance->vs->sample_rate_src <= 0){
				LOGE("Audio Sample Rate is wrong");
				return -1;
			}else{
				/* ask Java to create the AudioTrack and hand back its buffer */
				jbyteArray aarray = (jbyteArray)((*env)->CallObjectMethod(env,obj,instance->initAdudioTrack,instance->vs->sample_rate_src));
				instance->global_aarray = (*env)->NewGlobalRef(env,aarray);
				LOGE("initAdudioTrack返回\n");
			}
		}
	}

	if(videoStream==-1){
		LOGE("无法找到视频流");
		return -1;
	}

	/* open the audio decoder */
	if(audioStream != -1 && instance->vs->sample_rate_src>0){
		aCodecCtx = pFormatCtx->streams[audioStream]->codec;
		aCodec= avcodec_find_decoder(aCodecCtx->codec_id);

		if(avcodec_open2(aCodecCtx,aCodec,&audioOptionsDict)<0){
			LOGE("无法打开Audio解码器");
			return -1;
		}
	}

	/* open the video decoder */
	if(videoStream != -1){
		pCodecCtx=pFormatCtx->streams[videoStream]->codec;
		pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
		if(avcodec_open2(pCodecCtx,pCodec,&videoOptionsDict)<0){
			LOGE("无法打开视频解码器\n");
			return -1;
		}
	}

	/* if the caller did not set a display size, use the video's native size */
	if(instance->display_height == 0 ){
		instance->display_width = pCodecCtx->width;
		instance->display_height = pCodecCtx->height;
		setAndroidWindowPix(pCodecCtx->width,pCodecCtx->height,instance);
	}

	pFrame = avcodec_alloc_frame();

	/* video conversion: decoder pixel format -> RGBA at display size */
	sws_ctx = sws_getContext(
		pCodecCtx->width,
		pCodecCtx->height,
		pCodecCtx->pix_fmt,
		instance->display_width,
		instance->display_height,
		AV_PIX_FMT_RGBA,
		SWS_BILINEAR,
		NULL,
		NULL,
		NULL
	);

	/* create the Android bitmap that the RGBA frame renders into */
	bitmap = createBitmap(env, instance->display_width, instance->display_height);
	AndroidBitmap_lockPixels(env, bitmap, &buffer);
	AVFrame *RGBAFrame;
	RGBAFrame = avcodec_alloc_frame();
	avpicture_fill((AVPicture *) RGBAFrame, buffer, AV_PIX_FMT_RGBA, instance->display_width, instance->display_height);

	/* raw audio conversion: source format/layout -> S16 mono */
	struct SwrContext *swr_ctx;
	swr_ctx = swr_alloc();

	av_opt_set_int(swr_ctx, "in_sample_fmt", instance->vs->sample_fmt, 0);
	av_opt_set_int(swr_ctx, "out_sample_fmt", AV_SAMPLE_FMT_S16, 0);
	av_opt_set_int(swr_ctx, "in_channel_layout", instance->vs->sample_layout, 0);
	av_opt_set_int(swr_ctx, "out_channel_layout", AV_CH_LAYOUT_MONO, 0);

	swr_init(swr_ctx);

	/* publish everything the decoder threads need through the shared state */
	instance->vs->RGBAFrame=RGBAFrame;
	instance->vs->buffer=buffer;
	instance->vs->pCodecCtx=pCodecCtx;
	instance->vs->pFrame=pFrame;
	instance->vs->sws_ctx=sws_ctx;
	instance->vs->videoStream=videoStream;
	instance->vs->aCodecCtx=aCodecCtx;
	instance->vs->audioStream=audioStream;
	instance->vs->audio_decode_frame=audio_frame;
	instance->vs->swr_ctx=swr_ctx;

	/* video decode thread */
	pthread_t video_tid;
	if(videoStream!=-1){
		pthread_create(&video_tid,NULL,video_thread,instance);
	}

	/* audio decode thread
	 * NOTE(review): if this branch is skipped, audio_tid stays uninitialized
	 * yet is joined below — confirm; same risk for video_tid in theory */
	pthread_t audio_tid;
	if(audioStream!=-1 && instance->vs->sample_rate_src >0){
		pthread_create(&audio_tid,NULL,audio_thread,instance);
	}

	/* notify the Android side: connected to the RTMP server */
	(*env)->CallVoidMethod(env,obj,instance->onNativeConnected);

	/* main demux loop: read packets and feed the shared queue */
	while(1){
		if(instance->stop){/* stop requested: shut the pipeline down */
			/* a NULL/-1 message is the end-of-stream sentinel */
			thread_queue_add(instance->queue,NULL,-1);
			break;
		}

		AVPacket *packet_p = malloc(sizeof(AVPacket));
		/* read one packet and enqueue it (ownership moves to the consumer) */
		if(av_read_frame(pFormatCtx,packet_p)<0){/* network down or end of stream */
			thread_queue_add(instance->queue,NULL,-1);
			break;
		}

		thread_queue_add(instance->queue,packet_p,1);
	}

	LOGE("native主循环退出\n");
	thread_queue_add(instance->queue,NULL,-1);/* make the get thread stop */
	pthread_join(rtid,NULL);
	pthread_join(video_tid,NULL);
	pthread_join(audio_tid,NULL);

	LOGE("getpacket线程环退出\n");
	thread_queue_cleanup(instance->queue,1);
	thread_queue_cleanup(instance->video_queue,1);
	thread_queue_cleanup(instance->audio_queue,1);

    av_free(instance->vs);
    av_free(RGBAFrame);
    av_free(pFrame);
    avcodec_close(pCodecCtx);
    avcodec_close(aCodecCtx);
    avformat_close_input(&pFormatCtx);
    AndroidBitmap_unlockPixels(env,bitmap);
    LOGE("清理退出\n");
    if(instance->stop){
    	return 0;
    }
    if(instance->timeout_flag){
    	return -1;
    }else{
        LOGE("执行到finishplay");
    	(*env)->CallVoidMethod(env,obj,instance->finishplay);
    	return 0;
    }
}