Example #1
0
void storageProcessor(void *arg)
{
	struct threadmsg * msg = malloc(sizeof(struct threadmsg));

	while(!thread_queue_get(saveQueue, NULL, msg))
	{
		block_s * block = (block_s *)msg->data;

		saveBlock(block);

		free(block);
	}

	printf("EXITING READ FILE QUEUE\n");
}
Example #2
0
//从packet队列中取用的线程
void *getPacket(void *minstance){
	playInstance *instance = (playInstance *)minstance;
	LOGE("getpacket线程开始\n");
	struct timespec time;
	time.tv_sec=10;//网络不好最多等10秒
	time.tv_nsec=0;
	struct threadmsg msg;

	while(1){
		memset(&msg,0,sizeof(struct threadmsg));
		msg.data=NULL;

		AVPacket pavpacket;
		thread_queue_get(instance->queue,&time,&msg);

		if(msg.msgtype==-1){//正常退出
			LOGE("get线程正常退出\n");
			thread_queue_add(instance->video_queue,NULL,-1);
			thread_queue_add(instance->audio_queue,NULL,-1);
			break;
		}

		if(msg.data ==NULL){
			LOGE("get线程超时退出\n");
			thread_queue_add(instance->video_queue,NULL,-1);
			thread_queue_add(instance->audio_queue,NULL,-1);
			instance->timeout_flag = 1;
			break;
		}

		AVPacket *packet_p = msg.data;
		pavpacket = *packet_p;

		if(pavpacket.stream_index==instance->vs->videoStream){
			thread_queue_add(instance->video_queue,packet_p,1);
		}else
		if(pavpacket.stream_index==instance->vs->audioStream){
			thread_queue_add(instance->audio_queue,packet_p,1);
		}
	}
	return NULL;
}
/*
 * Vibration worker thread: blocks on the message queue and executes
 * vibrate/stop requests against the immvibe device (global fd).
 * Exits when thread_queue_get() reports an error or shutdown.
 */
static void *worker_thread_proc(void *arg)
{
    struct threadqueue *queue = (struct threadqueue *)arg;
    struct threadmsg msg;
    int ret;

    ALOGI("worker thread started");

    while (1) {
        ret = thread_queue_get(queue, NULL, &msg);
        if (ret != 0) {
            /* Previously ret was ignored: on a queue error msg holds
             * stale/indeterminate data and the loop would spin on it. */
            ALOGW("worker: thread_queue_get failed (%d), exiting", ret);
            break;
        }

        switch (msg.msgtype) {
        case MSG_VIBRATE: {
            /* The duration travels in-band through the data pointer; round-trip
             * through intptr_t so the narrowing is well-defined on 64-bit. */
            int duration = (int)(intptr_t)msg.data;
            uint8_t force = immvibe_api_get_force_userspace();
            ALOGV("worker: vibrate(duration=%d, force=%d)", duration, force);

            immvibe_play(fd, force);
            usleep(duration * 1000);  /* XXX: too lazy to nanosleep and handle EINTR */
            immvibe_play(fd, 0);
            break;
        }

        case MSG_STOP:
            ALOGV("worker: stop()");
            immvibe_play(fd, 0);
            break;

        default:
            ALOGW("worker: unknown work, should never happen");
            break;
        }
    }

    return NULL;
}
Example #4
0
//音频线程
void *audio_thread(void *minstance){
	playInstance *instance = (playInstance *)minstance;
	LOGE("音频线程开启\n");

	JNIEnv *audioEnv;
	(*(instance->gJavaVm))->AttachCurrentThread(instance->gJavaVm,&audioEnv,NULL);
	jclass javacls = (*audioEnv)->GetObjectClass(audioEnv,instance->gJavaobj);
	jmethodID play = (*audioEnv)->GetMethodID(audioEnv,javacls,"playSound","([BI)V");

	struct timespec time;
	time.tv_sec=10;//网络不好最多等10秒
	time.tv_nsec=0;
	struct threadmsg msg;
	int packet_count = 0;

	while(1){
		if(instance->stop){
			break;
		}
		msg.data=NULL;

		AVPacket pavpacket;
		thread_queue_get(instance->audio_queue,&time,&msg);

		if(msg.msgtype==-1){//正常退出
			break;
		}

		if(msg.data ==NULL){
			LOGE("音频线程空循环\n");
			continue;
		}

		packet_count++;
		if(packet_count == 1){//拿到第一个音频包
			instance->vs->audio_start_time = av_gettime();
			LOGE("音频开始时间 %lld\n",instance->vs->audio_start_time);
		}

		AVPacket *packet_p = msg.data;
		pavpacket = *packet_p;
		uint8_t ** dst_data;

		//延时同步
		int64_t pkt_pts = pavpacket.pts;
		double show_time = pkt_pts * (instance->vs->audio_time_base);
		int64_t show_time_micro = show_time * 1000000;
		int64_t played_time = av_gettime() - instance->vs->audio_start_time;
		int64_t delta_time = show_time_micro - played_time;
		if(delta_time< -(0.2 * 1000000)){
			av_free_packet(packet_p);
			av_free(msg.data);
			LOGE("声音跳帧\n");
			continue;
		}else if(delta_time>0){
			av_usleep(delta_time);
		}

		int len =0;
		int dst_linesize;
		while(pavpacket.size>0){
			int got_frame=0;

			len = avcodec_decode_audio4(instance->vs->aCodecCtx,instance->vs->audio_decode_frame,&got_frame,&pavpacket);
			if(len<0){
				LOGE("audio decode return wrong");								
				break;
			}
			//音频转化
			av_samples_alloc_array_and_samples(&dst_data,&dst_linesize,1,(instance->vs->audio_decode_frame)->nb_samples,AV_SAMPLE_FMT_S16,0);
			swr_convert(instance->vs->swr_ctx,dst_data,(instance->vs->audio_decode_frame)->nb_samples,(const uint8_t **)&(instance->vs->audio_decode_frame->data[0]),(instance->vs->audio_decode_frame)->nb_samples);

			pavpacket.size -= len;
			pavpacket.data += len;

			if(got_frame){
				jbyte *bytes = (*audioEnv)->GetByteArrayElements(audioEnv, instance->global_aarray, NULL);
				memcpy(bytes,*dst_data,dst_linesize);
				(*audioEnv)->ReleaseByteArrayElements(audioEnv, instance->global_aarray, bytes, 0);
				(*audioEnv)->CallVoidMethod(audioEnv,instance->gJavaobj,play,instance->global_aarray,dst_linesize);
			}
			av_free(dst_data[0]);
		}
		av_free_packet(packet_p);
		av_free(msg.data);
	}
	(*(instance->gJavaVm))->DetachCurrentThread(instance->gJavaVm);
	LOGE("音频线程退出\n");
	return NULL;
}
Example #5
0
//视频线程
void *video_thread(void *minstance){
	playInstance *instance = (playInstance *)minstance;
	LOGE("视频线程开始\n");
	struct timespec time;
	time.tv_sec=10;//网络不好最多等10秒
	time.tv_nsec=0;
	struct threadmsg msg;
	int packet_count = 0;

	while(1){
		if(instance->stop){
			break;
		}
		msg.data=NULL;

		thread_queue_get(instance->video_queue,&time,&msg);

		if(msg.msgtype==-1){//正常退出
			LOGE("视频线程正常退出\n");
			break;
		}

		if(msg.data ==NULL){
			LOGE("视频线程超时退出");

			break;
		}
		AVPacket *packet_p = msg.data;
		AVPacket pavpacket = *packet_p;

		packet_count ++;

		if(packet_count == 1){//拿到第一个视频包
			instance->vs->video_start_time = av_gettime();
			LOGE("视频开始时间 %lld\n",instance->vs->video_start_time);
		}

		if(instance->disable_video){
			av_free_packet(packet_p);
			av_free(msg.data);
			continue;
		}
		ANativeWindow_Buffer windowBuffer;

		//延时同步
		int64_t pkt_pts = pavpacket.pts;
		double show_time = pkt_pts * (instance->vs->video_time_base);
		int64_t show_time_micro = show_time * 1000000;
		int64_t played_time = av_gettime() - instance->vs->video_start_time;
		int64_t delta_time = show_time_micro - played_time;
//		LOGE("播放时间 %lld,PTS时间: %lld,差距时间: %lld\n",played_time,show_time_micro,delta_time);
		if(delta_time< -(0.2 * 1000000)){
			LOGE("视频跳帧\n");
		}else if(delta_time>0){
			av_usleep(delta_time);
		}

		int frame_finished=0;
		avcodec_decode_video2(instance->vs->pCodecCtx, instance->vs->pFrame, &frame_finished,&pavpacket);//将pavpacket中的数据解码成,放入pFram中
		if(frame_finished){
			sws_scale//对解码后的数据进行色彩空间转换,yuv420p 转为rgba8888
				(
					instance->vs->sws_ctx,
					(uint8_t const * const *)(instance->vs->pFrame)->data,
					(instance->vs->pFrame)->linesize,
					0,
					instance->vs->pCodecCtx->height,
					instance->vs->RGBAFrame->data,
					instance->vs->RGBAFrame->linesize
				);
			if (!(instance->disable_video) && ANativeWindow_lock(instance->window, &windowBuffer, NULL) < 0) {
				LOGE("cannot lock window");
				continue;
			}else if(!instance->disable_video){
				memcpy(windowBuffer.bits, instance->vs->buffer,  instance->display_width * instance->display_height * 4);//将解码出来的数据复制到surfaceview对应的内存区域
				ANativeWindow_unlockAndPost(instance->window);//释放对surface的锁,并且更新对应surface数据进行显示
			}
		}
		av_free_packet(packet_p);
		av_free(msg.data);
	}
	return NULL;
}