Example #1
static int cameraSourceCallback(void *cookie, void *data)
{
	//printf("cameraSourceCallback %lld\n",current_time());
	Video_Recorder* recorder = (Video_Recorder*) cookie;

	cedarv_encoder_t* venc_device = recorder->venc_device;
	AWCameraDevice* cameraDevice = recorder->cameraDevice;

	VencInputBuffer input_buffer;
	int result = 0;
	struct v4l2_buffer *p_buf = (struct v4l2_buffer *) data;
	v4l2_mem_map_t* p_v4l2_mem_map = GetMapmemAddress(getV4L2ctx(cameraDevice));

#if CAP_FPS == 15
	/* At 15 fps capture, only encode when more than 35 ms have passed since
	 * the last encoded frame (drops roughly every other ~30 fps frame). */
	if(current_time() - g_time > 35){
#endif
		void *buffer = (void *) p_v4l2_mem_map->mem[p_buf->index];
		/* get an input buffer from the encoder */
		result = venc_device->ioctrl(venc_device, VENC_CMD_GET_ALLOCATE_INPUT_BUFFER, &input_buffer);
		
		//printf("into result:%d %lld\n",result,current_time());
		if (result == 0 )
		{
			//printf("into encoder:%lld\n",current_time());
			/* convert the captured 4:2:2 frame into the encoder's 4:2:0 input buffer */
			YUV422To420((unsigned char*) buffer, input_buffer.addrvirY, recorder->width, recorder->height);
			/* overlay a timestamp watermark at offset (10, 10) */
			waterMarkShowTime(recorder->waterMark, input_buffer.addrvirY, recorder->width, recorder->height, 10, 10);
			//printf("e:%lld\n",current_time());
			/* the chroma plane starts right after the width * height luma plane */
			input_buffer.addrvirC = input_buffer.addrvirY + recorder->width * recorder->height;
			/* flush CPU caches, then queue the buffer for encoding */
			venc_device->ioctrl(venc_device, VENC_CMD_FLUSHCACHE_ALLOCATE_INPUT_BUFFER, &input_buffer);
			result = venc_device->ioctrl(venc_device, VENC_CMD_ENQUENE_INPUT_BUFFER, &input_buffer);
		}
		
#if CAP_FPS == 15
		g_time = current_time();
	}
#endif
	//printf("into encoder:%lld end\n",current_time());
	
	/* hand the V4L2 buffer back to the capture queue */
	cameraDevice->returnFrame(cameraDevice, p_buf->index);
	return 1;
}
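
Example #1 never shows YUV422To420 itself, so here is a minimal sketch of what a conversion with this two-argument destination could look like. It assumes packed YUYV input and an NV12-style semi-planar output; Example #1 places the chroma plane at addrvirY + width * height, but whether the encoder wants interleaved UV or planar U/V cannot be confirmed from this snippet. Chroma rows are dropped rather than averaged, for brevity.

/* Sketch only: packed YUYV (4:2:2) -> semi-planar 4:2:0 (NV12 layout assumed).
 * Luma fills dst[0 .. w*h), interleaved UV follows at dst + w*h. */
static void YUV422To420(const unsigned char *src, unsigned char *dst,
                        int width, int height)
{
	unsigned char *dst_y  = dst;
	unsigned char *dst_uv = dst + width * height;
	int row, col;

	for (row = 0; row < height; row++) {
		const unsigned char *line = src + row * width * 2; /* YUYV = 2 bytes/pixel */
		for (col = 0; col < width; col += 2) {
			dst_y[0] = line[col * 2];          /* Y0 */
			dst_y[1] = line[col * 2 + 2];      /* Y1 */
			dst_y += 2;
			if ((row & 1) == 0) {              /* keep chroma from even rows only */
				*dst_uv++ = line[col * 2 + 1]; /* U */
				*dst_uv++ = line[col * 2 + 3]; /* V */
			}
		}
	}
}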
Example #2
/**
 * @brief Dequeue one captured frame, convert it from YUV 4:2:2 to 4:2:0,
 *        write it to the output file, and re-queue the buffer.
 *
 * @param fd /dev/video device file descriptor
 * @param fp FILE pointer for the .264 output file
 *
 * @return 1 on success (exits on ioctl failure)
 */
int read_frame(int fd, FILE *fp)
{
    printf("read_frame\n");
    struct v4l2_buffer buf;
    unsigned int i;

    bzero(&buf,sizeof(buf));
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    // dequeue a filled buffer from the capture queue
    if(-1 == ioctl(fd,VIDIOC_DQBUF,&buf))
    {
        perror("Fail to ioctl 'VIDIOC_DQBUF'");
        exit(EXIT_FAILURE);
    }

    assert(buf.index < n_buffer);
//    memset(user_buf[4].start, 0, 1024);

    /* convert the captured 4:2:2 frame into the spare buffer user_buf[4] */
    YUV422To420(user_buf[buf.index].start, user_buf[4].start, 640, 480);

//    fwrite(user_buf[buf.index].start, user_buf[buf.index].length, 1, fp);
    /* a 4:2:0 frame is width * height * 3 / 2 bytes, not the 4:2:2 buffer length */
    fwrite(user_buf[4].start, 640 * 480 * 3 / 2, 1, fp);
    //usleep(500);


    // write the data from process space out to a file
#if 0
    process_image(user_buf[buf.index].start,user_buf[buf.index].length);
#endif

    if(-1 == ioctl(fd,VIDIOC_QBUF,&buf)) /* Sean Hou: after the captured frame has been written to the file, put the user buffer back on the queue so capture can fill it again */
    {
        perror("Fail to ioctl 'VIDIOC_QBUF'");
        exit(EXIT_FAILURE);
    }

    return 1;
}
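
For context, below is a sketch of the loop that could drive read_frame(), assuming the device has already been opened, configured for 640x480 YUYV, and its mmap'ed buffers queued with VIDIOC_QBUF. The name capture_loop and the frame_count parameter are illustrative, not part of the original source.

#include <stdio.h>
#include <stdlib.h>
#include <sys/ioctl.h>
#include <sys/select.h>
#include <linux/videodev2.h>

void capture_loop(int fd, FILE *fp, int frame_count)
{
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    /* start streaming on the already-queued mmap buffers */
    if(-1 == ioctl(fd, VIDIOC_STREAMON, &type))
    {
        perror("Fail to ioctl 'VIDIOC_STREAMON'");
        exit(EXIT_FAILURE);
    }

    while(frame_count-- > 0)
    {
        fd_set fds;
        struct timeval tv = {2, 0};   /* 2 second timeout */

        FD_ZERO(&fds);
        FD_SET(fd, &fds);

        /* block until the driver has a filled buffer ready to dequeue */
        int r = select(fd + 1, &fds, NULL, NULL, &tv);
        if(-1 == r)
        {
            perror("Fail to select");
            exit(EXIT_FAILURE);
        }
        if(0 == r)
        {
            fprintf(stderr, "select timeout\n");
            continue;
        }

        read_frame(fd, fp);   /* dequeue, convert, write, re-queue */
    }

    if(-1 == ioctl(fd, VIDIOC_STREAMOFF, &type))
    {
        perror("Fail to ioctl 'VIDIOC_STREAMOFF'");
        exit(EXIT_FAILURE);
    }
}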
Example #3
int CLiveParser::GetOnePacket(char *pData, J_StreamHeader &streamHeader)
{
	WLock(m_rwLocker);
	if (m_dateType == J_VideoMjpeg)
	{
		if (m_nDataSize < (int)sizeof(int))
		{
			streamHeader.dataLen = 0;
			RWUnlock(m_rwLocker);
			return J_NOT_COMPLATE;
		}

		/* packets are framed as [int length][payload] */
		int nOffset = 0;
		int nLen = *((int *)(m_pDataBuff));
		nOffset += sizeof(int);
		memcpy(pData, m_pDataBuff + nOffset, nLen);
		nOffset += nLen;
		/* shift the remaining data to the front of the buffer */
		memmove(m_pDataBuff, m_pDataBuff + nOffset, m_nDataSize - nOffset);
		m_nDataSize -= nOffset;

		streamHeader.timeStamp = CTime::Instance()->GetLocalTime(0);
		streamHeader.dataLen = nLen;
		streamHeader.frameType = J_VideoIFrame;
	}
	else
	{
		int iDataLen = 640 * 480;
		/* a packed 4:2:2 frame is 2 bytes per pixel */
		if (m_nDataSize < iDataLen * 2)
		{
			streamHeader.dataLen = 0;
			RWUnlock(m_rwLocker);
			return J_NOT_COMPLATE;
		}

		streamHeader.timeStamp = CTime::Instance()->GetLocalTime(0);
		int nOffset = 0;
		/* de-interleave the packed 4:2:2 frame into the x264 picture's Y/U/V planes */
		YUV422To420((uint8_t *)m_pDataBuff, m_pPicIn->img.plane[0], m_pPicIn->img.plane[1], m_pPicIn->img.plane[2], 640, 480);
		nOffset = iDataLen * 2;

		m_pPicIn->i_pts = 0;//streamHeader.timeStamp * 90;
		//m_pPicIn->i_dts = streamHeader.timeStamp * 90;
		//++g_uiPTSFactor;
		encode(m_pX264Handle, m_pPicIn, m_pPicOut);
		memmove(m_pDataBuff, m_pDataBuff + nOffset, m_nDataSize - nOffset);
		m_nDataSize -= nOffset;

		nOffset = 0;
		/* concatenate every NAL unit of this frame into the output packet */
		for (int i = 0; i < m_iNal; ++i)
		{
			/*static FILE *fp = NULL;
			if (fp == NULL)
				fp = fopen("test.h264", "wb+");
			fwrite(m_pNals[i].p_payload, 1, m_pNals[i].i_payload, fp);*/

			memcpy(pData + nOffset, m_pNals[i].p_payload, m_pNals[i].i_payload);
			nOffset += m_pNals[i].i_payload;
		}

		streamHeader.dataLen = nOffset;
		streamHeader.frameType = J_VideoIFrame;
	}

	RWUnlock(m_rwLocker);
	return J_OK;
}
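
Example #3 calls an encode() helper and then reads the m_pNals / m_iNal members without showing either. The sketch below assumes encode() is a thin wrapper around x264_encoder_encode() that stores the frame's NAL array for the copy loop above; the wrapper body is an assumption, not the original implementation.

#include <x264.h>

/* Assumed members on CLiveParser: x264_nal_t *m_pNals; int m_iNal; */
int CLiveParser::encode(x264_t *handle, x264_picture_t *picIn, x264_picture_t *picOut)
{
	/* fills m_pNals with the frame's NAL units and m_iNal with their count */
	int frameSize = x264_encoder_encode(handle, &m_pNals, &m_iNal, picIn, picOut);
	if (frameSize < 0)
		return -1;        /* encoding error */

	return frameSize;     /* total payload bytes across all NAL units */
}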