bool VideoEncoderX264or5::doProcessFrame(Frame *org, Frame *dst)
{
    if (!(org && dst)) {
        utils::errorMsg("Error encoding video frame: org or dst are NULL");
        return false;
    }

    VideoFrame* rawFrame = dynamic_cast<VideoFrame*>(org);
    VideoFrame* codedFrame = dynamic_cast<VideoFrame*>(dst);

    if (!rawFrame || !codedFrame) {
        utils::errorMsg("Error encoding video frame: org and dst MUST be VideoFrame");
        return false;
    }

    if (!reconfigure(rawFrame, codedFrame)) {
        utils::errorMsg("Error encoding video frame: reconfigure failed");
        return false;
    }

    if (!fill_x264or5_picture(rawFrame)) {
        utils::errorMsg("Could not fill x264_picture_t from frame");
        return false;
    }

    if (!encodeFrame(codedFrame)) {
        utils::errorMsg("Could not encode video frame");
        return false;
    }

    codedFrame->setSize(rawFrame->getWidth(), rawFrame->getHeight());

    return true;
}
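// The following is a minimal, self-contained sketch of the libx264 call that an
// encodeFrame()-style wrapper typically makes once fill_x264or5_picture() has
// populated the input picture. It illustrates the x264 API only and is not this
// project's implementation; `encoder`, `picIn`, `outBuf` and `outCap` are
// assumed to be supplied by the caller.
#include <stdint.h>
#include <string.h>
#include <x264.h>

// Returns the encoded size in bytes, 0 while the encoder is still buffering
// (lookahead delay), or -1 on error / insufficient output space.
static int encodeWithX264(x264_t *encoder, x264_picture_t *picIn,
                          uint8_t *outBuf, size_t outCap)
{
    x264_nal_t *nals = NULL;
    int nalCount = 0;
    x264_picture_t picOut;

    int bytes = x264_encoder_encode(encoder, &nals, &nalCount, picIn, &picOut);
    if (bytes < 0)
        return -1;                      // encoder reported an error
    if (bytes == 0)
        return 0;                       // no output yet for this input frame
    if ((size_t) bytes > outCap)
        return -1;                      // caller's buffer is too small

    // x264 lays the NAL payloads out contiguously in memory, so the first
    // payload pointer covers all `bytes` of encoded data for this frame.
    memcpy(outBuf, nals[0].p_payload, bytes);
    return bytes;
}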
VideoFrame *SimpleV4L2::readFrame()
{
    struct v4l2_buffer buf;
    unsigned int i;

    // Prepare a frame with the expected format and timing metadata; it is
    // returned empty if capturing has not started or no data is ready yet.
    VideoFrame *frame = new VideoFrame();
    frame->setPixelFormat(QVideoFrame::Format_RGB32);
    frame->setCaptureTime(QTime::currentTime());
    frame->setBufferType(VideoFrame::BUFFER_POINTER);
    frame->setHoldTime(1000/30);
    frame->setSize(m_imageSize);

    if (!m_startedCapturing)
        return frame;

    switch (m_io) {
    case IO_METHOD_READ:
        if (-1 == read(m_fd, m_buffers[0].start, m_buffers[0].length)) {
            switch (errno) {
            case EAGAIN:
                return frame;
            case EIO:
                /* Could ignore EIO, see spec. */
                /* fall through */
            default:
                errno_exit("read");
            }
        }

        // Copy the data into storage owned by the frame.
        memcpy(frame->allocPointer(m_buffers[0].length),
               m_buffers[0].start, m_buffers[0].length);
        break;

    case IO_METHOD_MMAP:
        CLEAR(buf);
        buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;

        // Dequeue a filled buffer from the driver.
        if (-1 == xioctl(m_fd, VIDIOC_DQBUF, &buf)) {
            switch (errno) {
            case EAGAIN:
                return frame;
            case EIO:
                /* Could ignore EIO, see spec. */
                /* fall through */
            default:
                errno_exit("VIDIOC_DQBUF");
            }
        }

        assert(buf.index < m_numBuffers);

        // Copy the mapped buffer into storage owned by the frame.
        {
            uchar *pointer = frame->allocPointer(m_buffers[buf.index].length);
            memcpy(pointer, m_buffers[buf.index].start, m_buffers[buf.index].length);
        }

        // Re-queue the buffer so the driver can fill it again.
        if (-1 == xioctl(m_fd, VIDIOC_QBUF, &buf))
            errno_exit("VIDIOC_QBUF");
        break;

    case IO_METHOD_USERPTR:
        CLEAR(buf);
        buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_USERPTR;

        if (-1 == xioctl(m_fd, VIDIOC_DQBUF, &buf)) {
            switch (errno) {
            case EAGAIN:
                return frame;
            case EIO:
                /* Could ignore EIO, see spec. */
                /* fall through */
            default:
                errno_exit("VIDIOC_DQBUF");
            }
        }

        // Find which of our user-space buffers the driver just filled.
        for (i = 0; i < m_numBuffers; ++i)
            if (buf.m.userptr == (unsigned long) m_buffers[i].start
                && buf.length == m_buffers[i].length)
                break;

        assert(i < m_numBuffers);

        // Copy the captured data into the frame, mirroring the other IO paths.
        memcpy(frame->allocPointer(buf.length), (void *) buf.m.userptr, buf.length);

        // Re-queue the buffer so the driver can fill it again.
        if (-1 == xioctl(m_fd, VIDIOC_QBUF, &buf))
            errno_exit("VIDIOC_QBUF");
        break;
    }

    return frame;
}
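// A minimal usage sketch, assuming the caller has the SimpleV4L2 instance and
// its V4L2 device file descriptor, and that the device has already been opened,
// initialized and started. It waits with select() until the device is readable
// before calling readFrame(), which matches readFrame()'s behaviour of handing
// back an empty frame when the driver reports EAGAIN. What happens to the frame
// afterwards is placeholder code, not part of this class.
#include <errno.h>
#include <sys/select.h>
#include <sys/time.h>

void captureLoop(SimpleV4L2 *v4l2, int fd)
{
    for (;;) {
        fd_set fds;
        FD_ZERO(&fds);
        FD_SET(fd, &fds);

        struct timeval tv;
        tv.tv_sec  = 2;             // allow the driver up to two seconds per frame
        tv.tv_usec = 0;

        int r = select(fd + 1, &fds, NULL, NULL, &tv);
        if (r == -1 && errno == EINTR)
            continue;               // interrupted by a signal, retry
        if (r <= 0)
            break;                  // select() error or timeout

        VideoFrame *frame = v4l2->readFrame();
        // ... hand the frame to its consumer here; readFrame() allocates with
        // new, so ownership (and eventual deletion) belongs to the caller.
        delete frame;               // placeholder: real code would use the frame first
    }
}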