// Constructs the WMV OMX decoder component: acquires the codec engine for
// the WMV MIME type and registers the parameter/config handlers.
OMXVideoDecoderWMV::OMXVideoDecoderWMV() {
    omx_verboseLog("OMXVideoDecoderWMV is constructed.");

    // Acquire the underlying decoder engine; a NULL result is logged but not
    // fatal here — later calls will fail against the missing decoder.
    mVideoDecoder = createVideoDecoder(WMV_MIME_TYPE);
    if (mVideoDecoder == NULL) {
        omx_errorLog("createVideoDecoder failed for \"%s\"", WMV_MIME_TYPE);
    }

    BuildHandlerList();
}
// Constructs the H.263 OMX decoder component: acquires the codec engine,
// overrides the native output buffer count, and registers handlers.
OMXVideoDecoderH263::OMXVideoDecoderH263() {
    LOGV("OMXVideoDecoderH263 is constructed.");

    // Acquire the underlying decoder engine; failure is logged but not fatal
    // at construction time.
    mVideoDecoder = createVideoDecoder(H263_MIME_TYPE);
    if (mVideoDecoder == NULL) {
        LOGE("createVideoDecoder failed for \"%s\"", H263_MIME_TYPE);
    }

    // Override the base-class default number of native output buffers.
    mNativeBufferCount = OUTPORT_NATIVE_BUFFER_COUNT;

    BuildHandlerList();
}
// Initializes the decode-backed VPP input: opens the input file and creates
// a matching video decoder for its MIME type.
//
// @param inputFileName  path of the encoded input file
// @param fourcc/width/height  unused for decode input (format comes from the
//                             stream itself)
// @return true on success; false if the input cannot be opened or no decoder
//         exists for its MIME type
bool VppInputDecode::init(const char* inputFileName, uint32_t /*fourcc*/, int /*width*/, int /*height*/)
{
    m_input.reset(DecodeInput::create(inputFileName));
    if (!m_input)
        return false;
    // Decoder lifetime is tied to m_decoder via the custom releaser.
    m_decoder.reset(createVideoDecoder(m_input->getMimeType()), releaseVideoDecoder);
    if (!m_decoder) {
        // BUG FIX: the original message lacked a trailing newline, so the
        // diagnostic ran into subsequent stderr output.
        fprintf(stderr, "failed create decoder for %s\n", m_input->getMimeType());
        return false;
    }
    return true;
}
// Constructs the AVC OMX decoder component. Initializes the NAL accumulation
// state, acquires the codec engine, overrides the native buffer count, and
// registers the parameter/config handlers.
OMXVideoDecoderAVC::OMXVideoDecoderAVC()
    : mAccumulateBuffer(NULL),
      mBufferSize(0),
      mFilledLen(0),
      mTimeStamp(INVALID_PTS) {
    LOGV("OMXVideoDecoderAVC is constructed.");

    // Acquire the underlying decoder engine; failure is logged but not fatal
    // at construction time.
    mVideoDecoder = createVideoDecoder(AVC_MIME_TYPE);
    if (mVideoDecoder == NULL) {
        LOGE("createVideoDecoder failed for \"%s\"", AVC_MIME_TYPE);
    }

    // Override default native buffer count defined in the base class.
    mNativeBufferCount = OUTPORT_NATIVE_BUFFER_COUNT;

    BuildHandlerList();
}
// Constructs the secure-AVC OMX decoder component. Besides the usual decoder
// setup it opens the DRM device node needed for protected playback.
OMXVideoDecoderAVCSecure::OMXVideoDecoderAVCSecure()
    : mKeepAliveTimer(0),
      mSessionPaused(false),
      mDrmDevFd(-1) {
    LOGV("OMXVideoDecoderAVCSecure is constructed.");

    // Acquire the underlying decoder engine for the secure-AVC MIME type;
    // failure is logged but not fatal at construction time.
    mVideoDecoder = createVideoDecoder(AVC_SECURE_MIME_TYPE);
    if (mVideoDecoder == NULL) {
        LOGE("createVideoDecoder failed for \"%s\"", AVC_SECURE_MIME_TYPE);
    }

    // Override default native buffer count defined in the base class.
    mNativeBufferCount = OUTPORT_NATIVE_BUFFER_COUNT;

    BuildHandlerList();

    // NOTE(review): "/dev/card0" (not "/dev/dri/card0") — presumably a
    // platform-specific node; confirm against the target's device tree.
    mDrmDevFd = open("/dev/card0", O_RDWR, 0);
    if (mDrmDevFd < 0) {
        LOGE("Failed to open drm device.");
    }
}
int32_t V4l2Decoder::ioctl(int command, void* arg) { int32_t ret = 0; int port = -1; DEBUG("fd: %d, ioctl command: %s", m_fd[0], IoctlCommandString(command)); switch (command) { case VIDIOC_QBUF: { #ifdef ANDROID struct v4l2_buffer *qbuf = static_cast<struct v4l2_buffer*>(arg); static uint32_t bufferCount = 0; if(qbuf->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE && m_streamOn[OUTPUT] == false) { ASSERT(qbuf->memory == V4L2_MEMORY_ANDROID_BUFFER_HANDLE); m_bufferHandle.push_back((buffer_handle_t)(qbuf->m.userptr)); bufferCount++; if (bufferCount == m_reqBuffCnt) mapVideoFrames(m_videoWidth, m_videoHeight); } #endif } // no break; case VIDIOC_STREAMON: case VIDIOC_STREAMOFF: case VIDIOC_DQBUF: case VIDIOC_QUERYCAP: ret = V4l2CodecBase::ioctl(command, arg); break; case VIDIOC_REQBUFS: { ret = V4l2CodecBase::ioctl(command, arg); ASSERT(ret == 0); int port = -1; struct v4l2_requestbuffers *reqbufs = static_cast<struct v4l2_requestbuffers *>(arg); GET_PORT_INDEX(port, reqbufs->type, ret); if (port == OUTPUT) { #if ANDROID if (reqbufs->count) m_reqBuffCnt = reqbufs->count; else m_videoFrames.clear(); #else if (!reqbufs->count) { m_eglVaapiImages.clear(); } else { const VideoFormatInfo* outFormat = m_decoder->getFormatInfo(); ASSERT(outFormat && outFormat->width && outFormat->height); ASSERT(m_eglVaapiImages.empty()); for (uint32_t i = 0; i < reqbufs->count; i++) { SharedPtr<EglVaapiImage> image( new EglVaapiImage(m_decoder->getDisplayID(), outFormat->width, outFormat->height)); if (!image->init()) { ERROR("Create egl vaapi image failed"); ret = -1; break; } m_eglVaapiImages.push_back(image); } } #endif } break; } case VIDIOC_QUERYBUF: { struct v4l2_buffer *buffer = static_cast<struct v4l2_buffer*>(arg); GET_PORT_INDEX(port, buffer->type, ret); ASSERT(buffer->memory == m_memoryMode[port]); ASSERT(buffer->index < m_maxBufferCount[port]); ASSERT(buffer->length == m_bufferPlaneCount[port]); ASSERT(m_maxBufferSize[port] > 0); if (port == INPUT) { ASSERT(buffer->memory == 
V4L2_MEMORY_MMAP); buffer->m.planes[0].length = m_maxBufferSize[INPUT]; buffer->m.planes[0].m.mem_offset = m_maxBufferSize[INPUT] * buffer->index; } else if (port == OUTPUT) { ASSERT(m_maxBufferSize[INPUT] && m_maxBufferCount[INPUT]); // plus input buffer space size, it will be minused in mmap buffer->m.planes[0].m.mem_offset = m_maxBufferSize[OUTPUT] * buffer->index; buffer->m.planes[0].m.mem_offset += m_maxBufferSize[INPUT] * m_maxBufferCount[INPUT]; buffer->m.planes[0].length = m_videoWidth * m_videoHeight; buffer->m.planes[1].m.mem_offset = buffer->m.planes[0].m.mem_offset + buffer->m.planes[0].length; buffer->m.planes[1].length = ((m_videoWidth+1)/2*2) * ((m_videoHeight+1)/2); } } break; case VIDIOC_S_FMT: { struct v4l2_format *format = static_cast<struct v4l2_format *>(arg); ASSERT(!m_streamOn[INPUT] && !m_streamOn[OUTPUT]); if (format->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) { // ::Initialize uint32_t size; memcpy(&size, format->fmt.raw_data, sizeof(uint32_t)); if(size <= (sizeof(format->fmt.raw_data)-sizeof(uint32_t))) { uint8_t *ptr = format->fmt.raw_data; ptr += sizeof(uint32_t); m_codecData.assign(ptr, ptr + size); } else { ret = -1; ERROR("unvalid codec size"); } //ASSERT(format->fmt.pix_mp.pixelformat == V4L2_PIX_FMT_NV12M); } else if (format->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) { // ::CreateInputBuffers ASSERT(format->fmt.pix_mp.num_planes == 1); ASSERT(format->fmt.pix_mp.plane_fmt[0].sizeimage); m_codecData.clear(); m_decoder.reset( createVideoDecoder(mimeFromV4l2PixelFormat(format->fmt.pix_mp.pixelformat)), releaseVideoDecoder); ASSERT(m_decoder); if (!m_decoder) { ret = -1; } m_maxBufferSize[INPUT] = format->fmt.pix_mp.plane_fmt[0].sizeimage; } else { ret = -1; ERROR("unknow type: %d of setting format VIDIOC_S_FMT", format->type); } } break; case VIDIOC_SUBSCRIBE_EVENT: { // ::Initialize struct v4l2_event_subscription *sub = static_cast<struct v4l2_event_subscription*>(arg); ASSERT(sub->type == V4L2_EVENT_RESOLUTION_CHANGE); // 
resolution change event is must, we always do so } break; case VIDIOC_DQEVENT: { // ::DequeueEvents struct v4l2_event *ev = static_cast<struct v4l2_event*>(arg); // notify resolution change if (hasCodecEvent()) { ev->type = V4L2_EVENT_RESOLUTION_CHANGE; clearCodecEvent(); } else ret = -1; } break; case VIDIOC_G_FMT: { // ::GetFormatInfo struct v4l2_format* format = static_cast<struct v4l2_format*>(arg); ASSERT(format->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE); ASSERT(m_decoder); const VideoFormatInfo* outFormat = m_decoder->getFormatInfo(); if (format && outFormat && outFormat->width && outFormat->height) { format->fmt.pix_mp.num_planes = m_bufferPlaneCount[OUTPUT]; format->fmt.pix_mp.width = outFormat->width; format->fmt.pix_mp.height = outFormat->height; // XXX assumed output format and pitch #ifdef ANDROID format->fmt.pix_mp.pixelformat = HAL_PIXEL_FORMAT_NV12_Y_TILED_INTEL; #else format->fmt.pix_mp.pixelformat = V4L2_PIX_FMT_NV12M; #endif format->fmt.pix_mp.plane_fmt[0].bytesperline = outFormat->width; format->fmt.pix_mp.plane_fmt[1].bytesperline = outFormat->width % 2 ? outFormat->width+1 : outFormat->width; m_videoWidth = outFormat->width; m_videoHeight = outFormat->height; m_maxBufferSize[OUTPUT] = m_videoWidth * m_videoHeight + ((m_videoWidth +1)/2*2) * ((m_videoHeight+1)/2); } else { ret = -1; // chromeos accepts EINVAL as not enough input data yet, will try it again. errno = EINVAL; } } break; case VIDIOC_G_CTRL: { // ::CreateOutputBuffers struct v4l2_control* ctrl = static_cast<struct v4l2_control*>(arg); ASSERT(ctrl->id == V4L2_CID_MIN_BUFFERS_FOR_CAPTURE); ASSERT(m_decoder); // VideoFormatInfo* outFormat = m_decoder->getFormatInfo(); ctrl->value = 0; // no need report dpb size, we hold all buffers in decoder. 
} break; case VIDIOC_ENUM_FMT: { struct v4l2_fmtdesc *fmtdesc = static_cast<struct v4l2_fmtdesc *>(arg); if ((fmtdesc->index == 0) && (fmtdesc->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)) { fmtdesc->pixelformat = V4L2_PIX_FMT_NV12M; } else if ((fmtdesc->index == 0) && (fmtdesc->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)) { fmtdesc->pixelformat = V4L2_PIX_FMT_VP8; } else if ((fmtdesc->index == 1) && (fmtdesc->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)) { fmtdesc->pixelformat = V4L2_PIX_FMT_VP9; } else { ret = -1; } } break; case VIDIOC_G_CROP: { struct v4l2_crop* crop= static_cast<struct v4l2_crop *>(arg); ASSERT(crop->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE); ASSERT(m_decoder); const VideoFormatInfo* outFormat = m_decoder->getFormatInfo(); if (outFormat && outFormat->width && outFormat->height) { crop->c.left = 0; crop->c.top = 0; crop->c.width = outFormat->width; crop->c.height = outFormat->height; } else { ret = -1; } } break; default: ret = -1; ERROR("unknown ioctrl command: %d", command); break; } if (ret == -1 && errno != EAGAIN) { // ERROR("ioctl failed"); WARNING("ioctl command: %s failed", IoctlCommandString(command)); } return ret; }