/* Configures the IPU task's output side from the given buffer.
 *
 * @param ipu_blitter    blitter instance whose task is updated
 * @param output_buffer  buffer providing the output video metadata and
 *                       physical memory; must not be NULL
 * @return always TRUE
 */
gboolean gst_imx_ipu_blitter_set_output_buffer(GstImxIpuBlitter *ipu_blitter, GstBuffer *output_buffer)
{
	struct ipu_task *ipu_task = &(ipu_blitter->priv->task);

	g_assert(output_buffer != NULL);

	/* Copy width/height/format/physical address out of the buffer
	 * into the task's output descriptor */
	GST_IMX_FILL_IPU_TASK(ipu_blitter, output_buffer, ipu_task->output);

	return TRUE;
}
/* Configures the IPU task's input side from the given buffer and
 * remembers the buffer as the current actual input.
 *
 * @param ipu_blitter          blitter instance whose task is updated
 * @param actual_input_buffer  buffer providing the input video metadata and
 *                             physical memory; must not be NULL
 * @return always TRUE
 *
 * NOTE(review): the buffer pointer is stored without taking a reference —
 * presumably ownership/lifetime is managed by the caller (base blitter);
 * verify against the call sites.
 */
gboolean gst_imx_ipu_blitter_set_actual_input_buffer(GstImxIpuBlitter *ipu_blitter, GstBuffer *actual_input_buffer)
{
	struct ipu_task *ipu_task = &(ipu_blitter->priv->task);

	g_assert(actual_input_buffer != NULL);

	/* Copy width/height/format/physical address out of the buffer
	 * into the task's input descriptor */
	GST_IMX_FILL_IPU_TASK(ipu_blitter, actual_input_buffer, ipu_task->input);

	ipu_blitter->actual_input_buffer = actual_input_buffer;

	return TRUE;
}
/* GstImxBaseBlitter vfunc: sets the output frame for subsequent blits.
 *
 * Fills the IPU task's output descriptor from the frame and marks the
 * cached output region as stale, so the next blit clears empty regions
 * before drawing.
 *
 * @param base_blitter  base blitter instance (actually a GstImxIpuBlitter)
 * @param output_frame  frame to render into
 * @return always TRUE
 */
static gboolean gst_imx_ipu_blitter_set_output_frame(GstImxBaseBlitter *base_blitter, GstBuffer *output_frame)
{
	GstImxIpuBlitter *ipu_blitter = GST_IMX_IPU_BLITTER(base_blitter);
	struct ipu_task *ipu_task = &(ipu_blitter->priv->task);

	GST_IMX_FILL_IPU_TASK(ipu_blitter, output_frame, ipu_task->output);

	/* New output frame -> its contents are unknown; force a clear of
	 * empty regions on the next blit */
	ipu_blitter->output_region_uptodate = FALSE;

	return TRUE;
}
/* GstImxBaseBlitter vfunc: performs the actual blit by queuing the
 * prepared IPU task via the IPU_QUEUE_TASK ioctl.
 *
 * Expects the task's input and output descriptors to have been filled
 * in beforehand (see the set_input_frame / set_output_frame vfuncs).
 * The input crop rectangle is taken from input_region.
 *
 * Returns FALSE only if the main blit ioctl fails; the preliminary
 * clear operation is best-effort (its failure is logged but does not
 * abort the blit).
 */
static gboolean gst_imx_ipu_blitter_blit_frame(GstImxBaseBlitter *base_blitter, GstImxBaseBlitterRegion const *input_region)
{
	int ret;
	GstImxIpuBlitter *ipu_blitter = GST_IMX_IPU_BLITTER(base_blitter);
	char fourcc[5];

	/* Restrict the input to the requested region (region uses corner
	 * coordinates; the IPU crop uses position + size) */
	ipu_blitter->priv->task.input.crop.pos.x = input_region->x1;
	ipu_blitter->priv->task.input.crop.pos.y = input_region->y1;
	ipu_blitter->priv->task.input.crop.w = input_region->x2 - input_region->x1;
	ipu_blitter->priv->task.input.crop.h = input_region->y2 - input_region->y1;

	/* Log the fully-configured task before queuing it */
	gst_imx_ipu_blitter_print_ipu_fourcc(ipu_blitter->priv->task.input.format, fourcc);
	GST_LOG_OBJECT(
		ipu_blitter,
		"task input: width: %u height: %u format: 0x%x (%s) crop: %u,%u %ux%u phys addr %" GST_IMX_PHYS_ADDR_FORMAT " deinterlace enable %u motion 0x%x",
		ipu_blitter->priv->task.input.width, ipu_blitter->priv->task.input.height,
		ipu_blitter->priv->task.input.format, fourcc,
		ipu_blitter->priv->task.input.crop.pos.x, ipu_blitter->priv->task.input.crop.pos.y, ipu_blitter->priv->task.input.crop.w, ipu_blitter->priv->task.input.crop.h,
		(gst_imx_phys_addr_t)(ipu_blitter->priv->task.input.paddr),
		ipu_blitter->priv->task.input.deinterlace.enable, ipu_blitter->priv->task.input.deinterlace.motion
	);
	gst_imx_ipu_blitter_print_ipu_fourcc(ipu_blitter->priv->task.output.format, fourcc);
	GST_LOG_OBJECT(
		ipu_blitter,
		"task output: width: %u height: %u format: 0x%x (%s) crop: %u,%u %ux%u paddr %" GST_IMX_PHYS_ADDR_FORMAT " rotate: %u",
		ipu_blitter->priv->task.output.width, ipu_blitter->priv->task.output.height,
		ipu_blitter->priv->task.output.format, fourcc,
		ipu_blitter->priv->task.output.crop.pos.x, ipu_blitter->priv->task.output.crop.pos.y, ipu_blitter->priv->task.output.crop.w, ipu_blitter->priv->task.output.crop.h,
		(gst_imx_phys_addr_t)(ipu_blitter->priv->task.output.paddr),
		ipu_blitter->priv->task.output.rotate
	);

	/* Clear empty regions if necessary
	 * Do so by clearing the entire output region
	 * XXX this is necessary because unlike G2D, the IPU has problems with
	 * pixel perfect positioning, that is, neighbouring regions sometimes
	 * have a few pixels of space between them */
	if (!(ipu_blitter->output_region_uptodate))
	{
		struct ipu_task task;
		GstImxBaseBlitterRegion *output_region = &(ipu_blitter->output_region);

		GST_LOG_OBJECT(ipu_blitter, "need to clear empty regions");

		/* Copy main task object, and replace its input data with the one
		 * for the dummy input object. This way, the data for the output
		 * is copied implicitely as well. */
		task = ipu_blitter->priv->task;
		GST_IMX_FILL_IPU_TASK(ipu_blitter, ipu_blitter->dummy_black_buffer, task.input);

		/* The clear op blits the whole dummy black frame, unrotated and
		 * without deinterlacing, over the full output region */
		task.input.deinterlace.enable = 0;
		task.input.crop.pos.x = 0;
		task.input.crop.pos.y = 0;
		task.input.crop.w = task.input.width;
		task.input.crop.h = task.input.height;
		task.output.rotate = IPU_ROTATE_NONE;
		task.output.crop.pos.x = output_region->x1;
		task.output.crop.pos.y = output_region->y1;
		task.output.crop.w = output_region->x2 - output_region->x1;
		task.output.crop.h = output_region->y2 - output_region->y1;

		GST_LOG_OBJECT(
			ipu_blitter,
			"clear op task input: width: %u height: %u format: 0x%x crop: %u,%u %ux%u phys addr %" GST_IMX_PHYS_ADDR_FORMAT " deinterlace enable %u motion 0x%x",
			task.input.width, task.input.height,
			task.input.format,
			task.input.crop.pos.x, task.input.crop.pos.y, task.input.crop.w, task.input.crop.h,
			(gst_imx_phys_addr_t)(task.input.paddr),
			task.input.deinterlace.enable, task.input.deinterlace.motion
		);
		GST_LOG_OBJECT(
			ipu_blitter,
			"clear op task output: width: %u height: %u format: 0x%x crop: %u,%u %ux%u paddr %" GST_IMX_PHYS_ADDR_FORMAT " rotate: %u",
			task.output.width, task.output.height,
			task.output.format,
			task.output.crop.pos.x, task.output.crop.pos.y, task.output.crop.w, task.output.crop.h,
			(gst_imx_phys_addr_t)(task.output.paddr),
			task.output.rotate
		);

		ret = ioctl(gst_imx_ipu_get_fd(), IPU_QUEUE_TASK, &task);
		if (ret == -1)
			GST_ERROR_OBJECT(ipu_blitter, "queuing IPU task failed: %s", strerror(errno));

		/* NOTE(review): uptodate is set even if the clear ioctl failed —
		 * presumably deliberate, to avoid re-attempting the clear on every
		 * frame; confirm this is the intended best-effort behavior */
		ipu_blitter->output_region_uptodate = TRUE;
	}

	/* The actual blit operation
	 * Input and output frame are assumed to be set up properly at this point */
	ret = ioctl(gst_imx_ipu_get_fd(), IPU_QUEUE_TASK, &(ipu_blitter->priv->task));

	/* Slow-motion deinterlacing keeps the last input frame around as the
	 * "previous" frame. This bookkeeping runs before the ret check so the
	 * frame references stay consistent even when the blit ioctl failed. */
	if (ipu_blitter->deinterlace_mode == GST_IMX_IPU_BLITTER_DEINTERLACE_SLOW_MOTION)
	{
		gst_imx_ipu_blitter_clear_previous_buffer(ipu_blitter);
		if (ipu_blitter->current_frame != NULL)
		{
			ipu_blitter->previous_frame = gst_buffer_ref(ipu_blitter->current_frame);
			ipu_blitter->current_frame = NULL;
		}
	}

	if (ret == -1)
	{
		GST_ERROR_OBJECT(ipu_blitter, "queuing IPU task failed: %s", strerror(errno));
		return FALSE;
	}

	return TRUE;
}
/* GstImxBaseBlitter vfunc: sets the input frame for subsequent blits.
 *
 * Fills the IPU task's input descriptor from the frame and decides,
 * based on the configured deinterlace mode and the stream's interlace
 * mode (plus per-buffer flags in mixed mode), whether the IPU's
 * deinterlacer is enabled for this frame. When enabled, the field
 * order is derived from the buffer's TFF flag.
 *
 * @param base_blitter  base blitter instance (actually a GstImxIpuBlitter)
 * @param input_frame   frame to read pixels from
 * @return always TRUE
 */
static gboolean gst_imx_ipu_blitter_set_input_frame(GstImxBaseBlitter *base_blitter, GstBuffer *input_frame)
{
	GstImxIpuBlitter *ipu_blitter = GST_IMX_IPU_BLITTER(base_blitter);
	struct ipu_task *ipu_task = &(ipu_blitter->priv->task);

	GST_IMX_FILL_IPU_TASK(ipu_blitter, input_frame, ipu_task->input);

	ipu_blitter->current_frame = input_frame;

	/* Deinterlacing is off unless both the blitter mode and the stream's
	 * interlacing call for it */
	ipu_task->input.deinterlace.enable = 0;

	if (ipu_blitter->deinterlace_mode != GST_IMX_IPU_BLITTER_DEINTERLACE_NONE)
	{
		switch (ipu_blitter->input_video_info.interlace_mode)
		{
			case GST_VIDEO_INTERLACE_MODE_PROGRESSIVE:
				GST_LOG_OBJECT(ipu_blitter, "input stream is progressive -> no deinterlacing necessary");
				break;

			case GST_VIDEO_INTERLACE_MODE_INTERLEAVED:
				GST_LOG_OBJECT(ipu_blitter, "input stream uses interlacing -> deinterlacing enabled");
				ipu_task->input.deinterlace.enable = 1;
				break;

			case GST_VIDEO_INTERLACE_MODE_MIXED:
				/* Mixed streams flag interlacing per buffer */
				if (GST_BUFFER_FLAG_IS_SET(input_frame, GST_VIDEO_BUFFER_FLAG_INTERLACED))
				{
					GST_LOG_OBJECT(ipu_blitter, "frame has deinterlacing flag");
					ipu_task->input.deinterlace.enable = 1;
				}
				else
					GST_LOG_OBJECT(ipu_blitter, "frame has no deinterlacing flag");
				break;

			case GST_VIDEO_INTERLACE_MODE_FIELDS:
				GST_FIXME_OBJECT(ipu_blitter, "2-fields deinterlacing not supported (yet)");
				break;

			default:
				GST_LOG_OBJECT(ipu_blitter, "input stream uses unknown interlacing mode -> no deinterlacing performed");
				break;
		}
	}

	ipu_task->input.paddr_n = 0;

	if (ipu_task->input.deinterlace.enable)
	{
		gboolean top_field_first = GST_BUFFER_FLAG_IS_SET(input_frame, GST_VIDEO_BUFFER_FLAG_TFF);

		if (top_field_first)
			GST_LOG_OBJECT(ipu_blitter, "interlaced with top field first");
		else
			GST_LOG_OBJECT(ipu_blitter, "interlaced with bottom field first");

		ipu_task->input.deinterlace.field_fmt = top_field_first ? IPU_DEINTERLACE_FIELD_TOP : IPU_DEINTERLACE_FIELD_BOTTOM;

		// ipu_blitter->priv->task.input.deinterlace.field_fmt |= IPU_DEINTERLACE_RATE_MASK;
	}
	else
	{
		ipu_task->input.deinterlace.motion = MED_MOTION;
	}

	return TRUE;
}