/*
 * Push a decoded frame to the display.
 *
 * On the first call the display is opened and the layer is configured from
 * the frame's format/size with a fixed 800x600 screen window; subsequent
 * calls only flip to the new frame's buffers.
 *
 * frame: decoded frame whose luma/chroma buffers are VE-allocated
 *        (translated to physical addresses for the display driver).
 */
void frame_show(struct frame_t *frame)
{
	/* One-shot display initialization state, kept across calls. */
	static int display_ready = 0;
	/* Monotonically increasing id handed to the display driver. */
	static int next_frame_id = 0;

	if (!display_ready)
	{
		if (!disp_open())
		{
			fprintf(stderr, "Can't open /dev/disp\n");
			return;
		}

		/* Configure the layer once: source buffers, color format,
		   source size, and an 800x600 window at the screen origin. */
		disp_set_para(ve_virt2phys(frame->luma_buffer),
				ve_virt2phys(frame->chroma_buffer),
				frame->color, frame->width, frame->height,
				0, 0, 800, 600);
		display_ready = 1;
	}

	/* Flip the layer to this frame's buffers. */
	disp_new_frame(ve_virt2phys(frame->luma_buffer),
			ve_virt2phys(frame->chroma_buffer),
			next_frame_id++, 24000);
}
/*
 * Present an output surface on the queue's window.
 *
 * Drives the sunxi display engine directly through /dev/disp ioctls:
 * the surface's video data (if any) goes to a scaler layer, and the
 * RGBA/OSD data goes to a second, normal-mode layer stacked on top.
 *
 * presentation_queue: queue handle (resolved via handle_get()).
 * surface:            output surface handle to display.
 * clip_width/height:  clip the OSD layer's on-screen window; passed through
 *                     min_nz(), so 0 appears to mean "no clipping".
 * earliest_presentation_time: not supported; a nonzero value only logs once.
 *
 * Returns VDP_STATUS_INVALID_HANDLE for unknown handles, else VDP_STATUS_OK.
 */
VdpStatus vdp_presentation_queue_display(VdpPresentationQueue presentation_queue, VdpOutputSurface surface, uint32_t clip_width, uint32_t clip_height, VdpTime earliest_presentation_time)
{
	queue_ctx_t *q = handle_get(presentation_queue);
	if (!q)
		return VDP_STATUS_INVALID_HANDLE;

	output_surface_ctx_t *os = handle_get(surface);
	if (!os)
		return VDP_STATUS_INVALID_HANDLE;

	/* Timed presentation is not implemented; warn once and display now. */
	if (earliest_presentation_time != 0)
		VDPAU_DBG_ONCE("Presentation time not supported");

	/* The disp layer's screen window is in root-window (screen) coordinates,
	   so translate the drawable's origin first. */
	Window c;
	int x,y;
	XTranslateCoordinates(q->device->display, q->target->drawable, RootWindow(q->device->display, q->device->screen), 0, 0, &x, &y, &c);
	XClearWindow(q->device->display, q->target->drawable);

	if (os->vs)
	{
		// VIDEO layer
		__disp_layer_info_t layer_info;
		memset(&layer_info, 0, sizeof(layer_info));
		/* With OSD enabled the video takes pipe 0 so the OSD layer can sit
		   above it on pipe 1; without OSD the video uses pipe 1. */
		layer_info.pipe = q->device->osd_enabled ? 0 : 1;
		layer_info.mode = DISP_LAYER_WORK_MODE_SCALER;
		layer_info.fb.format = DISP_FORMAT_YUV420;
		layer_info.fb.seq = DISP_SEQ_UVUV;
		/* Map the VDPAU source format onto the disp fb mode/format/seq.
		   Defaults above cover the NV12-style cases. */
		switch (os->vs->source_format)
		{
		case VDP_YCBCR_FORMAT_YUYV:
			layer_info.fb.mode = DISP_MOD_INTERLEAVED;
			layer_info.fb.format = DISP_FORMAT_YUV422;
			layer_info.fb.seq = DISP_SEQ_YUYV;
			break;
		case VDP_YCBCR_FORMAT_UYVY:
			layer_info.fb.mode = DISP_MOD_INTERLEAVED;
			layer_info.fb.format = DISP_FORMAT_YUV422;
			layer_info.fb.seq = DISP_SEQ_UYVY;
			break;
		case VDP_YCBCR_FORMAT_NV12:
			layer_info.fb.mode = DISP_MOD_NON_MB_UV_COMBINED;
			break;
		case VDP_YCBCR_FORMAT_YV12:
			layer_info.fb.mode = DISP_MOD_NON_MB_PLANAR;
			break;
		default:
		case INTERNAL_YCBCR_FORMAT:
			layer_info.fb.mode = DISP_MOD_MB_UV_COMBINED;
			break;
		}
		layer_info.fb.br_swap = 0;
		/* NOTE(review): the +0x40000000 added to every physical address is
		   presumably the DRAM base offset the display driver expects —
		   confirm against the sunxi disp driver headers. */
		layer_info.fb.addr[0] = ve_virt2phys(os->vs->data) + 0x40000000;
		layer_info.fb.addr[1] = ve_virt2phys(os->vs->data + os->vs->plane_size) + 0x40000000;
		layer_info.fb.addr[2] = ve_virt2phys(os->vs->data + os->vs->plane_size + os->vs->plane_size / 4) + 0x40000000;
		layer_info.fb.cs_mode = DISP_BT601;
		layer_info.fb.size.width = os->vs->width;
		layer_info.fb.size.height = os->vs->height;
		/* Source crop inside the video surface. */
		layer_info.src_win.x = os->video_src_rect.x0;
		layer_info.src_win.y = os->video_src_rect.y0;
		layer_info.src_win.width = os->video_src_rect.x1 - os->video_src_rect.x0;
		layer_info.src_win.height = os->video_src_rect.y1 - os->video_src_rect.y0;
		/* Destination window in screen coordinates (window origin + dst rect). */
		layer_info.scn_win.x = x + os->video_dst_rect.x0;
		layer_info.scn_win.y = y + os->video_dst_rect.y0;
		layer_info.scn_win.width = os->video_dst_rect.x1 - os->video_dst_rect.x0;
		layer_info.scn_win.height = os->video_dst_rect.y1 - os->video_dst_rect.y0;
		/* Color keying only when no OSD pipe sits above the video. */
		layer_info.ck_enable = q->device->osd_enabled ? 0 : 1;
		/* If the window extends above the top of the screen, crop the
		   source instead of passing a negative screen coordinate. */
		if (layer_info.scn_win.y < 0)
		{
			int cutoff = -(layer_info.scn_win.y);
			layer_info.src_win.y += cutoff;
			layer_info.src_win.height -= cutoff;
			layer_info.scn_win.y = 0;
			layer_info.scn_win.height -= cutoff;
		}

		uint32_t args[4] = { 0, q->target->layer, (unsigned long)(&layer_info), 0 };
		ioctl(q->target->fd, DISP_CMD_LAYER_SET_PARA, args);
		ioctl(q->target->fd, DISP_CMD_LAYER_OPEN, args);

		// Note: might be more reliable (but slower and problematic when there
		// are driver issues and the GET functions return wrong values) to query the
		// old values instead of relying on our internal csc_change.
		// Since the driver calculates a matrix out of these values after each
		// set doing this unconditionally is costly.
		if (os->csc_change)
		{
			/* Driver enhance values appear centered on 0x20; scale our
			   brightness/contrast/saturation/hue into that range. */
			ioctl(q->target->fd, DISP_CMD_LAYER_ENHANCE_OFF, args);
			args[2] = 0xff * os->brightness + 0x20;
			ioctl(q->target->fd, DISP_CMD_LAYER_SET_BRIGHT, args);
			args[2] = 0x20 * os->contrast;
			ioctl(q->target->fd, DISP_CMD_LAYER_SET_CONTRAST, args);
			args[2] = 0x20 * os->saturation;
			ioctl(q->target->fd, DISP_CMD_LAYER_SET_SATURATION, args);
			// hue scale is randomly chosen, no idea how it maps exactly
			args[2] = (32 / 3.14) * os->hue + 0x20;
			ioctl(q->target->fd, DISP_CMD_LAYER_SET_HUE, args);
			ioctl(q->target->fd, DISP_CMD_LAYER_ENHANCE_ON, args);
			os->csc_change = 0;
		}
	}
	else
	{
		/* No video surface: make sure the video layer is hidden. */
		uint32_t args[4] = { 0, q->target->layer, 0, 0 };
		ioctl(q->target->fd, DISP_CMD_LAYER_CLOSE, args);
	}

	if (!q->device->osd_enabled)
		return VDP_STATUS_OK;

	if (os->rgba.flags & RGBA_FLAG_NEEDS_CLEAR)
		rgba_clear(&os->rgba);

	if (os->rgba.flags & RGBA_FLAG_DIRTY)
	{
		// TOP layer
		rgba_flush(&os->rgba);

		__disp_layer_info_t layer_info;
		memset(&layer_info, 0, sizeof(layer_info));
		layer_info.pipe = 1;
		layer_info.mode = DISP_LAYER_WORK_MODE_NORMAL;
		layer_info.fb.mode = DISP_MOD_INTERLEAVED;
		layer_info.fb.format = DISP_FORMAT_ARGB8888;
		layer_info.fb.seq = DISP_SEQ_ARGB;
		/* RGBA byte-order only differs by a red/blue swap here. */
		switch (os->rgba.format)
		{
		case VDP_RGBA_FORMAT_R8G8B8A8:
			layer_info.fb.br_swap = 1;
			break;
		case VDP_RGBA_FORMAT_B8G8R8A8:
		default:
			layer_info.fb.br_swap = 0;
			break;
		}
		layer_info.fb.addr[0] = ve_virt2phys(os->rgba.data) + 0x40000000;
		layer_info.fb.cs_mode = DISP_BT601;
		layer_info.fb.size.width = os->rgba.width;
		layer_info.fb.size.height = os->rgba.height;
		/* Only push the dirty sub-rectangle of the OSD surface, with the
		   screen window clipped by clip_width/clip_height via min_nz(). */
		layer_info.src_win.x = os->rgba.dirty.x0;
		layer_info.src_win.y = os->rgba.dirty.y0;
		layer_info.src_win.width = os->rgba.dirty.x1 - os->rgba.dirty.x0;
		layer_info.src_win.height = os->rgba.dirty.y1 - os->rgba.dirty.y0;
		layer_info.scn_win.x = x + os->rgba.dirty.x0;
		layer_info.scn_win.y = y + os->rgba.dirty.y0;
		layer_info.scn_win.width = min_nz(clip_width, os->rgba.dirty.x1) - os->rgba.dirty.x0;
		layer_info.scn_win.height = min_nz(clip_height, os->rgba.dirty.y1) - os->rgba.dirty.y0;

		uint32_t args[4] = { 0, q->target->layer_top, (unsigned long)(&layer_info), 0 };
		ioctl(q->target->fd, DISP_CMD_LAYER_SET_PARA, args);
		ioctl(q->target->fd, DISP_CMD_LAYER_OPEN, args);
	}
	else
	{
		/* OSD unchanged since last flip: keep/close the top layer. */
		uint32_t args[4] = { 0, q->target->layer_top, 0, 0 };
		ioctl(q->target->fd, DISP_CMD_LAYER_CLOSE, args);
	}

	return VDP_STATUS_OK;
}
void decode_jpeg(struct jpeg_t *jpeg) { if (!ve_open()) err(EXIT_FAILURE, "Can't open VE"); int input_size =(jpeg->data_len + 65535) & ~65535; uint8_t *input_buffer = ve_malloc(input_size); int output_size = ((jpeg->width + 31) & ~31) * ((jpeg->height + 31) & ~31); uint8_t *luma_output = ve_malloc(output_size); uint8_t *chroma_output = ve_malloc(output_size); memcpy(input_buffer, jpeg->data, jpeg->data_len); ve_flush_cache(input_buffer, jpeg->data_len); // activate MPEG engine void *ve_regs = ve_get(VE_ENGINE_MPEG, 0); // set restart interval writel(jpeg->restart_interval, ve_regs + VE_MPEG_JPEG_RES_INT); // set JPEG format set_format(jpeg, ve_regs); // set output buffers (Luma / Croma) writel(ve_virt2phys(luma_output), ve_regs + VE_MPEG_ROT_LUMA); writel(ve_virt2phys(chroma_output), ve_regs + VE_MPEG_ROT_CHROMA); // set size set_size(jpeg, ve_regs); // ?? writel(0x00000000, ve_regs + VE_MPEG_SDROT_CTRL); // input end writel(ve_virt2phys(input_buffer) + input_size - 1, ve_regs + VE_MPEG_VLD_END); // ?? writel(0x0000007c, ve_regs + VE_MPEG_CTRL); // set input offset in bits writel(0 * 8, ve_regs + VE_MPEG_VLD_OFFSET); // set input length in bits writel(jpeg->data_len * 8, ve_regs + VE_MPEG_VLD_LEN); // set input buffer writel(ve_virt2phys(input_buffer) | 0x70000000, ve_regs + VE_MPEG_VLD_ADDR); // set Quantisation Table set_quantization_tables(jpeg, ve_regs); // set Huffman Table writel(0x00000000, ve_regs + VE_MPEG_RAM_WRITE_PTR); set_huffman_tables(jpeg, ve_regs); // start writeb(0x0e, ve_regs + VE_MPEG_TRIGGER); // wait for interrupt ve_wait(1); // clean interrupt flag (??) 
writel(0x0000c00f, ve_regs + VE_MPEG_STATUS); // stop MPEG engine ve_put(); //output_ppm(stdout, jpeg, output, output + (output_buf_size / 2)); if (!disp_open()) { fprintf(stderr, "Can't open /dev/disp\n"); return; } int color; switch ((jpeg->comp[0].samp_h << 4) | jpeg->comp[0].samp_v) { case 0x11: case 0x21: color = COLOR_YUV422; break; case 0x12: case 0x22: default: color = COLOR_YUV420; break; } disp_set_para(ve_virt2phys(luma_output), ve_virt2phys(chroma_output), color, jpeg->width, jpeg->height, 0, 0, 800, 600); getchar(); disp_close(); ve_free(input_buffer); ve_free(luma_output); ve_free(chroma_output); ve_close(); }
/*
 * Decode one MPEG picture on the VE's MPEG engine.
 *
 * frame_buffers: forward/backward reference frames plus the output frame;
 *                the references are rotated here for I/P pictures.
 * mpeg:          parsed picture (bitstream in data/len, header fields,
 *                pos = byte offset of the picture data within the stream).
 *
 * NOTE(review): writel() is used here as writel(address, value) — the
 * opposite argument order from decode_jpeg's writel(value, address)
 * elsewhere in this source. Confirm which prototype/macro this
 * translation unit actually uses.
 */
void decode_mpeg(struct frame_buffers_t *frame_buffers, const struct mpeg_t * const mpeg)
{
	/* Bitstream buffer rounded up to a 64 KiB multiple. */
	int input_size = (mpeg->len + 65535) & ~65535;
	uint8_t *input_buffer = ve_malloc(input_size);
	memcpy(input_buffer, mpeg->data, mpeg->len);
	ve_flush_cache(input_buffer, mpeg->len);

	void *ve_regs = ve_get_regs();

	// set quantisation tables
	set_quantization_tables(ve_regs, mpeg_default_intra_quant, mpeg_default_non_intra_quant);

	// set size: macroblock counts packed in one register, pixel size in the next
	uint16_t width = (mpeg->width + 15) / 16;
	uint16_t height = (mpeg->height + 15) / 16;
	writel(ve_regs + 0x100 + 0x08, (width << 8) | height);
	writel(ve_regs + 0x100 + 0x0c, ((width * 16) << 16) | (height * 16));

	// set picture header: pack the parsed MPEG picture-header fields into one word
	uint32_t pic_header = 0x00000000;
	pic_header |= ((mpeg->picture_coding_type & 0xf) << 28);
	pic_header |= ((mpeg->f_code[0][0] & 0xf) << 24);
	pic_header |= ((mpeg->f_code[0][1] & 0xf) << 20);
	pic_header |= ((mpeg->f_code[1][0] & 0xf) << 16);
	pic_header |= ((mpeg->f_code[1][1] & 0xf) << 12);
	pic_header |= ((mpeg->intra_dc_precision & 0x3) << 10);
	pic_header |= ((mpeg->picture_structure & 0x3) << 8);
	pic_header |= ((mpeg->top_field_first & 0x1) << 7);
	pic_header |= ((mpeg->frame_pred_frame_dct & 0x1) << 6);
	pic_header |= ((mpeg->concealment_motion_vectors & 0x1) << 5);
	pic_header |= ((mpeg->q_scale_type & 0x1) << 4);
	pic_header |= ((mpeg->intra_vlc_format & 0x1) << 3);
	pic_header |= ((mpeg->alternate_scan & 0x1) << 2);
	pic_header |= ((mpeg->full_pel_forward_vector & 0x1) << 1);
	pic_header |= ((mpeg->full_pel_backward_vector & 0x1) << 0);
	writel(ve_regs + 0x100 + 0x00, pic_header);

	// ??
	writel(ve_regs + 0x100 + 0x10, 0x00000000);

	// ??
	writel(ve_regs + 0x100 + 0x14, 0x800001b8);

	// ??
	writel(ve_regs + 0x100 + 0xc4, 0x00000000);

	// ??
	writel(ve_regs + 0x100 + 0xc8, 0x00000000);

	// set forward/backward predicion buffers
	/* Reference pictures (I/P) rotate the refs: the old backward frame
	   becomes the forward ref, and the picture being decoded becomes the
	   new backward ref. B pictures leave the references untouched. */
	if (mpeg->picture_coding_type == PCT_I || mpeg->picture_coding_type == PCT_P)
	{
		frame_unref(frame_buffers->forward);
		frame_buffers->forward = frame_ref(frame_buffers->backward);
		frame_unref(frame_buffers->backward);
		frame_buffers->backward = frame_ref(frame_buffers->output);
	}

	writel(ve_regs + 0x100 + 0x50, ve_virt2phys(frame_buffers->forward->luma_buffer));
	writel(ve_regs + 0x100 + 0x54, ve_virt2phys(frame_buffers->forward->chroma_buffer));
	writel(ve_regs + 0x100 + 0x58, ve_virt2phys(frame_buffers->backward->luma_buffer));
	writel(ve_regs + 0x100 + 0x5c, ve_virt2phys(frame_buffers->backward->chroma_buffer));

	// set output buffers (Luma / Croma)
	writel(ve_regs + 0x100 + 0x48, ve_virt2phys(frame_buffers->output->luma_buffer));
	writel(ve_regs + 0x100 + 0x4c, ve_virt2phys(frame_buffers->output->chroma_buffer));
	writel(ve_regs + 0x100 + 0xcc, ve_virt2phys(frame_buffers->output->luma_buffer));
	writel(ve_regs + 0x100 + 0xd0, ve_virt2phys(frame_buffers->output->chroma_buffer));

	// set input offset in bits (pos - 4: presumably rewinds to the 4-byte
	// start code preceding the picture data — confirm against the parser)
	writel(ve_regs + 0x100 + 0x2c, (mpeg->pos - 4) * 8);

	// set input length in bits (+ little bit more, else it fails sometimes ??)
	writel(ve_regs + 0x100 + 0x30, (mpeg->len - (mpeg->pos - 4) + 16) * 8);

	// input end
	writel(ve_regs + 0x100 + 0x34, ve_virt2phys(input_buffer) + input_size - 1);

	// set input buffer
	writel(ve_regs + 0x100 + 0x28, ve_virt2phys(input_buffer) | 0x50000000);

	// trigger (high word selects the codec variant based on mpeg->type)
	writel(ve_regs + 0x100 + 0x18, (mpeg->type ? 0x02000000 : 0x01000000) | 0x8000000f);

	// wait for interrupt
	ve_wait(1);

	// clean interrupt flag (??)
	writel(ve_regs + 0x100 + 0x1c, 0x0000c00f);

	ve_free(input_buffer);
}
/*
 * Encode one picture with the VE's AVC engine.
 *
 * Reads the input image from c->luma_buffer / c->chroma_buffer, writes the
 * resulting bytestream into c->bytestream_buffer and stores its length (in
 * bytes) in c->bytestream_length. Frame 0 of each keyframe interval is
 * encoded as an I slice (with SPS/PPS prepended when c->write_sps_pps is
 * set); all other frames are P slices.
 *
 * c: encoder context; c->current_frame_num advances and wraps at
 *    c->keyframe_interval as a side effect.
 *
 * Returns nonzero when the hardware status reports success
 * ((status & 0x3) == 0x1), zero otherwise.
 */
int h264enc_encode_picture(h264enc *c)
{
	c->current_slice_type = c->current_frame_num ? SLICE_P : SLICE_I;

	c->regs = ve_get(VE_ENGINE_AVC, 0);

	/* flush buffers (output because otherwise we might read old data later) */
	ve_flush_cache(c->bytestream_buffer, c->bytestream_buffer_size);
	ve_flush_cache(c->luma_buffer, c->input_buffer_size);

	/* set output buffer (VLE = bytestream writer; limits are in bits) */
	writel(0x0, c->regs + VE_AVC_VLE_OFFSET);
	writel(ve_virt2phys(c->bytestream_buffer), c->regs + VE_AVC_VLE_ADDR);
	writel(ve_virt2phys(c->bytestream_buffer) + c->bytestream_buffer_size - 1, c->regs + VE_AVC_VLE_END);
	writel(c->bytestream_buffer_size * 8, c->regs + VE_AVC_VLE_MAX);

	/* write headers */
	if (c->write_sps_pps)
	{
		put_seq_parameter_set(c);
		put_pic_parameter_set(c);
		c->write_sps_pps = 0;
	}
	put_slice_header(c);

	/* set input size */
	writel(c->mb_stride << 16, c->regs + VE_ISP_INPUT_STRIDE);
	writel((c->mb_width << 16) | (c->mb_height << 0), c->regs + VE_ISP_INPUT_SIZE);

	/* set input format */
	writel(c->input_color_format << 29, c->regs + VE_ISP_CTRL);

	/* set input buffer */
	writel(ve_virt2phys(c->luma_buffer), c->regs + VE_ISP_INPUT_LUMA);
	writel(ve_virt2phys(c->chroma_buffer), c->regs + VE_ISP_INPUT_CHROMA);

	/* set reconstruction buffers — the two ref pictures alternate
	   (ping-pong) via current_frame_num % 2 */
	struct h264enc_ref_pic *ref_pic = &c->ref_picture[c->current_frame_num % 2];
	writel(ve_virt2phys(ref_pic->luma_buffer), c->regs + VE_AVC_REC_LUMA);
	writel(ve_virt2phys(ref_pic->chroma_buffer), c->regs + VE_AVC_REC_CHROMA);
	writel(ve_virt2phys(ref_pic->extra_buffer), c->regs + VE_AVC_REC_SLUMA);

	/* set reference buffers (the other ref picture; P slices only) */
	if (c->current_slice_type != SLICE_I)
	{
		ref_pic = &c->ref_picture[(c->current_frame_num + 1) % 2];
		writel(ve_virt2phys(ref_pic->luma_buffer), c->regs + VE_AVC_REF_LUMA);
		writel(ve_virt2phys(ref_pic->chroma_buffer), c->regs + VE_AVC_REF_CHROMA);
		writel(ve_virt2phys(ref_pic->extra_buffer), c->regs + VE_AVC_REF_SLUMA);
	}

	/* set unknown purpose buffers */
	writel(ve_virt2phys(c->extra_buffer_line), c->regs + VE_AVC_MB_INFO);
	writel(ve_virt2phys(c->extra_buffer_frame), c->regs + VE_AVC_UNK_BUF);

	/* enable interrupt and clear status flags */
	writel(readl(c->regs + VE_AVC_CTRL) | 0xf, c->regs + VE_AVC_CTRL);
	writel(readl(c->regs + VE_AVC_STATUS) | 0x7, c->regs + VE_AVC_STATUS);

	/* set encoding parameters: 0x100 = CABAC entropy coding, 0x10 = P slice */
	uint32_t params = 0x0;
	if (c->entropy_coding_mode_flag)
		params |= 0x100;
	if (c->current_slice_type == SLICE_P)
		params |= 0x10;
	writel(params, c->regs + VE_AVC_PARAM);
	writel((4 << 16) | (c->pic_init_qp << 8) | c->pic_init_qp, c->regs + VE_AVC_QP);
	writel(0x00000104, c->regs + VE_AVC_MOTION_EST);

	/* trigger encoding */
	writel(0x8, c->regs + VE_AVC_TRIGGER);
	ve_wait(1);

	/* check result (write back clears the status flags) */
	uint32_t status = readl(c->regs + VE_AVC_STATUS);
	writel(status, c->regs + VE_AVC_STATUS);

	/* save bytestream length (register holds bits; convert to bytes) */
	c->bytestream_length = readl(c->regs + VE_AVC_VLE_LENGTH) / 8;

	/* next frame; wrap at the keyframe interval so the next picture
	   becomes an I slice again */
	c->current_frame_num++;
	if (c->current_frame_num >= c->keyframe_interval)
		c->current_frame_num = 0;

	ve_put();

	return (status & 0x3) == 0x1;
}