void decode_jpeg(struct jpeg_t *jpeg)
{
	if (!ve_open())
		err(EXIT_FAILURE, "Can't open VE");

	int input_size = (jpeg->data_len + 65535) & ~65535;
	uint8_t *input_buffer = ve_malloc(input_size);
	int output_size = ((jpeg->width + 31) & ~31) * ((jpeg->height + 31) & ~31);
	uint8_t *luma_output = ve_malloc(output_size);
	uint8_t *chroma_output = ve_malloc(output_size);
	memcpy(input_buffer, jpeg->data, jpeg->data_len);
	ve_flush_cache(input_buffer, jpeg->data_len);

	// activate MPEG engine
	void *ve_regs = ve_get(VE_ENGINE_MPEG, 0);

	// set restart interval
	writel(jpeg->restart_interval, ve_regs + VE_MPEG_JPEG_RES_INT);

	// set JPEG format
	set_format(jpeg, ve_regs);

	// set output buffers (Luma / Chroma)
	writel(ve_virt2phys(luma_output), ve_regs + VE_MPEG_ROT_LUMA);
	writel(ve_virt2phys(chroma_output), ve_regs + VE_MPEG_ROT_CHROMA);

	// set size
	set_size(jpeg, ve_regs);

	// ??
	writel(0x00000000, ve_regs + VE_MPEG_SDROT_CTRL);

	// input end
	writel(ve_virt2phys(input_buffer) + input_size - 1, ve_regs + VE_MPEG_VLD_END);

	// ??
	writel(0x0000007c, ve_regs + VE_MPEG_CTRL);

	// set input offset in bits
	writel(0 * 8, ve_regs + VE_MPEG_VLD_OFFSET);

	// set input length in bits
	writel(jpeg->data_len * 8, ve_regs + VE_MPEG_VLD_LEN);

	// set input buffer
	writel(ve_virt2phys(input_buffer) | 0x70000000, ve_regs + VE_MPEG_VLD_ADDR);

	// set Quantisation Table
	set_quantization_tables(jpeg, ve_regs);

	// set Huffman Table
	writel(0x00000000, ve_regs + VE_MPEG_RAM_WRITE_PTR);
	set_huffman_tables(jpeg, ve_regs);

	// start
	writeb(0x0e, ve_regs + VE_MPEG_TRIGGER);

	// wait for interrupt
	ve_wait(1);

	// clean interrupt flag (??)
	writel(0x0000c00f, ve_regs + VE_MPEG_STATUS);

	// stop MPEG engine
	ve_put();

	//output_ppm(stdout, jpeg, output, output + (output_buf_size / 2));

	if (!disp_open())
	{
		fprintf(stderr, "Can't open /dev/disp\n");
		return;
	}

	int color;
	switch ((jpeg->comp[0].samp_h << 4) | jpeg->comp[0].samp_v)
	{
	case 0x11:
	case 0x21:
		color = COLOR_YUV422;
		break;
	case 0x12:
	case 0x22:
	default:
		color = COLOR_YUV420;
		break;
	}

	disp_set_para(ve_virt2phys(luma_output), ve_virt2phys(chroma_output),
			color, jpeg->width, jpeg->height,
			0, 0, 800, 600);

	getchar();

	disp_close();

	ve_free(input_buffer);
	ve_free(luma_output);
	ve_free(chroma_output);
	ve_close();
}
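A minimal caller for this routine could look like the sketch below; everything VE-specific is handled inside decode_jpeg(), so the caller only has to load the file and fill struct jpeg_t. The parse_jfif() helper and its signature are assumptions made for illustration, and struct jpeg_t is assumed to be declared in a shared header; only the fields the decoder actually reads are relied on (data, data_len, width, height, restart_interval and the component sampling factors).

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

int main(int argc, char **argv)
{
	if (argc < 2)
	{
		fprintf(stderr, "Usage: %s file.jpg\n", argv[0]);
		return EXIT_FAILURE;
	}

	/* load the whole JPEG file into memory */
	FILE *f = fopen(argv[1], "rb");
	if (!f)
	{
		perror("fopen");
		return EXIT_FAILURE;
	}
	fseek(f, 0, SEEK_END);
	long size = ftell(f);
	rewind(f);

	uint8_t *data = malloc(size);
	if (!data || fread(data, 1, size, f) != (size_t)size)
	{
		fprintf(stderr, "Can't read %s\n", argv[1]);
		return EXIT_FAILURE;
	}
	fclose(f);

	/* parse_jfif() is a hypothetical parser that fills the fields
	 * decode_jpeg() reads: data, data_len, width, height,
	 * restart_interval and the per-component sampling factors */
	struct jpeg_t jpeg = { 0 };
	if (!parse_jfif(&jpeg, data, size))
	{
		fprintf(stderr, "Can't parse %s\n", argv[1]);
		return EXIT_FAILURE;
	}

	decode_jpeg(&jpeg);

	free(data);
	return EXIT_SUCCESS;
}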
int h264enc_encode_picture(h264enc *c)
{
	c->current_slice_type = c->current_frame_num ? SLICE_P : SLICE_I;

	c->regs = ve_get(VE_ENGINE_AVC, 0);

	/* flush buffers (output because otherwise we might read old data later) */
	ve_flush_cache(c->bytestream_buffer, c->bytestream_buffer_size);
	ve_flush_cache(c->luma_buffer, c->input_buffer_size);

	/* set output buffer */
	writel(0x0, c->regs + VE_AVC_VLE_OFFSET);
	writel(ve_virt2phys(c->bytestream_buffer), c->regs + VE_AVC_VLE_ADDR);
	writel(ve_virt2phys(c->bytestream_buffer) + c->bytestream_buffer_size - 1, c->regs + VE_AVC_VLE_END);
	writel(c->bytestream_buffer_size * 8, c->regs + VE_AVC_VLE_MAX);

	/* write headers */
	if (c->write_sps_pps)
	{
		put_seq_parameter_set(c);
		put_pic_parameter_set(c);
		c->write_sps_pps = 0;
	}
	put_slice_header(c);

	/* set input size */
	writel(c->mb_stride << 16, c->regs + VE_ISP_INPUT_STRIDE);
	writel((c->mb_width << 16) | (c->mb_height << 0), c->regs + VE_ISP_INPUT_SIZE);

	/* set input format */
	writel(c->input_color_format << 29, c->regs + VE_ISP_CTRL);

	/* set input buffer */
	writel(ve_virt2phys(c->luma_buffer), c->regs + VE_ISP_INPUT_LUMA);
	writel(ve_virt2phys(c->chroma_buffer), c->regs + VE_ISP_INPUT_CHROMA);

	/* set reconstruction buffers */
	struct h264enc_ref_pic *ref_pic = &c->ref_picture[c->current_frame_num % 2];
	writel(ve_virt2phys(ref_pic->luma_buffer), c->regs + VE_AVC_REC_LUMA);
	writel(ve_virt2phys(ref_pic->chroma_buffer), c->regs + VE_AVC_REC_CHROMA);
	writel(ve_virt2phys(ref_pic->extra_buffer), c->regs + VE_AVC_REC_SLUMA);

	/* set reference buffers */
	if (c->current_slice_type != SLICE_I)
	{
		ref_pic = &c->ref_picture[(c->current_frame_num + 1) % 2];
		writel(ve_virt2phys(ref_pic->luma_buffer), c->regs + VE_AVC_REF_LUMA);
		writel(ve_virt2phys(ref_pic->chroma_buffer), c->regs + VE_AVC_REF_CHROMA);
		writel(ve_virt2phys(ref_pic->extra_buffer), c->regs + VE_AVC_REF_SLUMA);
	}

	/* set unknown purpose buffers */
	writel(ve_virt2phys(c->extra_buffer_line), c->regs + VE_AVC_MB_INFO);
	writel(ve_virt2phys(c->extra_buffer_frame), c->regs + VE_AVC_UNK_BUF);

	/* enable interrupt and clear status flags */
	writel(readl(c->regs + VE_AVC_CTRL) | 0xf, c->regs + VE_AVC_CTRL);
	writel(readl(c->regs + VE_AVC_STATUS) | 0x7, c->regs + VE_AVC_STATUS);

	/* set encoding parameters */
	uint32_t params = 0x0;
	if (c->entropy_coding_mode_flag)
		params |= 0x100;
	if (c->current_slice_type == SLICE_P)
		params |= 0x10;
	writel(params, c->regs + VE_AVC_PARAM);
	writel((4 << 16) | (c->pic_init_qp << 8) | c->pic_init_qp, c->regs + VE_AVC_QP);
	writel(0x00000104, c->regs + VE_AVC_MOTION_EST);

	/* trigger encoding */
	writel(0x8, c->regs + VE_AVC_TRIGGER);
	ve_wait(1);

	/* check result */
	uint32_t status = readl(c->regs + VE_AVC_STATUS);
	writel(status, c->regs + VE_AVC_STATUS);

	/* save bytestream length */
	c->bytestream_length = readl(c->regs + VE_AVC_VLE_LENGTH) / 8;

	/* next frame */
	c->current_frame_num++;
	if (c->current_frame_num >= c->keyframe_interval)
		c->current_frame_num = 0;

	ve_put();

	return (status & 0x3) == 0x1;
}
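The function returns nonzero on success and leaves the encoded bitstream in bytestream_buffer with its length (in bytes) in bytestream_length, so a capture loop can be wrapped around it. The sketch below is an illustration under stated assumptions: it presumes the h264enc context (buffers, macroblock dimensions, keyframe_interval) has already been set up elsewhere, and that one raw input picture occupies input_buffer_size bytes starting at luma_buffer, as the cache flush above suggests.

#include <stdio.h>

/* Hypothetical frame loop around h264enc_encode_picture(). Assumes the
 * context was initialised elsewhere and that luma_buffer and chroma_buffer
 * point into one contiguous input allocation of input_buffer_size bytes. */
void encode_file(h264enc *c, FILE *in, FILE *out)
{
	/* read one raw input picture per iteration into the VE input buffer */
	while (fread(c->luma_buffer, 1, c->input_buffer_size, in) == (size_t)c->input_buffer_size)
	{
		if (!h264enc_encode_picture(c))
		{
			fprintf(stderr, "encoding failed\n");
			break;
		}

		/* write out the encoded frame reported by the encoder */
		fwrite(c->bytestream_buffer, 1, c->bytestream_length, out);
	}
}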