Exemple #1
0
/* Drop one reference from a frame.  Once the last reference is gone the
 * VE-allocated luma/chroma buffers and the frame struct itself are
 * released.  A NULL frame is accepted and ignored. */
void frame_unref(struct frame_t *frame)
{
	if (frame == NULL)
		return;

	if (--frame->ref_counter <= 0)
	{
		ve_free(frame->luma_buffer);
		ve_free(frame->chroma_buffer);
		free(frame);
	}
}
Exemple #2
0
/* Destroy a decoder and release everything it owns.
 *
 * Fix: release the profile-specific private state via dec->private_free
 * the same way the error path of vdp_decoder_create does; previously it
 * was never freed here, leaking whatever new_decoder_*() allocated.
 *
 * @param decoder  handle returned by vdp_decoder_create
 * @return VDP_STATUS_INVALID_HANDLE for an unknown handle, else VDP_STATUS_OK
 */
VdpStatus vdp_decoder_destroy(VdpDecoder decoder)
{
	decoder_ctx_t *dec = handle_get(decoder);
	if (!dec)
		return VDP_STATUS_INVALID_HANDLE;

	/* tear down codec-private data first, while dec is still intact */
	if (dec->private_free)
		dec->private_free(dec);

	if (dec->extra_data)
		ve_free(dec->extra_data);
	ve_free(dec->data);

	handle_destroy(decoder);
	free(dec);

	return VDP_STATUS_OK;
}
/* Destroy a video surface and release everything it owns.
 *
 * Fix: drop the surface's reference on its yuv_data_t via yuv_unref();
 * previously vs->yuv was never released here even though
 * vdp_video_surface_create's error path frees it, so it leaked.
 *
 * @param surface  handle returned by vdp_video_surface_create
 * @return VDP_STATUS_INVALID_HANDLE for an unknown handle, else VDP_STATUS_OK
 */
VdpStatus vdp_video_surface_destroy(VdpVideoSurface surface)
{
	video_surface_ctx_t *vs = handle_get(surface);
	if (!vs)
		return VDP_STATUS_INVALID_HANDLE;

	if (vs->extra_data)
		ve_free(vs->extra_data);
	ve_free(vs->data);

	/* release the YUV buffers allocated by yuv_new() in create */
	yuv_unref(vs->yuv);

	handle_destroy(surface);
	free(vs);

	return VDP_STATUS_OK;
}
Exemple #4
0
/** Releases the resources
 *
 * @param str the string to release
 */
void str_free(Str *str)
{
	ve_free(str->data);
	str->data = NULL;
	str->buf_size = 0;
	str->str_len = 0;
	str->error = veTrue;
}
/* Drop one reference on a yuv_data_t, freeing it when the last
 * reference is gone.
 *
 * Fixes, for consistency with frame_unref():
 *  - NULL is accepted and ignored,
 *  - the release test is `<= 0` instead of `== 0`, so an over-release
 *    (counter already at zero) cannot drive the counter negative and
 *    keep the buffers alive forever.
 */
void yuv_unref(yuv_data_t *yuv)
{
	if (!yuv)
		return;

	yuv->ref_count--;

	if (yuv->ref_count <= 0)
	{
		ve_free(yuv->data);
		free(yuv);
	}
}
Exemple #6
0
/* JSON-encode `buf` and publish it via pubnub.
 *
 * Fix: the yajl_gen_get_buf() failure path used to `return veFalse`
 * directly, leaking `nubreq` (already allocated and initialised) and
 * leaving the generator in a half-built state; it now funnels through
 * the common `error:` cleanup like the other failure paths.
 *
 * @param nubat    pubnub AT context; owns the (lazily created) yajl generator
 * @param buf      string to publish
 * @param buf_len  length of buf in bytes
 * @return veTrue on success, veFalse on any failure
 */
veBool pubnub_atPublishN(struct PubnubAt* nubat, char const *buf, size_t buf_len)
{
	struct PubnubRequest *nubreq;
	u8 const *json;
	size_t json_len;

	/* lazily (re)create the JSON generator */
	if (!nubat->g) {
		nubat->g = yajl_gen_alloc(NULL);
		if (!nubat->g)
			return veFalse;
	}

	/* allocate request */
	nubreq = (struct PubnubRequest *) ve_malloc(sizeof(struct PubnubRequest));
	if (!nubreq) {
		/* reuse the nubat->g next time */
		return veFalse;
	}

	pubnub_req_init(&nubat->nub, nubreq, 512, 512);

	/* build json data.. */
	if (yajl_gen_string(nubat->g, (u8*) buf, buf_len) != yajl_gen_status_ok) {
		ve_error("json: not a valid string");
		goto error;
	}

	if (yajl_gen_get_buf(nubat->g, &json, &json_len) != yajl_gen_status_ok) {
		ve_error("json: could not get buf");
		goto error;	/* was: return veFalse, leaking nubreq */
	}

	/* sent it */
	if (pubnub_publish(nubreq, (char*) json, publish_callback) != RET_OK) {
		ve_error("could not pubnub_publish");
		goto error;
	}

	yajl_gen_clear(nubat->g);	/* empty buffers */
	yajl_gen_free(nubat->g);
	nubat->g = NULL;
	return veTrue;

error:
	pubnub_req_deinit(nubreq);
	ve_free(nubreq);
	yajl_gen_free(nubat->g);
	nubat->g = NULL;

	return veFalse;
}
/* Create a video surface of the requested size and chroma type and
 * register it in the handle table.
 *
 * @param device       owning device handle
 * @param chroma_type  requested chroma layout (stored, not validated here)
 * @param width        surface width, 1..8192
 * @param height       surface height, 1..8192
 * @param surface      out: new surface handle on success
 * @return VDP_STATUS_OK or a specific failure code; on failure nothing
 *         is left allocated.
 */
VdpStatus vdp_video_surface_create(VdpDevice device,
                                   VdpChromaType chroma_type,
                                   uint32_t width,
                                   uint32_t height,
                                   VdpVideoSurface *surface)
{
	/* validate arguments before touching any handle */
	if (surface == NULL)
		return VDP_STATUS_INVALID_POINTER;

	if (width < 1 || width > 8192 || height < 1 || height > 8192)
		return VDP_STATUS_INVALID_SIZE;

	device_ctx_t *dev = handle_get(device);
	if (dev == NULL)
		return VDP_STATUS_INVALID_HANDLE;

	video_surface_ctx_t *vs = calloc(1, sizeof(*vs));
	if (vs == NULL)
		return VDP_STATUS_RESOURCES;

	vs->device = dev;
	vs->width = width;
	vs->height = height;
	vs->chroma_type = chroma_type;
	/* luma plane size uses 32-pixel aligned dimensions */
	vs->luma_size = ALIGN(width, 32) * ALIGN(height, 32);

	VdpStatus ret = yuv_new(vs);
	if (ret != VDP_STATUS_OK)
	{
		free(vs);
		return ret;
	}

	int handle = handle_create(vs);
	if (handle == -1)
	{
		/* undo yuv_new() before dropping the context */
		ve_free(vs->yuv->data);
		free(vs->yuv);
		free(vs);
		return VDP_STATUS_RESOURCES;
	}

	*surface = handle;
	return VDP_STATUS_OK;
}
Exemple #8
0
/* Completion callback for pubnub_publish(): when the request finishes,
 * tear it down and go back to listening for incoming commands.  All
 * other events are ignored. */
static void publish_callback(struct PubnubRequest* req, NubEv ev,
									char const* buf, int buf_len, void *ctx)
{
	struct PubnubAt* nubat = (struct PubnubAt*) ctx;

	if (ev == NUB_DONE) {
		pubnub_req_deinit(req);		/* destruct the request data */
		ve_free(req);				/* free the request itself */
		pubnub_atSubscribe(nubat);	/* wait for commands when idle */
	}
}
Exemple #9
0
/** Allocates a larger buffer and copies the existing string.
 *
 * @param str		the string
 * @param newSize	the new size of the buffer, must be larger
 *
 */
static void str_resize(Str *str, u16 newSize)
{
	char *tmp;

	if (str->error)
		return;

	if (str->step == 0)
	{
		str_free(str);
		return;
	}

#ifdef VE_REALLOC_MISSING
	// create new buffer
	tmp = (char*) ve_malloc(newSize);
	if (tmp == NULL)
	{
		str_free(str);
		return;
	}

	// copy contents
	dbg_memset(tmp, 0, newSize);
	if (str->data != NULL)
	{
		strcpy(tmp, str->data);
		ve_free(str->data);
	}

	str->data = tmp;
#else
	tmp = ve_realloc(str->data, newSize);
	if (tmp == NULL)
	{
		str_free(str);
		return;
	}
	str->data = tmp;
#endif

	str->buf_size = newSize;
}
Exemple #10
0
/* Free all buffers owned by an encoder context and the context itself.
 *
 * Fix: a NULL context is now accepted and ignored, consistent with
 * frame_unref(); previously passing NULL dereferenced c immediately. */
void h264enc_free(h264enc *c)
{
	int i;

	if (!c)
		return;

	ve_free(c->extra_buffer_line);
	ve_free(c->extra_buffer_frame);
	/* both reference pictures */
	for (i = 0; i < 2; i++)
	{
		ve_free(c->ref_picture[i].luma_buffer);
		ve_free(c->ref_picture[i].extra_buffer);
	}
	ve_free(c->bytestream_buffer);
	ve_free(c->luma_buffer);
	free(c);
}
Exemple #11
0
/* Decode a baseline JPEG on the VE MPEG engine and display the result
 * via /dev/disp until a key is pressed.
 *
 * Fixes over the previous version:
 *  - ve_malloc() results are checked before the memcpy uses them,
 *  - the disp_open() failure path no longer returns directly, which
 *    leaked the three VE buffers and left the VE device open; all exits
 *    now funnel through common cleanup.
 */
void decode_jpeg(struct jpeg_t *jpeg)
{
	if (!ve_open())
		err(EXIT_FAILURE, "Can't open VE");

	/* input padded to a 64 KiB multiple; output planes use 32-aligned dims */
	int input_size =(jpeg->data_len + 65535) & ~65535;
	uint8_t *input_buffer = ve_malloc(input_size);
	int output_size = ((jpeg->width + 31) & ~31) * ((jpeg->height + 31) & ~31);
	uint8_t *luma_output = ve_malloc(output_size);
	uint8_t *chroma_output = ve_malloc(output_size);
	if (!input_buffer || !luma_output || !chroma_output)
		err(EXIT_FAILURE, "Can't allocate VE memory");
	memcpy(input_buffer, jpeg->data, jpeg->data_len);
	ve_flush_cache(input_buffer, jpeg->data_len);

	// activate MPEG engine
	void *ve_regs = ve_get(VE_ENGINE_MPEG, 0);

	// set restart interval
	writel(jpeg->restart_interval, ve_regs + VE_MPEG_JPEG_RES_INT);

	// set JPEG format
	set_format(jpeg, ve_regs);

	// set output buffers (Luma / Croma)
	writel(ve_virt2phys(luma_output), ve_regs + VE_MPEG_ROT_LUMA);
	writel(ve_virt2phys(chroma_output), ve_regs + VE_MPEG_ROT_CHROMA);

	// set size
	set_size(jpeg, ve_regs);

	// ??
	writel(0x00000000, ve_regs + VE_MPEG_SDROT_CTRL);

	// input end
	writel(ve_virt2phys(input_buffer) + input_size - 1, ve_regs + VE_MPEG_VLD_END);

	// ??
	writel(0x0000007c, ve_regs + VE_MPEG_CTRL);

	// set input offset in bits
	writel(0 * 8, ve_regs + VE_MPEG_VLD_OFFSET);

	// set input length in bits
	writel(jpeg->data_len * 8, ve_regs + VE_MPEG_VLD_LEN);

	// set input buffer
	writel(ve_virt2phys(input_buffer) | 0x70000000, ve_regs + VE_MPEG_VLD_ADDR);

	// set Quantisation Table
	set_quantization_tables(jpeg, ve_regs);

	// set Huffman Table
	writel(0x00000000, ve_regs + VE_MPEG_RAM_WRITE_PTR);
	set_huffman_tables(jpeg, ve_regs);

	// start
	writeb(0x0e, ve_regs + VE_MPEG_TRIGGER);

	// wait for interrupt
	ve_wait(1);

	// clean interrupt flag (??)
	writel(0x0000c00f, ve_regs + VE_MPEG_STATUS);

	// stop MPEG engine
	ve_put();

	//output_ppm(stdout, jpeg, output, output + (output_buf_size / 2));

	if (!disp_open())
	{
		fprintf(stderr, "Can't open /dev/disp\n");
		goto out;	/* was: bare return, leaking buffers and the VE device */
	}

	int color;
	switch ((jpeg->comp[0].samp_h << 4) | jpeg->comp[0].samp_v)
	{
	case 0x11:
	case 0x21:
		color = COLOR_YUV422;
		break;
	case 0x12:
	case 0x22:
	default:
		color = COLOR_YUV420;
		break;
	}

	disp_set_para(ve_virt2phys(luma_output), ve_virt2phys(chroma_output),
			color, jpeg->width, jpeg->height,
			0, 0, 800, 600);

	getchar();

	disp_close();

out:
	ve_free(input_buffer);
	ve_free(luma_output);
	ve_free(chroma_output);
	ve_close();
}
Exemple #12
0
/* Create a decoder context for the given profile and register it in the
 * handle table.
 *
 * Failure handling uses a goto ladder whose labels unwind exactly the
 * resources acquired so far; the label order mirrors the acquisition
 * order (ctx -> data -> codec-private -> handle).
 *
 * @param device          owning device handle
 * @param profile         VDPAU decoder profile; selects the codec backend
 * @param width, height   coded picture size, stored on the context
 * @param max_references  reference frame count, capped at 16
 * @param decoder         out: new decoder handle on success
 * @return VDP_STATUS_OK, VDP_STATUS_INVALID_HANDLE,
 *         VDP_STATUS_INVALID_DECODER_PROFILE (via ret),
 *         VDP_STATUS_ERROR, or VDP_STATUS_RESOURCES
 */
VdpStatus vdp_decoder_create(VdpDevice device,
                             VdpDecoderProfile profile,
                             uint32_t width,
                             uint32_t height,
                             uint32_t max_references,
                             VdpDecoder *decoder)
{
	device_ctx_t *dev = handle_get(device);
	if (!dev)
		return VDP_STATUS_INVALID_HANDLE;

	if (max_references > 16)
		return VDP_STATUS_ERROR;

	decoder_ctx_t *dec = calloc(1, sizeof(decoder_ctx_t));
	if (!dec)
		goto err_ctx;

	dec->device = dev;
	dec->profile = profile;
	dec->width = width;
	dec->height = height;

	/* bitstream buffer in VE memory (VBV_SIZE -- presumably the video
	 * buffering verifier size; defined elsewhere) */
	dec->data = ve_malloc(VBV_SIZE);
	if (!(dec->data))
		goto err_data;

	/* pick the codec backend; each new_decoder_* sets up private state
	 * and (on success) dec->private_free to tear it down */
	VdpStatus ret;
	switch (profile)
	{
	case VDP_DECODER_PROFILE_MPEG1:
	case VDP_DECODER_PROFILE_MPEG2_SIMPLE:
	case VDP_DECODER_PROFILE_MPEG2_MAIN:
		ret = new_decoder_mpeg12(dec);
		break;

	case VDP_DECODER_PROFILE_H264_BASELINE:
	case VDP_DECODER_PROFILE_H264_MAIN:
	case VDP_DECODER_PROFILE_H264_HIGH:
		ret = new_decoder_h264(dec);
		break;

	case VDP_DECODER_PROFILE_MPEG4_PART2_SP:
	case VDP_DECODER_PROFILE_MPEG4_PART2_ASP:
		ret = new_decoder_mp4(dec);
		break;

	default:
		ret = VDP_STATUS_INVALID_DECODER_PROFILE;
		break;
	}

	if (ret != VDP_STATUS_OK)
		goto err_decoder;

	int handle = handle_create(dec);
	if (handle == -1)
		goto err_handle;

	*decoder = handle;
	return VDP_STATUS_OK;

	/* unwind in reverse acquisition order */
err_handle:
	if (dec->private_free)
		dec->private_free(dec);
err_decoder:
	ve_free(dec->data);
err_data:
	free(dec);
err_ctx:
	return VDP_STATUS_RESOURCES;
}
Exemple #13
0
/* Decode one MPEG-1/2 picture on the VE hardware.
 *
 * Copies the bitstream into VE memory, programs the MPEG engine
 * registers (quant tables, picture header, prediction and output
 * buffers, bitstream window), triggers decoding and waits for the
 * interrupt.  The output lands in frame_buffers->output.
 *
 * NOTE(review): writel() here is called as (address, value) -- the
 * opposite argument order from decode_jpeg's writel(value, address);
 * presumably a different wrapper in this example -- verify.
 * NOTE(review): ve_malloc() is not checked; a failure crashes in memcpy.
 *
 * @param frame_buffers  forward/backward/output frame set; forward and
 *                       backward references are rotated for I/P pictures
 * @param mpeg           parsed picture data and header fields
 */
void decode_mpeg(struct frame_buffers_t *frame_buffers, const struct mpeg_t * const mpeg)
{
	/* bitstream buffer padded to a 64 KiB multiple */
	int input_size = (mpeg->len + 65535) & ~65535;
	uint8_t *input_buffer = ve_malloc(input_size);
	memcpy(input_buffer, mpeg->data, mpeg->len);
	ve_flush_cache(input_buffer, mpeg->len);

	void *ve_regs = ve_get_regs();

	// set quantisation tables
	set_quantization_tables(ve_regs, mpeg_default_intra_quant, mpeg_default_non_intra_quant);

	// set size (in 16x16 macroblocks, rounded up)
	uint16_t width = (mpeg->width + 15) / 16;
	uint16_t height = (mpeg->height + 15) / 16;
	writel(ve_regs + 0x100 + 0x08, (width << 8) | height);
	writel(ve_regs + 0x100 + 0x0c, ((width * 16) << 16) | (height * 16));

	// set picture header (packed MPEG picture/extension header fields)
	uint32_t pic_header = 0x00000000;
	pic_header |= ((mpeg->picture_coding_type & 0xf) << 28);
	pic_header |= ((mpeg->f_code[0][0] & 0xf) << 24);
	pic_header |= ((mpeg->f_code[0][1] & 0xf) << 20);
	pic_header |= ((mpeg->f_code[1][0] & 0xf) << 16);
	pic_header |= ((mpeg->f_code[1][1] & 0xf) << 12);
	pic_header |= ((mpeg->intra_dc_precision & 0x3) << 10);
	pic_header |= ((mpeg->picture_structure & 0x3) << 8);
	pic_header |= ((mpeg->top_field_first & 0x1) << 7);
	pic_header |= ((mpeg->frame_pred_frame_dct & 0x1) << 6);
	pic_header |= ((mpeg->concealment_motion_vectors & 0x1) << 5);
	pic_header |= ((mpeg->q_scale_type & 0x1) << 4);
	pic_header |= ((mpeg->intra_vlc_format & 0x1) << 3);
	pic_header |= ((mpeg->alternate_scan & 0x1) << 2);
	pic_header |= ((mpeg->full_pel_forward_vector & 0x1) << 1);
	pic_header |= ((mpeg->full_pel_backward_vector & 0x1) << 0);
	writel(ve_regs + 0x100 + 0x00, pic_header);

	// ??
	writel(ve_regs + 0x100 + 0x10, 0x00000000);

	// ??
	writel(ve_regs + 0x100 + 0x14, 0x800001b8);

	// ??
	writel(ve_regs + 0x100 + 0xc4, 0x00000000);

	// ??
	writel(ve_regs + 0x100 + 0xc8, 0x00000000);

	// set forward/backward predicion buffers
	// (I/P pictures become the new reference: backward -> forward,
	//  output -> backward, via the frame refcounting helpers)
	if (mpeg->picture_coding_type == PCT_I || mpeg->picture_coding_type == PCT_P)
	{
		frame_unref(frame_buffers->forward);
		frame_buffers->forward = frame_ref(frame_buffers->backward);
		frame_unref(frame_buffers->backward);
		frame_buffers->backward = frame_ref(frame_buffers->output);
	}
	writel(ve_regs + 0x100 + 0x50, ve_virt2phys(frame_buffers->forward->luma_buffer));
	writel(ve_regs + 0x100 + 0x54, ve_virt2phys(frame_buffers->forward->chroma_buffer));
	writel(ve_regs + 0x100 + 0x58, ve_virt2phys(frame_buffers->backward->luma_buffer));
	writel(ve_regs + 0x100 + 0x5c, ve_virt2phys(frame_buffers->backward->chroma_buffer));

	// set output buffers (Luma / Croma)
	writel(ve_regs + 0x100 + 0x48, ve_virt2phys(frame_buffers->output->luma_buffer));
	writel(ve_regs + 0x100 + 0x4c, ve_virt2phys(frame_buffers->output->chroma_buffer));
	writel(ve_regs + 0x100 + 0xcc, ve_virt2phys(frame_buffers->output->luma_buffer));
	writel(ve_regs + 0x100 + 0xd0, ve_virt2phys(frame_buffers->output->chroma_buffer));

	// set input offset in bits (mpeg->pos - 4: presumably skip back over
	// the start code -- TODO confirm)
	writel(ve_regs + 0x100 + 0x2c, (mpeg->pos - 4) * 8);

	// set input length in bits (+ little bit more, else it fails sometimes ??)
	writel(ve_regs + 0x100 + 0x30, (mpeg->len - (mpeg->pos - 4) + 16) * 8);

	// input end
	writel(ve_regs + 0x100 + 0x34, ve_virt2phys(input_buffer) + input_size - 1);

	// set input buffer
	writel(ve_regs + 0x100 + 0x28, ve_virt2phys(input_buffer) | 0x50000000);

	// trigger
	writel(ve_regs + 0x100 + 0x18, (mpeg->type ? 0x02000000 : 0x01000000) | 0x8000000f);

	// wait for interrupt
	ve_wait(1);

	// clean interrupt flag (??)
	writel(ve_regs + 0x100 + 0x1c, 0x0000c00f);

	ve_free(input_buffer);
}
Exemple #14
0
/* Encode a synthetic test picture to JPEG on the VE AVC engine.
 *
 * usage: prog width height quality [out.jpeg]
 *
 * Fixes over the previous version:
 *  - "%d"/"%-d" were used to print uint32_t values; "%u" is used now
 *    (uint32_t is unsigned int on this platform; PRIu32 would be the
 *    fully portable spelling),
 *  - every error path after ve_open() now releases the VE buffers and
 *    closes the VE engine via a common cleanup block instead of
 *    returning directly.
 */
int main(int argc, char *argv[])
{
    int rc;
    int ret = 1;                 /* pessimistic; set to 0 only on success */
    char *outjpeg = "poc.jpeg";
    int quality = 100;
    uint32_t w = 0;
    uint32_t h = 0;
    uint32_t bufsize = 0;
    struct ve_mem *Y_mem = NULL;
    struct ve_mem *C_mem = NULL;
    struct ve_mem *J_mem = NULL;
    uint8_t *Y = NULL;
    uint8_t *C = NULL;
    uint8_t *J = NULL;
    uint32_t Jsize = 0;
    uint32_t Jwritten = 0;

    if (argc != 4 && argc != 5) {
        fprintf(stderr, "usage: %s width height quality [out.jpeg]\n", argv[0]);
        return 1;
    }

    w = atoi(argv[1]);
    h = atoi(argv[2]);
    quality = atoi(argv[3]);
    if (argc > 4)
        outjpeg = argv[4];

    rc = ve_open();
    if (rc == 0) {
        printf("[JEPOC] error: could not open ve engine!\n");
        return 1;
    }

    /* round dimensions up to the 16-pixel macroblock grid */
    w = (w + 15) & ~15;
    h = (h + 15) & ~15;
    printf("[JEPOC] picture %ux%u at %d quality\n", w, h, quality);
    /* 3 times to leave enough room to try different color formats */
    bufsize = w * h;
    Y_mem = ve_malloc(bufsize);
    if (!Y_mem) {
        printf("[JEPOC] ve memory error! [%d]\n", __LINE__);
        goto out;
    }
    Y = (uint8_t *) Y_mem->virt;
    C_mem = ve_malloc(bufsize);
    if (!C_mem) {
        printf("[JEPOC] ve memory error! [%d]\n", __LINE__);
        goto out;
    }
    C = (uint8_t *) C_mem->virt;
    memset(Y, 0x80, bufsize);
    memset(C, 0x80, bufsize);
    picture_generate(w, h, Y, C);
    printf("[JEPOC] picture generated.\n");

    /* flush for H3 */
    ve_flush_cache(Y_mem);
    ve_flush_cache(C_mem);

    Jsize = 0x800000;
    J_mem = ve_malloc(Jsize);
    if (!J_mem) {
        printf("[JEPOC] ve memory error! [%d]\n", __LINE__);
        goto out;
    }
    J = (uint8_t *) J_mem->virt;

    veavc_select_subengine();
    veisp_set_buffers(Y_mem, C_mem);
    veisp_init_picture(w, h, VEISP_COLOR_FORMAT_NV12);

    veavc_init_vle(J_mem, Jsize);
    veavc_init_ctrl(VEAVC_ENCODER_MODE_JPEG);
    veavc_jpeg_parameters(1, 0, 0, 0);

    vejpeg_header_create(w, h, quality);
    vejpeg_write_SOF0();
    vejpeg_write_SOS();
    vejpeg_write_quantization();

    printf("[JEPOC] launch encoding.\n");
    veavc_launch_encoding();
    ve_wait(2);
    veavc_check_status();

    Jwritten = veavc_get_written();
    /* flush for H3 */
    ve_flush_cache(J_mem);
    vejpeg_write_file(outjpeg, J, Jwritten);
    printf("[JEPOC] written %u bytes to %s\n", Jwritten, outjpeg);

    ret = 0;

out:
    /* ve_free()'s behavior on NULL is not documented here, so guard */
    if (J_mem)
        ve_free(J_mem);
    if (C_mem)
        ve_free(C_mem);
    if (Y_mem)
        ve_free(Y_mem);
    ve_close();
    return ret;
}