Example #1
static int libschroedinger_encode_frame(AVCodecContext *avccontext, AVPacket *pkt,
                                        const AVFrame *frame, int *got_packet)
{
    int enc_size = 0;
    SchroEncoderParams *p_schro_params = avccontext->priv_data;
    SchroEncoder *encoder = p_schro_params->encoder;
    struct FFSchroEncodedFrame *p_frame_output = NULL;
    int go = 1;
    SchroBuffer *enc_buf;
    int presentation_frame;
    int parse_code;
    int last_frame_in_sequence = 0;
    int pkt_size, ret;

    if (!frame) {
        /* Push end of sequence if not already signalled. */
        if (!p_schro_params->eos_signalled) {
            schro_encoder_end_of_stream(encoder);
            p_schro_params->eos_signalled = 1;
        }
    } else {
        /* Allocate frame data to schro input buffer. */
        SchroFrame *in_frame = libschroedinger_frame_from_data(avccontext,
                                                               frame);
        /* Load next frame. */
        schro_encoder_push_frame(encoder, in_frame);
    }

    if (p_schro_params->eos_pulled)
        go = 0;

    /* Now check to see if we have any output from the encoder. */
    while (go) {
        SchroStateEnum state;
        state = schro_encoder_wait(encoder);
        switch (state) {
        case SCHRO_STATE_HAVE_BUFFER:
        case SCHRO_STATE_END_OF_STREAM:
            enc_buf = schro_encoder_pull(encoder, &presentation_frame);
            assert(enc_buf->length > 0);
            parse_code = enc_buf->data[4];

            /* All non-frame data is prepended to the actual frame data so
             * that the pts can be set correctly; nothing is written to the
             * frame output queue until we actually have a frame. */
            p_schro_params->enc_buf = av_realloc(p_schro_params->enc_buf,
                                                 p_schro_params->enc_buf_size + enc_buf->length);

            memcpy(p_schro_params->enc_buf + p_schro_params->enc_buf_size,
                   enc_buf->data, enc_buf->length);
            p_schro_params->enc_buf_size += enc_buf->length;


            if (state == SCHRO_STATE_END_OF_STREAM) {
                p_schro_params->eos_pulled = 1;
                go = 0;
            }

            if (!SCHRO_PARSE_CODE_IS_PICTURE(parse_code)) {
                schro_buffer_unref(enc_buf);
                break;
            }

            /* Create output frame. */
            p_frame_output = av_mallocz(sizeof(FFSchroEncodedFrame));
            /* Set output data. */
            p_frame_output->size     = p_schro_params->enc_buf_size;
            p_frame_output->p_encbuf = p_schro_params->enc_buf;
            if (SCHRO_PARSE_CODE_IS_INTRA(parse_code) &&
                SCHRO_PARSE_CODE_IS_REFERENCE(parse_code))
                p_frame_output->key_frame = 1;

            /* Parse the coded frame number from the bitstream. Bytes 14
             * through 17 represent the frame number. */
            p_frame_output->frame_num = AV_RB32(enc_buf->data + 13);

            ff_schro_queue_push_back(&p_schro_params->enc_frame_queue,
                                     p_frame_output);
            p_schro_params->enc_buf_size = 0;
            p_schro_params->enc_buf      = NULL;

            schro_buffer_unref(enc_buf);

            break;

        case SCHRO_STATE_NEED_FRAME:
            go = 0;
            break;

        case SCHRO_STATE_AGAIN:
            break;

        default:
            av_log(avccontext, AV_LOG_ERROR, "Unknown Schro Encoder state\n");
            return -1;
        }
    }

    /* Copy 'next' frame in queue. */

    if (p_schro_params->enc_frame_queue.size == 1 &&
        p_schro_params->eos_pulled)
        last_frame_in_sequence = 1;

    p_frame_output = ff_schro_queue_pop(&p_schro_params->enc_frame_queue);

    if (!p_frame_output)
        return 0;

    pkt_size = p_frame_output->size;
    if (last_frame_in_sequence && p_schro_params->enc_buf_size > 0)
        pkt_size += p_schro_params->enc_buf_size;
    if ((ret = ff_alloc_packet(pkt, pkt_size)) < 0) {
        av_log(avccontext, AV_LOG_ERROR, "Error getting output packet of size %d.\n", pkt_size);
        goto error;
    }

    memcpy(pkt->data, p_frame_output->p_encbuf, p_frame_output->size);
    avccontext->coded_frame->key_frame = p_frame_output->key_frame;
    /* Use the frame number of the encoded frame as the pts. It is OK to
     * do so since Dirac is a constant frame rate codec. It expects input
     * to be of constant frame rate. */
    pkt->pts =
    avccontext->coded_frame->pts = p_frame_output->frame_num;
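    /* Packets are emitted in coded (decode) order, so a simple running
     * counter yields a monotonically increasing dts. */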
    pkt->dts = p_schro_params->dts++;
    enc_size = p_frame_output->size;

    /* Append the end of sequence information to the last frame in the
     * sequence. */
    if (last_frame_in_sequence && p_schro_params->enc_buf_size > 0) {
        memcpy(pkt->data + enc_size, p_schro_params->enc_buf,
               p_schro_params->enc_buf_size);
        enc_size += p_schro_params->enc_buf_size;
        av_freep(&p_schro_params->enc_buf);
        p_schro_params->enc_buf_size = 0;
    }

    if (p_frame_output->key_frame)
        pkt->flags |= AV_PKT_FLAG_KEY;
    *got_packet = 1;

error:
    /* free frame */
    libschroedinger_free_frame(p_frame_output);
    return ret;
}
Example #2
static bool
RunTest(int bit_depth)
{
	bool result = true;

	const int seq_len = 5;
	
	const int width = 100;
	const int height = 10;
	
	int luma_min = 16;
	int luma_max = 235;
	int chroma_zero = 128;
	
	schro_init();
	
	// set up encoder
	SchroEncoder *encoder = schro_encoder_new();
	
	schro_encoder_setting_set_double(encoder, "gop_structure", SCHRO_ENCODER_GOP_INTRA_ONLY);
	schro_encoder_setting_set_double(encoder, "rate_control", SCHRO_ENCODER_RATE_CONTROL_LOSSLESS);
	//schro_encoder_setting_set_double(encoder, "force_profile", SCHRO_ENCODER_PROFILE_VC2_SIMPLE);
	//schro_encoder_setting_set_double(encoder, "queue_depth", seq_len);
	//assert(seq_len <= SCHRO_LIMIT_FRAME_QUEUE_LENGTH);
	
	SchroVideoFormat *format = schro_encoder_get_video_format(encoder);
	
	if(format)
	{
		format->width = width;
		format->height = height;
		
		format->clean_width = format->width;
		format->clean_height = format->height;
		format->left_offset = 0;
		format->top_offset = 0;
		
		format->chroma_format = SCHRO_CHROMA_444;
		
		const SchroSignalRange range = (bit_depth == 12 ? SCHRO_SIGNAL_RANGE_12BIT_VIDEO :
										bit_depth == 10 ? SCHRO_SIGNAL_RANGE_10BIT_VIDEO :
										SCHRO_SIGNAL_RANGE_8BIT_VIDEO);
										
		schro_video_format_set_std_signal_range(format, range);
		
		luma_min = format->luma_offset;
		luma_max = format->luma_offset + format->luma_excursion;
		chroma_zero = format->chroma_offset;
		
		format->colour_primaries = SCHRO_COLOUR_PRIMARY_HDTV;
		format->colour_matrix = SCHRO_COLOUR_MATRIX_HDTV;
		format->transfer_function = SCHRO_TRANSFER_CHAR_TV_GAMMA;
		
		format->interlaced = false;
		
		format->frame_rate_numerator = 24;
		format->frame_rate_denominator = 1;
		
		format->aspect_ratio_numerator = 1;
		format->aspect_ratio_denominator = 1;
		
		schro_encoder_set_video_format(encoder, format);
		
		free(format);
	}
	else
		return false;
	
	schro_encoder_start(encoder);
	
	
	// create frame
	SchroFrame *start_frame = schro_frame_new_and_alloc(NULL, SCHRO_FRAME_FORMAT_U8_444, width, height);
	
	FillFrame<unsigned char>(start_frame, 16, 235, 128);
	
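	// keep the reference frame (and later the decoded frames) as 16-bit
	// samples when testing 10- or 12-bit depths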
	const SchroFrameFormat schro_format = (bit_depth > 8 ? SCHRO_FRAME_FORMAT_S16_444 : SCHRO_FRAME_FORMAT_U8_444);

	SchroFrame *original_frame = schro_frame_new_and_alloc(NULL, schro_format, width, height);
	
	schro_frame_convert(original_frame, start_frame);
	
	
	
	SchroDecoder *decoder = schro_decoder_new();
	
	// push frames to encoder
	for(int t = 0; t < seq_len; t++)
	{
		SchroFrame *new_frame = schro_frame_dup(original_frame);
	
		schro_encoder_push_frame(encoder, new_frame);
	}
	
	
	
	// pull packets out of encoder, pass to decoder
	int packets_out = 0;
	
	while(packets_out < seq_len)
	{
		SchroStateEnum encoder_state = schro_encoder_wait(encoder);
		
		if(encoder_state == SCHRO_STATE_HAVE_BUFFER || encoder_state == SCHRO_STATE_END_OF_STREAM)
		{
			int n_decodable_frames = -1;
		
			SchroBuffer *buffer = schro_encoder_pull(encoder, &n_decodable_frames);
			
			if(buffer)
			{
				const int parse_code = buffer->data[4];
				
				if(SCHRO_PARSE_CODE_IS_SEQ_HEADER(parse_code) ||
					SCHRO_PARSE_CODE_IS_AUXILIARY_DATA(parse_code) ||
					SCHRO_PARSE_CODE_IS_PICTURE(parse_code))
				{
					schro_decoder_push(decoder, buffer);
					
					//schro_buffer_unref(buffer);
					
					if(SCHRO_PARSE_CODE_IS_PICTURE(parse_code))
					{
						packets_out++;
					}
				}
			}
		}
		else
		{
			assert(encoder_state == SCHRO_STATE_NEED_FRAME);
			assert(encoder_state != SCHRO_STATE_AGAIN); // yeah, redundant
		
			schro_encoder_end_of_stream(encoder);
		}
	}
	
	
	
	// pull frames out of decoder
	int frames_out = 0;
	
	while(frames_out < seq_len)
	{
		int decoder_state = schro_decoder_wait(decoder);
		
		if(decoder_state == SCHRO_DECODER_FIRST_ACCESS_UNIT)
		{
			SchroVideoFormat *format = schro_decoder_get_video_format(decoder);
			
			if(format)
			{
				assert(format->width == width);
				assert(format->height == height);
				
				assert(format->chroma_format == SCHRO_CHROMA_444);
				
				assert(format->luma_offset == luma_min);
				assert(format->luma_offset + format->luma_excursion == luma_max);
				assert(format->chroma_offset == chroma_zero);
				
				free(format);
			}
		}
		else if(decoder_state == SCHRO_DECODER_NEED_BITS)
		{
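			// all encoded packets were pushed in the loop above, so tell the
			// decoder the stream is finished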
			schro_decoder_push_end_of_stream(decoder);
		}
		else if(decoder_state == SCHRO_DECODER_NEED_FRAME)
		{
			SchroFrame *decoder_frame = schro_frame_new_and_alloc(NULL, schro_format, width, height);
			
			schro_decoder_add_output_picture(decoder, decoder_frame);
		}
		else if(decoder_state == SCHRO_DECODER_OK || decoder_state == SCHRO_DECODER_EOS)
		{
			SchroFrame *decoder_frame = schro_decoder_pull(decoder);
			
			if(decoder_frame)
			{
				frames_out++;
			
				bool match = CompareFrames(decoder_frame, original_frame);
				
				//std::cout << (match ? "Match!" : "No Match!") << "  " << std::endl;
				
				if(!match)
				{
					// output doesn't match input, so print the values of the
					// first line of the first component to see what went in and out
					PrintFirstLine(original_frame);
					std::cout << "==========" << std::endl;
					PrintFirstLine(decoder_frame);
					std::cout << "==========" << std::endl;
				
					result = false;
				}
				
				schro_frame_unref(decoder_frame);
			}
		}
	}

	schro_frame_unref(original_frame);
	schro_frame_unref(start_frame);
	
	schro_decoder_free(decoder);
	schro_encoder_free(encoder);
	
	return result;
}
Example #3
int main (int argc, char *argv[]) {

	krad_v4l2_vpx_display_test_t *display_test;
	krad_flac_t *krad_flac;
	krad_dirac_t *krad_dirac;
	krad_vpx_encoder_t *krad_vpx_encoder;
	//krad_vpx_decoder_t *krad_vpx_decoder;
	kradgui_t *kradgui;
	kradebml_t *ebml;
	cairo_surface_t *cst;
	cairo_t *cr;
	char *filename = "/home/oneman/kode/testmedia/capture/new_testfile4.webm";
	int hud_width, hud_height;
	int hud_stride;
	int hud_byte_size;
	unsigned char *hud_data;
	struct SwsContext *sws_context;

	int fps;

	int videotrack;
	int audiotrack;
	char *device;
	krad_v4l2_t *kradv4l2;
	krad_sdl_opengl_display_t *krad_opengl_display;
	int width;
	int height;
	int count;
	//int first_frame;
	int read_composited;
	
	unsigned char *read_screen_buffer;
	unsigned char *dbuffer;
	unsigned char *dfbuffer;
	krad_audio_t *audio;
	krad_audio_api_t audio_api;
	krad_vorbis_t *krad_vorbis;
	
	hud_width = 320;
	hud_height = 240;

	read_composited = 1;
	//first_frame = 1;
	
	width = 640;
	height = 480;
	fps = 30;
	count = 0;
	int keyframe;
	int bitrate;
	void *frame = NULL;
	void *vpx_packet;
	int packet_size;
	int took;
	int video_codec;
	int audio_codec;
	int framenum;
	int bytes;
	
	bitrate = 1000;
	
	video_codec = 1;
	audio_codec = 1;
		
	if (argc < 2) {
		device = DEFAULT_DEVICE;
		printf("no device provided, using %s , to provide a device, example: krad_v4l2_test /dev/video0\n", device);
	} else {
		device = argv[1];
		printf("Using %s\n", device);
	}

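	// shared test state: ring buffers and sample buffers consumed by the
	// audio encoding thread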
	display_test = calloc(1, sizeof(krad_v4l2_vpx_display_test_t));
	pthread_rwlock_init(&display_test->ebml_write_lock, NULL);
	pthread_rwlock_rdlock(&display_test->ebml_write_lock);
	//display_test->kradtimer = kradtimer_create();
	display_test->input_ringbuffer[0] = krad_ringbuffer_create (RINGBUFFER_SIZE);
	display_test->input_ringbuffer[1] = krad_ringbuffer_create (RINGBUFFER_SIZE);
	display_test->samples[0] = malloc(4 * 8192);
	display_test->samples[1] = malloc(4 * 8192);
	display_test->first_block = 1;
	display_test->audio_codec = audio_codec;
	
	dbuffer = calloc(1, 2000000);
	dfbuffer = calloc(1, 2000000);
	audio_api = JACK;
	//audio = kradaudio_create("krad v4l2 vpx display test", audio_api);
	krad_vorbis = krad_vorbis_encoder_create(2, 44100, 0.7);
	krad_flac = krad_flac_encoder_create(1, 44100, 16);
	
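	// buffer for reading the composited OpenGL frame back, plus a swscale
	// context to convert that RGB image to YV12 for encoding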
	read_screen_buffer = calloc(1, width * height * 4 * 4 * 4 * 4);
	sws_context = sws_getContext ( width, height, PIX_FMT_RGB32, width, height, PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
	ebml = kradebml_create();
	//kradebml_open_output_stream(ebml, "192.168.1.2", 9080, "/teststream.webm", "secretkode");
	kradebml_open_output_file(ebml, filename);
	if (video_codec == 1) {
		kradebml_header(ebml, "webm", APPVERSION);
	} else {
		kradebml_header(ebml, "matroska", APPVERSION);
	}
	if (video_codec == 1) {
		videotrack = kradebml_add_video_track(ebml, "V_VP8", 30, width, height);
	}
	if (video_codec == 2) {
		videotrack = kradebml_add_video_track(ebml, "V_DIRAC", 10, width, height);
	}
	if (audio_codec == 1) {
		audiotrack = kradebml_add_audio_track(ebml, "A_VORBIS", 44100, 2, krad_vorbis->header, krad_vorbis->headerpos);
	}
	if (audio_codec == 2) {
		bytes = krad_flac_encoder_read_min_header(krad_flac, dfbuffer);
		audiotrack = kradebml_add_audio_track(ebml, "A_FLAC", 44100, 1, dfbuffer, bytes);
		display_test->krad_flac = krad_flac;
	}	
	kradebml_write(ebml);

	krad_opengl_display = krad_sdl_opengl_display_create(APPVERSION, width, height, width, height);
	//krad_opengl_display = krad_sdl_opengl_display_create(APPVERSION, 1920, 1080, width, height);
	
	krad_vpx_encoder = krad_vpx_encoder_create(width, height, bitrate);
	krad_dirac = krad_dirac_encoder_create(width, height);
	kradgui = kradgui_create(width, height);
	kradgui_add_item(kradgui, REEL_TO_REEL);
	
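	// Cairo surface backing the HUD overlay that kradgui draws each frame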
	hud_stride = cairo_format_stride_for_width (CAIRO_FORMAT_ARGB32, hud_width);
	hud_byte_size = hud_stride * hud_height;
	hud_data = calloc (1, hud_byte_size);
	cst = cairo_image_surface_create_for_data (hud_data, CAIRO_FORMAT_ARGB32, hud_width, hud_height, hud_stride);
	
	krad_opengl_display->hud_width = hud_width;
	krad_opengl_display->hud_height = hud_height;
	krad_opengl_display->hud_data = hud_data;
	display_test->audiotrack = audiotrack;
	display_test->ebml = ebml;
	//display_test->audio = audio;
	display_test->krad_vorbis = krad_vorbis;
	display_test->kradgui = kradgui;

	kradv4l2 = kradv4l2_create();

	kradv4l2_open(kradv4l2, device, width, height, fps);
	
	kradv4l2_start_capturing (kradv4l2);
	
	kradgui_reset_elapsed_time(kradgui);
	kradgui->render_timecode = 1;
	
	audio = kradaudio_create("krad v4l2 vpx display test", KINPUT, audio_api);
	display_test->audio = audio;
	pthread_create(&display_test->audio_encoding_thread, NULL, audio_encoding_thread, (void *)display_test);
	
	kradaudio_set_process_callback(audio, krad_v4l2_vpx_display_test_audio_callback, display_test);
	if (audio_api == JACK) {
		krad_jack_t *jack = (krad_jack_t *)audio->api;
		kradjack_connect_port(jack->jack_client, "firewire_pcm:001486af2e61ac6b_Unknown0_in", "krad v4l2 vpx display test:InputLeft");
		kradjack_connect_port(jack->jack_client, "firewire_pcm:001486af2e61ac6b_Unknown0_in", "krad v4l2 vpx display test:InputRight");
	}
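	// main loop: render the HUD, grab a frame from V4L2, display it, then
	// encode with VP8 or Dirac and mux the packet into the EBML file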
	while (count < TEST_COUNT) {
		//kradtimer_start(display_test->kradtimer, "cycle");


		cr = cairo_create(cst);
		kradgui->cr = cr;
		kradgui_render(kradgui);
		cairo_destroy(cr);
		
		frame = kradv4l2_read_frame_wait (kradv4l2);
		
		if (display_test->start_audio == 0) {
			display_test->start_audio = 1;
			usleep(200000);
		}
		
		if (video_codec == 2) {
		//	memcpy(dfbuffer, frame, width * height + (((width * height) / 2) * 2));
		}
		krad_vpx_convert_uyvy2yv12(krad_vpx_encoder->image, frame, width, height);
		///krad_vpx_convert_frame_for_local_gl_display(krad_vpx_encoder);
		kradv4l2_frame_done (kradv4l2);
		
		
		
		krad_sdl_opengl_display_render(krad_opengl_display, krad_vpx_encoder->image->planes[0], krad_vpx_encoder->image->stride[0], krad_vpx_encoder->image->planes[1], krad_vpx_encoder->image->stride[1], krad_vpx_encoder->image->planes[2], krad_vpx_encoder->image->stride[2]);

		krad_sdl_opengl_draw_screen( krad_opengl_display );
		
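		// optionally read the composited frame (video + HUD) back from OpenGL
		// and encode that instead of the raw camera frame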
		if (read_composited) {
			krad_sdl_opengl_read_screen( krad_opengl_display, read_screen_buffer);
			rgb_to_yv12(sws_context, read_screen_buffer, width, height, krad_vpx_encoder->image->planes, krad_vpx_encoder->image->stride);
			vpx_img_flip(krad_vpx_encoder->image);
		}
		
		if (video_codec == 1) {
		
			count++;
		
			packet_size = krad_vpx_encoder_write(krad_vpx_encoder, (unsigned char **)&vpx_packet, &keyframe);

			//printf("packet size was %d\n", packet_size);
			if (read_composited) {
				vpx_img_flip(krad_vpx_encoder->image);
			}
		
			if (packet_size) {
				pthread_rwlock_wrlock(&display_test->ebml_write_lock);
				kradebml_add_video(ebml, videotrack, vpx_packet, packet_size, keyframe);
				//kradebml_write(ebml);
				pthread_rwlock_unlock (&display_test->ebml_write_lock);		
			}


		}
		
		
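		// Dirac path: hand the current image to libschroedinger through
		// krad_dirac_encode and mux whatever packet comes back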
		if (video_codec == 2) {

//			packet_size = krad_dirac_encode (krad_dirac, dfbuffer, dbuffer, &framenum, &took);
			packet_size = krad_dirac_encode (krad_dirac, krad_vpx_encoder->image->img_data, dbuffer, &framenum, &took);
			if (took == 1) {
				took = 0;
				count++;
			}
	
			if (packet_size > 0) {
				krad_dirac_packet_type(dbuffer[4]);
				//write(fd, buffer, len);
				printf("Encoded size is %d for frame %d\n", packet_size, framenum);
			}
		
			//printf("packet size was %d\n", packet_size);
			if (read_composited) {
				vpx_img_flip(krad_vpx_encoder->image);
			}
		
			keyframe = 0;
		
			if (packet_size) {
				pthread_rwlock_wrlock(&display_test->ebml_write_lock);
				kradebml_add_video(ebml, videotrack, dbuffer, packet_size, keyframe);
				//kradebml_write(ebml);
				pthread_rwlock_unlock (&display_test->ebml_write_lock);		
			}
		
		}
		
	//kradtimer_finish_show(display_test->kradtimer);
	}
	
	display_test->start_audio = 0;
	
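	// signal end of stream to the Dirac encoder and drain its remaining packets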
	if (video_codec == 2) {
		
		schro_encoder_end_of_stream (krad_dirac->encoder);
//			packet_size = krad_dirac_encode (krad_dirac, dfbuffer, dbuffer, &framenum, &took);

		while ((framenum != count - 1) && (framenum != 0)) {

			packet_size = krad_dirac_encode (krad_dirac, krad_vpx_encoder->image->img_data, dbuffer, &framenum, &took);

			if (packet_size > 0) {
				krad_dirac_packet_type(dbuffer[4]);
				//write(fd, buffer, len);
				printf("Encoded size is %d for frame %d\n", packet_size, framenum);
			} else {
				usleep(50000);
			}
	
			//printf("packet size was %d\n", packet_size);
			if (read_composited) {
				//vpx_img_flip(krad_vpx_encoder->image);
			}
	
			keyframe = 0;
	
			if (packet_size) {
				pthread_rwlock_wrlock(&display_test->ebml_write_lock);
				kradebml_add_video(ebml, videotrack, dbuffer, packet_size, keyframe);
				//kradebml_write(ebml);
				pthread_rwlock_unlock (&display_test->ebml_write_lock);		
			}
	
		}
	
	}
	

	//display_test->start_audio = 0;
	printf("finish audio encoding\n");
	usleep(2000000);
	display_test->stop_audio_encoding = 1;
	
	
	pthread_join(display_test->audio_encoding_thread, NULL);
	
	kradebml_destroy(ebml);

	kradv4l2_stop_capturing (kradv4l2);

	kradv4l2_close(kradv4l2);

	kradv4l2_destroy(kradv4l2);
	krad_dirac_encoder_destroy(krad_dirac);
	krad_vpx_encoder_destroy(krad_vpx_encoder);
	
	// must be before vorbis
	kradaudio_destroy(audio);
	
	krad_vorbis_encoder_destroy(krad_vorbis);
	krad_flac_encoder_destroy(krad_flac);
	free(display_test->samples[0]);
	free(display_test->samples[1]);
	
	krad_sdl_opengl_display_destroy(krad_opengl_display);
	kradgui_destroy(kradgui);
	
	free(read_screen_buffer);
	sws_freeContext (sws_context);
	
	krad_ringbuffer_free ( display_test->input_ringbuffer[0] );
	krad_ringbuffer_free ( display_test->input_ringbuffer[1] );
	pthread_rwlock_destroy(&display_test->ebml_write_lock);
	//kradtimer_destroy(display_test->kradtimer);
	free(display_test);
	free(dbuffer);
	free(dfbuffer);
	return 0;

}
Example #4
void dirac_encode_test() {

	krad_dirac_t *krad_dirac;

	char filename[512];

	int count;
	unsigned char *buffer;
	unsigned char *framedata;
	int len;
	int took;
	int frame = 0; /* frame number reported back by krad_dirac_encode */
	
	int fd;


	strcpy(filename, "/home/oneman/Videos/");
	//strcat(filename, "rp-bvfi9-cs-lvl3-d13-noac-nocb-pcmp-cr422-10b.drc");
	strcat(filename, "test_dirac_encode1.drc");

	fd = open (filename, O_WRONLY | O_CREAT | O_TRUNC, S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH);
	
	buffer = calloc(1, 2000000);
	len = 0;
	took = 0;
	count = 0;
	krad_dirac = krad_dirac_encoder_create(1280, 720);
	
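	// feed frames of random mid-grey noise until the encoder has accepted
	// TEST_COUNT1 of them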
	while (count < TEST_COUNT1) {
	
	
		framedata = malloc(krad_dirac->size);
		memset (framedata, 64 + rand() % 64, krad_dirac->size);
	
		len = krad_dirac_encode (krad_dirac, framedata, buffer, &frame, &took);
		if (took == 1) {
			took = 0;
			count++;
		}
		
		if (len > 0) {
			krad_dirac_packet_type(buffer[4]);
			write(fd, buffer, len);
			printf("Encoded size is %d for frame %d\n", len, frame);
		}
		
	}
	
	printf("submitted %d frames\n", count);
	
	schro_encoder_end_of_stream (krad_dirac->encoder);
	
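	// drain the encoder: keep calling krad_dirac_encode with no new frame
	// until the last pushed frame has been emitted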
	while (frame != count - 1) {
		len = krad_dirac_encode (krad_dirac, NULL, buffer, &frame, &took);
		if (len > 0) {
			krad_dirac_packet_type(buffer[4]);
			write(fd, buffer, len);
			printf("Encoded size is %d for frame %d\n", len, frame);
		} else {
			usleep(50000);
		}
	}
	
	printf("finished!\n");
	
	krad_dirac_encoder_destroy(krad_dirac);

	close(fd);
	
	free(buffer);

}