Example #1
static void mp4_report(GF_SceneLoader *load, GF_Err e, char *format, ...)
{
#ifndef GPAC_DISABLE_LOG
	if (gf_log_tool_level_on(GF_LOG_PARSER, e ? GF_LOG_ERROR : GF_LOG_WARNING)) {
		char szMsg[1024];
		va_list args;
		va_start(args, format);
		vsnprintf(szMsg, sizeof(szMsg), format, args); /* bounded write into the 1024-byte buffer */
		va_end(args);
		GF_LOG((u32) (e ? GF_LOG_ERROR : GF_LOG_WARNING), GF_LOG_PARSER, ("[MP4 Loading] %s\n", szMsg) );
	}
#endif
}
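
The guard pattern above, testing gf_log_tool_level_on before formatting, is worth reusing whenever building the message is itself expensive. A minimal sketch against the same GPAC logging API; expensive_scene_dump is a hypothetical placeholder for any costly formatting step:

#include <gpac/tools.h>	/* gf_log_tool_level_on, GF_LOG, gf_free */

extern char *expensive_scene_dump(void *scene);	/* hypothetical helper */

static void report_scene_stats(void *scene)
{
#ifndef GPAC_DISABLE_LOG
	/* skip the costly dump entirely unless PARSER debug logs are enabled */
	if (gf_log_tool_level_on(GF_LOG_PARSER, GF_LOG_DEBUG)) {
		char *dump = expensive_scene_dump(scene);
		GF_LOG(GF_LOG_DEBUG, GF_LOG_PARSER, ("[MP4 Loading] %s\n", dump));
		gf_free(dump);
	}
#endif
}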
Example #2
/// <summary>
/// Entry point for the application
/// </summary>
/// <param name="hInstance">handle to the application instance</param>
/// <param name="hPrevInstance">always 0</param>
/// <param name="lpCmdLine">command line arguments</param>
/// <param name="nCmdShow">whether to display minimized, maximized, or normally</param>
/// <returns>status</returns>
int APIENTRY wWinMain(HINSTANCE hInstance, HINSTANCE hPrevInstance, LPWSTR lpCmdLine, int nCmdShow)
{
    CColorBasics application;

	// DASH muxer output and its configuration options
	DASHout *dasher;
	loook_opt *options;
	
#ifdef _DEBUG
	RedirectIOToConsole();
#endif
	
	options = loook_init();
	
	gf_sys_init(GF_MemTrackerNone);
#ifdef USE_GPAC_LOG
	gf_log_set_tool_level(GF_LOG_DASH, GF_LOG_DEBUG);
	if (gf_log_tool_level_on(GF_LOG_DASH, GF_LOG_DEBUG)) {
		printf("log on\n");
	} else {
		printf("log off\n");
	}
	gf_log_set_callback(NULL, on_gpac_log);
#endif
	dasher = muxer_init(options);

	if (!dasher) {
		return 1;
	}

	application.setDasher(dasher);

    application.Run(hInstance, nCmdShow, dasher);
	/*
KinectSensor sensor;

  foreach (var potentialSensor in KinectSensor.KinectSensors)
  {
    if (potentialSensor.Status == KinectStatus.Connected)
    {
      this.sensor = potentialSensor;
      break;
    }
  }*/
	return 0;
}
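
The on_gpac_log callback registered with gf_log_set_callback is not shown above. A minimal sketch of what it could look like; the gf_log_cbk signature is the one declared in gpac/tools.h, while the body is purely illustrative:

#include <stdio.h>
#include <stdarg.h>
#include <gpac/tools.h>

static void on_gpac_log(void *cbk, GF_LOG_Level level, GF_LOG_Tool tool, const char *fmt, va_list vlist)
{
	/* forward DASH messages to the console opened by RedirectIOToConsole() */
	if (tool == GF_LOG_DASH) {
		vfprintf(stderr, fmt, vlist);
	}
}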
Example #3
static void rewrite_log_tools(u32 tool)
{
	char *logs;
	if (tool == GF_LOG_ALL) {
		gf_log_set_tool_level(GF_LOG_ALL, log_level);
	} else {
		/* toggle: mute the tool if it already logs at log_level,
		   otherwise enable it at log_level */
		if (gf_log_tool_level_on(tool, log_level)) {
			gf_log_set_tool_level(tool, GF_LOG_QUIET);
		} else {
			gf_log_set_tool_level(tool, log_level);
		}
	}
	logs = gf_log_get_tools_levels();
	if (logs) {
		gf_cfg_set_key(user.config, "General", "Logs", logs);
		gf_free(logs);
	}
	setup_logs();
}
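
Because the branch keys off the current state, calling the function twice with the same tool restores its previous setting. A short usage sketch, assuming the same log_level global used above:

rewrite_log_tools(GF_LOG_RTP);	/* RTP was quiet: enable it at log_level */
rewrite_log_tools(GF_LOG_RTP);	/* RTP was on: mute it again (GF_LOG_QUIET) */
rewrite_log_tools(GF_LOG_ALL);	/* set every tool to log_level unconditionally */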
Example #4
Bool visual_2d_terminate_draw(GF_VisualManager *visual, GF_TraverseState *tr_state)
{
	u32 k, i, count, num_nodes, num_changed;
	GF_IRect refreshRect;
	Bool redraw_all;
	Bool hyb_force_redraw=GF_FALSE;
#ifndef GPAC_DISABLE_VRML
	M_Background2D *bck = NULL;
	DrawableContext *bck_ctx = NULL;
#endif
	DrawableContext *ctx;
	struct _drawable_store *it, *prev;
	DrawableContext *first_opaque = NULL;
	Bool has_clear = 0;
	Bool has_changed = 0;
	Bool redraw_all_on_background_change = GF_TRUE;

	/*in direct mode the visual is always redrawn*/
	if (tr_state->immediate_draw) {
		/*flush pending contexts due to overlays*/
		visual_2d_flush_overlay_areas(visual, tr_state);

		visual_2d_release_raster(visual);
		visual_clean_contexts(visual);
		visual->num_nodes_prev_frame = visual->num_nodes_current_frame;
		return 1;
	}

	num_changed = 0;

	/*if the aspect ratio has changed redraw everything*/
	redraw_all = tr_state->invalidate_all;

#ifndef GPAC_DISABLE_3D
	if (visual->compositor->hybrid_opengl && !visual->offscreen) redraw_all_on_background_change = GF_FALSE;
#endif
	/*check for background changes for transparent nodes*/
#ifndef GPAC_DISABLE_VRML
	bck = (M_Background2D*)gf_list_get(visual->back_stack, 0);
	if (bck) {
		if (!bck->isBound) {
			if (visual->last_had_back) {
				if (redraw_all_on_background_change) redraw_all = 1;
				else hyb_force_redraw = 1;
			}
			visual->last_had_back = 0;
		} else {
			bck_ctx = b2d_get_context(bck, visual->back_stack);
			if (!visual->last_had_back || (bck_ctx->flags & CTX_REDRAW_MASK) ) {
				if (redraw_all_on_background_change) redraw_all = 1;
				else hyb_force_redraw = 1;
			}
			visual->last_had_back = (bck_ctx->aspect.fill_texture && !bck_ctx->aspect.fill_texture->transparent) ? 2 : 1;
		}
	} else
#endif
		if (visual->last_had_back) {
			visual->last_had_back = 0;
			if (redraw_all_on_background_change) redraw_all = 1;
			else hyb_force_redraw = 1;
		}

	num_nodes = 0;
	ctx = visual->context;
	while (ctx && ctx->drawable) {
		num_nodes++;

		drawctx_update_info(ctx, visual);
		if (!redraw_all) {
			u32 res;
			assert( gf_irect_inside(&visual->top_clipper, &ctx->bi->clip) );
			res = register_context_rect(&visual->to_redraw, ctx, num_nodes, &first_opaque);
			if (res) {
				num_changed ++;
				if (res==2)
					hyb_force_redraw=GF_TRUE;
			}

		}
		ctx = ctx->next;
	}

	/*garbage collection*/

	/*clear all remaining bounds since last frames (the ones that moved or that are not drawn this frame)*/
	prev = NULL;
	it = visual->prev_nodes;
	while (it) {
		while (drawable_get_previous_bound(it->drawable, &refreshRect, visual)) {
			if (!redraw_all) {
				//assert( gf_irect_inside(&visual->top_clipper, &refreshRect) );
				gf_irect_intersect(&refreshRect, &visual->top_clipper);
				register_dirty_rect(&visual->to_redraw, &refreshRect);
				has_clear=1;
			}
		}
		/*if node is marked as undrawn, remove from visual*/
		if (!(it->drawable->flags & DRAWABLE_DRAWN_ON_VISUAL)) {
			GF_LOG(GF_LOG_DEBUG, GF_LOG_COMPOSE, ("[Visual2D] Node %s no longer on visual - unregistering it\n", gf_node_get_class_name(it->drawable->node)));

			/*remove all bounds info related to this visual and unreg node */
			drawable_reset_bounds(it->drawable, visual);

			it->drawable->flags &= ~DRAWABLE_REGISTERED_WITH_VISUAL;

			if (it->drawable->flags & DRAWABLE_IS_OVERLAY) {
				visual->compositor->video_out->Blit(visual->compositor->video_out, NULL, NULL, NULL, 1);
			}

			if (prev) prev->next = it->next;
			else visual->prev_nodes = it->next;
			if (!it->next) visual->last_prev_entry = prev;
			gf_free(it);
			it = prev ? prev->next : visual->prev_nodes;
		} else {
			prev = it;
			it = it->next;
		}
	}

	if (redraw_all) {
		ra_clear(&visual->to_redraw);
		ra_add(&visual->to_redraw, &visual->surf_rect);
#ifdef TRACK_OPAQUE_REGIONS
		visual->to_redraw.list[0].opaque_node_index=0;
#endif
	} else {
		ra_refresh(&visual->to_redraw);

		if (visual->compositor->debug_defer) {
			visual->ClearSurface(visual, &visual->top_clipper, 0);
		}
	}

	/*nothing to redraw*/
	if (!hyb_force_redraw && ra_is_empty(&visual->to_redraw) ) {
#ifndef GPAC_DISABLE_3D
		//force canvas draw
		visual->nb_objects_on_canvas_since_last_ogl_flush = 1;
#endif
		GF_LOG(GF_LOG_DEBUG, GF_LOG_COMPOSE, ("[Visual2D] No changes found since last frame - skipping redraw\n"));
		goto exit;
	}
	has_changed = 1;
	tr_state->traversing_mode = TRAVERSE_DRAW_2D;

	if (first_opaque && (visual->to_redraw.count==1) && gf_rect_equal(first_opaque->bi->clip, visual->to_redraw.list[0].rect)) {
		visual->has_modif=0;
		goto skip_background;
	}

	/*redraw everything*/
#ifndef GPAC_DISABLE_VRML
	if (bck_ctx) {
		drawable_check_bounds(bck_ctx, visual);
		tr_state->ctx = bck_ctx;
		visual->draw_node_index = 0;

		/*force clearing entire zone, not just viewport, when using color. If texture, we MUST
		use the VP clipper in order to compute offsets when blitting bitmaps*/
		if (bck_ctx->aspect.fill_texture && bck_ctx->aspect.fill_texture->stream) {
			bck_ctx->bi->clip = visual->top_clipper;
		} else {
			bck_ctx->bi->clip = visual->surf_rect;
		}
		bck_ctx->bi->unclip = gf_rect_ft(&bck_ctx->bi->clip);
		bck_ctx->next = visual->context;
		bck_ctx->flags |= CTX_BACKROUND_NOT_LAYER;
		gf_node_traverse(bck_ctx->drawable->node, tr_state);
		bck_ctx->flags &= ~CTX_BACKROUND_NOT_LAYER;
	} else
#endif /*GPAC_DISABLE_VRML*/
	{
		count = visual->to_redraw.count;
		for (k=0; k<count; k++) {
			GF_IRect rc;
			/*opaque area, skip*/
#ifdef TRACK_OPAQUE_REGIONS
			if (visual->to_redraw.list[k].opaque_node_index > 0) continue;
#endif
			rc = visual->to_redraw.list[k].rect;
			visual->ClearSurface(visual, &rc, 0);
		}
#ifndef GPAC_DISABLE_3D
		if (!count && hyb_force_redraw) {
			compositor_2d_hybgl_clear_surface_ex(tr_state->visual, NULL, 0, GF_FALSE);
		}
#endif
	}
	if (!redraw_all && !has_clear) visual->has_modif=0;

skip_background:

#ifndef GPAC_DISABLE_LOG
	if (gf_log_tool_level_on(GF_LOG_COMPOSE, GF_LOG_INFO)) {
		GF_LOG(GF_LOG_INFO, GF_LOG_COMPOSE, ("[Visual2D] Redraw %d / %d nodes (all: %s) - %d dirty rects\n", num_changed, num_nodes, redraw_all ? "yes" : "no", visual->to_redraw.count));
		if (visual->to_redraw.count>1) GF_LOG(GF_LOG_DEBUG, GF_LOG_COMPOSE, ("\n"));

		for (i=0; i<visual->to_redraw.count; i++) {
			GF_LOG(GF_LOG_INFO, GF_LOG_COMPOSE, ("\tDirtyRect #%d: %d:%d@%dx%d\n", i+1, visual->to_redraw.list[i].rect.x, visual->to_redraw.list[i].rect.y, visual->to_redraw.list[i].rect.width, visual->to_redraw.list[i].rect.height));
			assert(visual->to_redraw.list[i].rect.width);
		}
	}
#endif

	visual->draw_node_index = 0;

	ctx = visual->context;
	while (ctx && ctx->drawable) {

		visual->draw_node_index ++;
		tr_state->ctx = ctx;

		/*if overlay we cannot remove the context and cannot draw directly*/
		if (! visual_2d_overlaps_overlay(tr_state->visual, ctx, tr_state)) {

			if (ctx->drawable->flags & DRAWABLE_USE_TRAVERSE_DRAW) {
				gf_node_traverse(ctx->drawable->node, tr_state);
			} else {
				drawable_draw(ctx->drawable, tr_state);
			}
		}
		ctx = ctx->next;
	}
	/*flush pending contexts due to overlays*/
	visual_2d_flush_overlay_areas(visual, tr_state);
#ifndef GPAC_DISABLE_VRML
	if (bck_ctx) bck_ctx->next = NULL;
#endif

	if (visual->direct_flush) {
		GF_DirtyRectangles dr;
		u32 i;
		dr.count = visual->to_redraw.count;
		dr.list = gf_malloc(sizeof(GF_IRect)*dr.count);
		for (i=0; i<dr.count; i++) {
			dr.list[i] = visual->to_redraw.list[i].rect;
		}
		visual->compositor->video_out->FlushRectangles(visual->compositor->video_out, &dr);
		visual->compositor->skip_flush=1;
		gf_free(dr.list);
	}

exit:
	/*clear dirty rects*/
	ra_clear(&visual->to_redraw);
	visual_2d_release_raster(visual);
	visual_clean_contexts(visual);
	visual->num_nodes_prev_frame = visual->num_nodes_current_frame;
	return has_changed;
}
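
The garbage-collection pass above (the it/prev walk over visual->prev_nodes) is the classic unlink idiom for a singly linked list with a tracked predecessor. A self-contained sketch of the same idiom, stripped of the compositor specifics:

#include <stdlib.h>

typedef struct node {
	struct node *next;
	int drawn;	/* stand-in for the DRAWABLE_DRAWN_ON_VISUAL flag */
} node;

/* remove every node not drawn this frame, keeping prev one step behind it */
static void prune_undrawn(node **head)
{
	node *prev = NULL, *it = *head;
	while (it) {
		if (!it->drawn) {
			node *next = it->next;
			if (prev) prev->next = next;
			else *head = next;
			free(it);
			it = next;
		} else {
			prev = it;
			it = it->next;
		}
	}
}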
Example #5
int dc_video_decoder_read(VideoInputFile *video_input_file, VideoInputData *video_input_data, int source_number, int use_source_timing, int is_live_capture, const int *exit_signal_addr)
{
#ifdef DASHCAST_DEBUG_TIME_
	struct timeval start, end;
	long elapsed_time;
#endif
	AVPacket packet;
	int ret, got_frame, already_locked = 0;
	AVCodecContext *codec_ctx;
	VideoDataNode *video_data_node;

	/* Get a pointer to the codec context for the video stream */
	codec_ctx = video_input_file->av_fmt_ctx->streams[video_input_file->vstream_idx]->codec;

	/* Read frames */
	while (1) {
#ifdef DASHCAST_DEBUG_TIME_
		gf_gettimeofday(&start, NULL);
#endif
		memset(&packet, 0, sizeof(AVPacket));
		ret = av_read_frame(video_input_file->av_fmt_ctx, &packet);
#ifdef DASHCAST_DEBUG_TIME_
		gf_gettimeofday(&end, NULL);
		elapsed_time = (end.tv_sec * 1000000 + end.tv_usec) - (start.tv_sec * 1000000 + start.tv_usec);
		fprintf(stdout, "fps: %f\n", 1000000.0/elapsed_time);
#endif

		/* If we also demux for the audio thread, hand non-video packets (and EOF) over to it */
		if (video_input_file->av_fmt_ctx_ref_cnt && ((packet.stream_index != video_input_file->vstream_idx) || (ret == AVERROR_EOF))) {
			AVPacket *packet_copy = NULL;
			if (ret != AVERROR_EOF) {
				GF_SAFEALLOC(packet_copy, AVPacket);
				memcpy(packet_copy, &packet, sizeof(AVPacket));
			}

			assert(video_input_file->av_pkt_list);
			gf_mx_p(video_input_file->av_pkt_list_mutex);
			gf_list_add(video_input_file->av_pkt_list, packet_copy);
			gf_mx_v(video_input_file->av_pkt_list_mutex);

			if (ret != AVERROR_EOF) {
				continue;
			}
		}

		if (ret == AVERROR_EOF) {
			if (video_input_file->mode == LIVE_MEDIA && video_input_file->no_loop == 0) {
				av_seek_frame(video_input_file->av_fmt_ctx, video_input_file->vstream_idx, 0, 0);
				av_free_packet(&packet);
				continue;
			}

			dc_producer_lock(&video_input_data->producer, &video_input_data->circular_buf);
			dc_producer_unlock_previous(&video_input_data->producer, &video_input_data->circular_buf);
			video_data_node = (VideoDataNode *) dc_producer_produce(&video_input_data->producer, &video_input_data->circular_buf);
			video_data_node->source_number = source_number;
			/* Flush decoder */
			memset(&packet, 0, sizeof(AVPacket));
#ifndef FF_API_AVFRAME_LAVC
			avcodec_get_frame_defaults(video_data_node->vframe);
#else
			av_frame_unref(video_data_node->vframe);
#endif

			avcodec_decode_video2(codec_ctx, video_data_node->vframe, &got_frame, &packet);
			if (got_frame) {
				dc_producer_advance(&video_input_data->producer, &video_input_data->circular_buf);
				return 0;
			}

			dc_producer_end_signal(&video_input_data->producer, &video_input_data->circular_buf);
			dc_producer_unlock(&video_input_data->producer, &video_input_data->circular_buf);
			return -2;
		}
		else if (ret < 0)
		{
			GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Cannot read video frame.\n"));
			continue;
		}

		/* Is this a packet from the video stream? */
		if (packet.stream_index == video_input_file->vstream_idx) {
			u32 nb_retry = 10;
			while (!already_locked) {
				if (dc_producer_lock(&video_input_data->producer, &video_input_data->circular_buf) < 0) {
					if (!nb_retry) break;
					gf_sleep(10);
					nb_retry--;
					continue;
				}
				dc_producer_unlock_previous(&video_input_data->producer, &video_input_data->circular_buf);
				already_locked = 1;
			}
			if (!already_locked) {
				GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("[dashcast] Live system dropped a video frame\n"));
				continue;
			}

			video_data_node = (VideoDataNode *) dc_producer_produce(&video_input_data->producer, &video_input_data->circular_buf);
			video_data_node->source_number = source_number;

			/* Set video frame to default */
#ifndef FF_API_AVFRAME_LAVC
			avcodec_get_frame_defaults(video_data_node->vframe);
#else
			av_frame_unref(video_data_node->vframe);
#endif

			/* Decode video frame */
			if (avcodec_decode_video2(codec_ctx, video_data_node->vframe, &got_frame, &packet) < 0) {
				av_free_packet(&packet);
				GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Error while decoding video.\n"));
				dc_producer_end_signal(&video_input_data->producer, &video_input_data->circular_buf);
				dc_producer_unlock(&video_input_data->producer, &video_input_data->circular_buf);
				return -1;
			}

			/* Did we get a video frame? */
			if (got_frame) {
				if (use_source_timing && is_live_capture) {
					u64 pts;
					if (video_input_file->pts_init == 0) {
						video_input_file->pts_init = 1;
						video_input_file->utc_at_init = gf_net_get_utc();
						video_input_file->first_pts = packet.pts;
						video_input_file->prev_pts = 0;
						video_input_data->frame_duration = 0;
					}
					if (video_input_file->pts_init && (video_input_file->pts_init!=3) ) {
						if (packet.pts==AV_NOPTS_VALUE) {
							video_input_file->pts_init=1;
						} else if (video_input_file->pts_init==1) {
							video_input_file->pts_init=2;
							video_input_file->pts_dur_estimate = packet.pts;
						} else if (video_input_file->pts_init==2) {
							video_input_file->pts_init=3;
							video_input_data->frame_duration = packet.pts - video_input_file->pts_dur_estimate;
							video_input_file->sync_tolerance = 9*video_input_data->frame_duration/5;
							//TODO - check with audio if sync is OK
						}
					}
					
					//move to 0-based PTS
					if (packet.pts!=AV_NOPTS_VALUE) {
						pts = packet.pts - video_input_file->first_pts;
					} else {
						pts = video_input_file->prev_pts + video_input_data->frame_duration;
					}					

					//check for dropped frames (diagnostic currently disabled by the "0 &&" guard)
#ifndef GPAC_DISABLE_LOG
					if (0 && gf_log_tool_level_on(GF_LOG_DASH, GF_LOG_WARNING)) {
						if (pts - video_input_file->prev_pts > video_input_file->sync_tolerance) {
							u32 nb_lost=0;
							while (video_input_file->prev_pts + video_input_data->frame_duration + video_input_file->sync_tolerance < pts) {
								video_input_file->prev_pts += video_input_data->frame_duration;
								nb_lost++;
							}
							if (nb_lost) {
								GF_LOG(GF_LOG_WARNING, GF_LOG_DASH, ("[DashCast] Capture lost %d video frames \n", nb_lost));
							}
						}
					}
#endif

					video_input_file->prev_pts = pts;
					video_data_node->vframe->pts = pts;
				}

				if (video_data_node->vframe->pts==AV_NOPTS_VALUE) {
					if (!use_source_timing) {
						video_data_node->vframe->pts = video_input_file->frame_decoded;
					} else {
						video_data_node->vframe->pts = video_data_node->vframe->pkt_pts;
					}
				}
				video_input_file->frame_decoded++;

				GF_LOG(GF_LOG_DEBUG, GF_LOG_DASH, ("[DashCast] Video Frame TS "LLU" decoded at UTC "LLU" ms\n", video_data_node->vframe->pts, gf_net_get_utc() ));

				/* For a decode/encode pipeline we can free this memory now. But when
				   the input is raw and needs no decoding, the packet data is passed
				   on directly as the decoded frame, so we must wait until rescaling
				   is done before freeing it. */

				if (codec_ctx->codec->id == CODEC_ID_RAWVIDEO) {
					video_data_node->nb_raw_frames_ref = video_input_file->nb_consumers;

					video_data_node->raw_packet = packet;

					dc_producer_advance(&video_input_data->producer, &video_input_data->circular_buf);
					while (video_data_node->nb_raw_frames_ref && ! *exit_signal_addr) {
						gf_sleep(0);
					}
				} else {
					dc_producer_advance(&video_input_data->producer, &video_input_data->circular_buf);
					av_free_packet(&packet);
				}
				return 0;

			}
		}

		/* Free the packet that was allocated by av_read_frame */
		av_free_packet(&packet);
	}

	GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Unknown error while reading video frame.\n"));
	return -1;
}
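
The pts_init state machine above estimates the capture's frame duration from the gap between the first two timestamped packets, then derives the drop-detection threshold as 9/5 of one frame. A condensed, hypothetical restatement of that bookkeeping (names invented for illustration):

#include <stdint.h>

typedef struct {
	int state;	/* 0: nothing seen, 1: first pts stored, 2: duration known */
	int64_t first_pts, frame_duration, sync_tolerance;
} PtsEstimator;

static void pts_feed(PtsEstimator *e, int64_t pkt_pts)
{
	if (e->state == 0) {
		e->state = 1;
		e->first_pts = pkt_pts;	/* later PTS values are rebased against this */
	} else if (e->state == 1) {
		e->state = 2;
		e->frame_duration = pkt_pts - e->first_pts;
		/* a gap wider than 9/5 of a frame counts as dropped frames */
		e->sync_tolerance = 9 * e->frame_duration / 5;
	}
}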
Example #6
static GF_Err HEVC_ConfigureStream(HEVCDec *ctx, GF_ESD *esd)
{
	u32 i, j;
	GF_HEVCConfig *cfg = NULL;
	ctx->ES_ID = esd->ESID;
	ctx->width = ctx->height = ctx->out_size = ctx->luma_bpp = ctx->chroma_bpp = ctx->chroma_format_idc = 0;

	ctx->nb_layers = 1;

	if (esd->decoderConfig->decoderSpecificInfo && esd->decoderConfig->decoderSpecificInfo->data) {
		HEVCState hevc;
		memset(&hevc, 0, sizeof(HEVCState));

		cfg = gf_odf_hevc_cfg_read(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength, GF_FALSE);
		if (!cfg) return GF_NON_COMPLIANT_BITSTREAM;
		ctx->nalu_size_length = cfg->nal_unit_size;

		for (i=0; i< gf_list_count(cfg->param_array); i++) {
			GF_HEVCParamArray *ar = (GF_HEVCParamArray *)gf_list_get(cfg->param_array, i);
			for (j=0; j< gf_list_count(ar->nalus); j++) {
				GF_AVCConfigSlot *sl = (GF_AVCConfigSlot *)gf_list_get(ar->nalus, j);
				s32 idx;
				u16 hdr = sl->data[0] << 8 | sl->data[1];

				if (ar->type==GF_HEVC_NALU_SEQ_PARAM) {
					idx = gf_media_hevc_read_sps(sl->data, sl->size, &hevc);
					if (idx < 0) continue;	/* ignore an SPS the parser rejects */
					ctx->width = MAX(hevc.sps[idx].width, ctx->width);
					ctx->height = MAX(hevc.sps[idx].height, ctx->height);
					ctx->luma_bpp = MAX(hevc.sps[idx].bit_depth_luma, ctx->luma_bpp);
					ctx->chroma_bpp = MAX(hevc.sps[idx].bit_depth_chroma, ctx->chroma_bpp);
					ctx->chroma_format_idc = hevc.sps[idx].chroma_format_idc;
					
					if (hdr & 0x1f8) {
						ctx->nb_layers ++;
					}
				}
				else if (ar->type==GF_HEVC_NALU_VID_PARAM) {
					gf_media_hevc_read_vps(sl->data, sl->size, &hevc);
				}
				else if (ar->type==GF_HEVC_NALU_PIC_PARAM) {
					gf_media_hevc_read_pps(sl->data, sl->size, &hevc);
				}
			}
		}
		gf_odf_hevc_cfg_del(cfg);
	} else {
		ctx->nalu_size_length = 0;
	}

	ctx->openHevcHandle = libOpenHevcInit(ctx->nb_threads, ctx->threading_type);

#ifndef GPAC_DISABLE_LOG
	if (gf_log_tool_level_on(GF_LOG_CODEC, GF_LOG_DEBUG) ) {
		libOpenHevcSetDebugMode(ctx->openHevcHandle, 1);
	}
#endif


	if (esd->decoderConfig && esd->decoderConfig->decoderSpecificInfo && esd->decoderConfig->decoderSpecificInfo->data) {
		libOpenHevcSetActiveDecoders(ctx->openHevcHandle, 1/*ctx->nb_layers*/);
		libOpenHevcSetViewLayers(ctx->openHevcHandle, ctx->nb_layers-1);

		libOpenHevcCopyExtraData(ctx->openHevcHandle, (u8 *) esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength);
	} else {
		//hardcoded values: 2 layers max, display layer 0
		libOpenHevcSetActiveDecoders(ctx->openHevcHandle, 1/*ctx->nb_layers*/);
		libOpenHevcSetViewLayers(ctx->openHevcHandle, 0/*ctx->nb_layers-1*/);
	}

	libOpenHevcStartDecoder(ctx->openHevcHandle);


	ctx->stride = ((ctx->luma_bpp==8) && (ctx->chroma_bpp==8)) ? ctx->width : ctx->width * 2;
	if (ctx->chroma_format_idc == 1) { // 4:2:0
		ctx->out_size = ctx->stride * ctx->height * 3 / 2;
	}
	else if (ctx->chroma_format_idc == 2) { // 4:2:2
		ctx->out_size = ctx->stride * ctx->height * 2;
	}
	else if (ctx->chroma_format_idc == 3) { // 4:4:4
		ctx->out_size = ctx->stride * ctx->height * 3;
	}
	else {
		return GF_NOT_SUPPORTED;
	}

	if (ctx->output_as_8bit && (ctx->stride>ctx->width)) {
		ctx->stride /=2;
		ctx->out_size /= 2;
		ctx->chroma_bpp = ctx->luma_bpp = 8;
		ctx->conv_to_8bit = GF_TRUE;
		ctx->pack_mode = GF_FALSE;
	}
	ctx->dec_frames = 0;
	return GF_OK;
}
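
As a worked check of the sizing logic: an 8-bit 1920x1080 4:2:0 stream gives stride = 1920 and out_size = 1920 * 1080 * 3 / 2 = 3110400 bytes. The same computation as a standalone sketch (the original doubles the stride when either plane exceeds 8 bits; a single bpp parameter is used here for brevity):

#include <stdio.h>

static unsigned out_size(unsigned width, unsigned height, unsigned bpp, unsigned chroma_idc)
{
	unsigned stride = (bpp == 8) ? width : width * 2;
	switch (chroma_idc) {
	case 1: return stride * height * 3 / 2;	/* 4:2:0 */
	case 2: return stride * height * 2;	/* 4:2:2 */
	case 3: return stride * height * 3;	/* 4:4:4 */
	default: return 0;	/* unsupported, mirrors GF_NOT_SUPPORTED above */
	}
}

int main(void)
{
	printf("%u\n", out_size(1920, 1080, 8, 1));	/* prints 3110400 */
	return 0;
}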