/* Legacy variant of gf_mo_get_visual_info() (without the is_flipped out
 * parameter). Queries the media object's decoder for its visual properties;
 * every out pointer is optional (pass NULL to skip that query).
 * Returns 0 when the object is neither video nor text.
 * NOTE(review): this definition is truncated in this chunk - the trailing
 * return statement and closing brace are outside the visible range. */
GF_EXPORT
Bool gf_mo_get_visual_info(GF_MediaObject *mo, u32 *width, u32 *height, u32 *stride, u32 *pixel_ar, u32 *pixelFormat)
{
	GF_CodecCapability cap;
	/*visual info only makes sense for video and (rendered) text objects*/
	if ((mo->type != GF_MEDIA_OBJECT_VIDEO) && (mo->type!=GF_MEDIA_OBJECT_TEXT)) return 0;

	if (width) {
		cap.CapCode = GF_CODEC_WIDTH;
		gf_codec_get_capability(mo->odm->codec, &cap);
		*width = cap.cap.valueInt;
	}
	if (height) {
		cap.CapCode = GF_CODEC_HEIGHT;
		gf_codec_get_capability(mo->odm->codec, &cap);
		*height = cap.cap.valueInt;
	}
	/*text objects expose only width/height - no stride, pixel format or PAR*/
	if (mo->type==GF_MEDIA_OBJECT_TEXT) return 1;

	if (stride) {
		cap.CapCode = GF_CODEC_STRIDE;
		gf_codec_get_capability(mo->odm->codec, &cap);
		*stride = cap.cap.valueInt;
	}
	if (pixelFormat) {
		cap.CapCode = GF_CODEC_PIXEL_FORMAT;
		gf_codec_get_capability(mo->odm->codec, &cap);
		*pixelFormat = cap.cap.valueInt;
	}
	/*get PAR settings*/
	if (pixel_ar) {
		cap.CapCode = GF_CODEC_PAR;
		gf_codec_get_capability(mo->odm->codec, &cap);
		*pixel_ar = cap.cap.valueInt;
		/*PAR is packed as (hSpacing<<16)|vSpacing: a zero half means "unset"*/
		if (! (*pixel_ar & 0x0000FFFF)) *pixel_ar = 0;
		if (! (*pixel_ar & 0xFFFF0000)) *pixel_ar = 0;
		/**/
		/*decoder did not report a PAR: fall back to querying the network service*/
		if (! *pixel_ar) {
			GF_Channel *ch;
			GF_NetworkCommand com;
			com.base.command_type = GF_NET_CHAN_GET_PIXEL_AR;
			ch = gf_list_get(mo->odm->channels, 0);
			if (!ch) return 0;
			com.base.on_channel = ch;
			com.par.hSpacing = com.par.vSpacing = 0;
			if (gf_term_service_command(ch->service, &com) == GF_OK) {
				/*values wider than 16 bits cannot be packed - drop low precision*/
				if ((com.par.hSpacing>65535) || (com.par.vSpacing>65535)) {
					com.par.hSpacing>>=16;
					com.par.vSpacing>>=16;
				}
				if (com.par.hSpacing|| com.par.vSpacing)
					*pixel_ar = (com.par.hSpacing<<16) | com.par.vSpacing;
			}
		}
/* Resizes the decoder's composition (output) buffer to NewSize bytes per unit.
 * For video (no bytes_per_sec yet), a simple in-place resize is done. For
 * audio, the buffer is re-created with enough units to cover the HW audio
 * buffer length at the stream's byte rate.
 * Returns GF_BAD_PARAM if dec or its composition buffer is missing, GF_OK
 * otherwise. */
static GF_Err ResizeCompositionBuffer(GF_Codec *dec, u32 NewSize)
{
	if (!dec || !dec->CB) return GF_BAD_PARAM;

	/*update config*/
	gf_mo_update_caps(dec->odm->mo);

	/*bytes per sec not available: either video or audio not configured*/
	if (!dec->bytes_per_sec) {
		if (NewSize && (NewSize != dec->CB->UnitSize) ) {
			GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[ODM] Resizing composition buffer for codec %s - %d bytes per unit\n", dec->decio->module_name, NewSize));
			gf_cm_resize(dec->CB, NewSize);
		}
	}
	/*audio: make sure we have enough data in CM to entirely fill the HW audio buffer...*/
	else {
		u32 unit_size, audio_buf_len, unit_count;
		GF_CodecCapability cap;
		unit_size = NewSize;
		/*a bit ugly, make some extra provision for speed >1. this is the drawback of working with pre-allocated memory
		for composition, we may get into cases where there will never be enough data for high speeds...
		FIXME - WE WILL NEED TO MOVE TO DYNAMIC CU BLOCKS IN ORDER TO SUPPORT ANY SPEED, BUT WHAT IS THE IMPACT
		FOR LOW RESOURCES DEVICES ??*/
//		audio_buf_len = 1000;
		audio_buf_len = 200;

		cap.CapCode = GF_CODEC_BUFFER_MAX;
		gf_codec_get_capability(dec, &cap);
		unit_count = cap.cap.valueInt;
		/*at least 2 units for dec and render ...*/
		if (unit_count<2) unit_count = 2;
		/*FIX(review): guard against unit_size==0 - the original loop never
		terminated in that case since 0*count*1000 can never reach
		bytes_per_sec*audio_buf_len*/
		if (unit_size) {
			while (unit_size*unit_count*1000 < dec->bytes_per_sec*audio_buf_len) unit_count++;
		}
#ifdef __SYMBIAN32__
		/*FIXME - symbian tests*/
		unit_count = 10;
#endif
		gf_cm_reinit(dec->CB, unit_size, unit_count);
		/*minimal fullness for scheduling: one third of the units, at least 1*/
		dec->CB->Min = unit_count/3;
		if (!dec->CB->Min) dec->CB->Min = 1;
	}
	/*FIX(review): parentscene may be NULL (eg during scene teardown) - guard
	before dereferencing it*/
	if ((dec->type==GF_STREAM_VISUAL) && dec->odm->parentscene && dec->odm->parentscene->is_dynamic_scene) {
		/*dynamic scenes resize their output to the video size*/
		gf_scene_force_size_to_video(dec->odm->parentscene, dec->odm->mo);
	}
	return GF_OK;
}
/* Queries the media object's decoder for its visual properties: dimensions,
 * stride, pixel aspect ratio (packed (hSpacing<<16)|vSpacing), pixel format
 * and vertical-flip flag. Every out pointer is optional (pass NULL to skip).
 * Returns GF_FALSE when the object is neither video nor text.
 * NOTE(review): this definition is truncated in this chunk - the trailing
 * return statement and closing brace are outside the visible range. */
GF_EXPORT
Bool gf_mo_get_visual_info(GF_MediaObject *mo, u32 *width, u32 *height, u32 *stride, u32 *pixel_ar, u32 *pixelFormat, Bool *is_flipped)
{
	GF_CodecCapability cap;
	/*visual info only makes sense for video and (rendered) text objects*/
	if ((mo->type != GF_MEDIA_OBJECT_VIDEO) && (mo->type!=GF_MEDIA_OBJECT_TEXT)) return GF_FALSE;

	if (width) {
		cap.CapCode = GF_CODEC_WIDTH;
		gf_codec_get_capability(mo->odm->codec, &cap);
		*width = cap.cap.valueInt;
	}
	if (height) {
		cap.CapCode = GF_CODEC_HEIGHT;
		gf_codec_get_capability(mo->odm->codec, &cap);
		*height = cap.cap.valueInt;
	}
	/*text objects expose only width/height - no flip, stride, format or PAR*/
	if (mo->type==GF_MEDIA_OBJECT_TEXT) return GF_TRUE;

	if (is_flipped) {
		cap.CapCode = GF_CODEC_FLIP;
		/*pre-clear: not all decoders answer this capability*/
		cap.cap.valueInt = 0;
		gf_codec_get_capability(mo->odm->codec, &cap);
		*is_flipped = cap.cap.valueInt ? GF_TRUE : GF_FALSE;
	}
	if (stride) {
		cap.CapCode = GF_CODEC_STRIDE;
		gf_codec_get_capability(mo->odm->codec, &cap);
		*stride = cap.cap.valueInt;
	}
	if (pixelFormat) {
		cap.CapCode = GF_CODEC_PIXEL_FORMAT;
		gf_codec_get_capability(mo->odm->codec, &cap);
		*pixelFormat = cap.cap.valueInt;

		/*in a dynamic (auto-generated) scene, tune the background color of the
		generated Background2D depending on whether the video carries alpha.
		NOTE(review): parentscene is dereferenced without a NULL check here*/
		if (mo->odm && mo->odm->parentscene->is_dynamic_scene) {
#ifndef GPAC_DISABLE_VRML
			const char *name = gf_node_get_name(gf_event_target_get_node(gf_mo_event_target_get(mo, 0)));
			if (name && !strcmp(name, "DYN_VIDEO")) {
				const char *opt;
				u32 r, g, b, a;
				M_Background2D *back = (M_Background2D *) gf_sg_find_node_by_name(mo->odm->parentscene->graph, "DYN_BACK");
				if (back) {
					switch (cap.cap.valueInt) {
					/*alpha formats: show the configured background color through*/
					case GF_PIXEL_ARGB:
					case GF_PIXEL_RGBA:
					case GF_PIXEL_YUVA:
						opt = gf_cfg_get_key(mo->odm->term->user->config, "Compositor", "BackColor");
						if (!opt) {
							gf_cfg_set_key(mo->odm->term->user->config, "Compositor", "BackColor", "FF999999");
							opt = "FF999999";
						}
						/*BackColor is AARRGGBB hex*/
						sscanf(opt, "%02X%02X%02X%02X", &a, &r, &g, &b);
						back->backColor.red = INT2FIX(r)/255;
						back->backColor.green = INT2FIX(g)/255;
						back->backColor.blue = INT2FIX(b)/255;
						break;
					/*opaque video: black background*/
					default:
						back->backColor.red = back->backColor.green = back->backColor.blue = 0;
						break;
					}
					gf_node_dirty_set((GF_Node *)back, 0, GF_TRUE);
				}
			}
#endif
		}
	}
	/*get PAR settings*/
	if (pixel_ar) {
		cap.CapCode = GF_CODEC_PAR;
		gf_codec_get_capability(mo->odm->codec, &cap);
		*pixel_ar = cap.cap.valueInt;
		/*PAR is packed as (hSpacing<<16)|vSpacing: a zero half means "unset"*/
		if (! (*pixel_ar & 0x0000FFFF)) *pixel_ar = 0;
		if (! (*pixel_ar & 0xFFFF0000)) *pixel_ar = 0;
		/**/
		/*decoder did not report a PAR: fall back to querying the network service*/
		if (! *pixel_ar) {
			GF_Channel *ch;
			GF_NetworkCommand com;
			com.base.command_type = GF_NET_CHAN_GET_PIXEL_AR;
			ch = (GF_Channel *)gf_list_get(mo->odm->channels, 0);
			if (!ch) return GF_FALSE;
			com.base.on_channel = ch;
			com.par.hSpacing = com.par.vSpacing = 0;
			if (gf_term_service_command(ch->service, &com) == GF_OK) {
				/*values wider than 16 bits cannot be packed - drop low precision*/
				if ((com.par.hSpacing>65535) || (com.par.vSpacing>65535)) {
					com.par.hSpacing>>=16;
					com.par.vSpacing>>=16;
				}
				if (com.par.hSpacing|| com.par.vSpacing)
					*pixel_ar = (com.par.hSpacing<<16) | com.par.vSpacing;
			}
		}
/* Registers a codec with the terminal's media manager. Threaded codecs
 * (audio, or codecs requesting a thread, subject to the terminal's
 * single/multi-thread flags) get their own thread+mutex and are simply
 * appended; non-threaded codecs are inserted into term->codecs sorted by
 * decreasing priority, audio before video within the same priority level.
 * Re-registering an already-known codec is a no-op. */
void gf_term_add_codec(GF_Terminal *term, GF_Codec *codec)
{
	u32 i, count;
	Bool locked;
	Bool threaded;
	CodecEntry *cd;
	CodecEntry *ptr, *next;
	GF_CodecCapability cap;
	assert(codec);

	GF_LOG(GF_LOG_DEBUG, GF_LOG_MEDIA, ("[Terminal] Registering codec %s\n", codec->decio ? codec->decio->module_name : "RAW"));

	/*caution: the mutex can be grabbed by a decoder waiting for a mutex owned by the calling thread
	this happens when several scene codecs are running concurently and triggering play/pause on media*/
	locked = gf_mx_try_lock(term->mm_mx);

	/*already registered: nothing to do*/
	cd = mm_get_codec(term->codecs, codec);
	if (cd) goto exit;

	GF_SAFEALLOC(cd, CodecEntry);
	cd->dec = codec;
	if (!cd->dec->Priority) cd->dec->Priority = 1;

	/*we force audio codecs to be threaded in free mode, so that we avoid waiting in the audio renderer
	if another decoder is locking the main mutex - this can happen when the audio decoder is running late*/
	if (codec->type==GF_STREAM_AUDIO) {
		threaded = 1;
	} else {
		cap.CapCode = GF_CODEC_WANTS_THREAD;
		cap.cap.valueInt = 0;
		gf_codec_get_capability(codec, &cap);
		threaded = cap.cap.valueInt;
	}

	if (threaded) cd->flags |= GF_MM_CE_REQ_THREAD;

	/*terminal-level threading policy overrides the codec's own request*/
	if (term->flags & GF_TERM_MULTI_THREAD) {
		if ((codec->type==GF_STREAM_AUDIO) || (codec->type==GF_STREAM_VISUAL)) threaded = 1;
	} else if (term->flags & GF_TERM_SINGLE_THREAD) {
		threaded = 0;
	}
	/*raw media never runs on its own thread*/
	if (codec->flags & GF_ESM_CODEC_IS_RAW_MEDIA)
		threaded = 0;

	if (threaded) {
		cd->thread = gf_th_new(cd->dec->decio->module_name);
		cd->mx = gf_mx_new(cd->dec->decio->module_name);
		cd->flags |= GF_MM_CE_THREADED;
		gf_list_add(term->codecs, cd);
		goto exit;
	}

	//add codec 1- per priority 2- per type, audio being first
	//priorities inherits from Systems (5bits) so range from 0 to 31
	//we sort from MAX to MIN
	count = gf_list_count(term->codecs);
	for (i=0; i<count; i++) {
		ptr = (CodecEntry*)gf_list_get(term->codecs, i);
		/*threaded entries are not part of the sorted region*/
		if (ptr->flags & GF_MM_CE_THREADED) continue;

		//higher priority, continue
		if (ptr->dec->Priority > codec->Priority) continue;
		//same priority, put audio first
		if (ptr->dec->Priority == codec->Priority) {
			//we insert audio (0x05) before video (0x04)
			if (ptr->dec->type < codec->type) {
				gf_list_insert(term->codecs, cd, i);
				goto exit;
			}
			//same prior, same type: insert after
			if (ptr->dec->type == codec->type) {
				if (i+1==count) {
					gf_list_add(term->codecs, cd);
				} else {
					gf_list_insert(term->codecs, cd, i+1);
				}
				goto exit;
			}
			//we insert video (0x04) after audio (0x05) if next is not audio
			//last one
			if (i+1 == count) {
				gf_list_add(term->codecs, cd);
				goto exit;
			}
			next = (CodecEntry*)gf_list_get(term->codecs, i+1);
			//# priority level, insert
			if ((next->flags & GF_MM_CE_THREADED) || (next->dec->Priority != codec->Priority)) {
				gf_list_insert(term->codecs, cd, i+1);
				goto exit;
			}
			//same priority level and at least one after : continue
			continue;
		}
		/*strictly lower priority entry reached: insert before it*/
		gf_list_insert(term->codecs, cd, i);
		goto exit;
	}
	//if we got here, first in list
	gf_list_add(term->codecs, cd);

exit:
	if (locked) gf_mx_v(term->mm_mx);
	return;
}
/* Attaches an elementary-stream channel to a codec: resolves the decoder
 * specific info (possibly overriding the ESD's DSI with one fetched from the
 * network service), attaches the stream to the decoder module, (re)creates
 * the composition buffer sized from the codec's capabilities, configures the
 * network channel, and finally inserts the channel into codec->inChannels
 * ordered so that base layers come before the layers depending on them.
 * Returns the decoder's AttachStream error on failure, otherwise the result
 * of the list insertion. */
GF_Err gf_codec_add_channel(GF_Codec *codec, GF_Channel *ch)
{
	GF_Err e;
	GF_NetworkCommand com;
	GF_Channel *a_ch;
	char *dsi;
	u32 dsiSize, CUsize, i;
	GF_CodecCapability cap;
	u32 min, max;

	/*only for valid codecs (eg not OCR)*/
	if (codec->decio) {
		com.get_dsi.dsi = NULL;
		dsi = NULL;
		dsiSize = 0;
		if (ch->esd->decoderConfig->upstream) codec->flags |= GF_ESM_CODEC_HAS_UPSTREAM;

		if (ch->esd->decoderConfig->decoderSpecificInfo) {
			dsi = ch->esd->decoderConfig->decoderSpecificInfo->data;
			dsiSize = ch->esd->decoderConfig->decoderSpecificInfo->dataLength;
		}
		/*For objects declared in OD stream, override with network DSI if any*/
		if (ch->service && !(ch->odm->flags & GF_ODM_NOT_IN_OD_STREAM) ) {
			com.command_type = GF_NET_CHAN_GET_DSI;
			com.base.on_channel = ch;
			e = gf_term_service_command(ch->service, &com);
			if (!e && com.get_dsi.dsi) {
				dsi = com.get_dsi.dsi;
				dsiSize = com.get_dsi.dsi_len;

				/*ownership transfer: the ESD takes the service-provided buffer,
				the previous DSI buffer is released*/
				if (ch->esd->decoderConfig->decoderSpecificInfo->data) gf_free(ch->esd->decoderConfig->decoderSpecificInfo->data);
				ch->esd->decoderConfig->decoderSpecificInfo->data = com.get_dsi.dsi;
				ch->esd->decoderConfig->decoderSpecificInfo->dataLength = com.get_dsi.dsi_len;
			}
		}

		GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[Codec] Attaching stream %d to codec %s\n", ch->esd->ESID, codec->decio->module_name));

		/*lock the channel before setup in case we are using direct_decode */
		gf_mx_p(ch->mx);
		e = codec->decio->AttachStream(codec->decio, ch->esd);
		gf_mx_v(ch->mx);

		/*RVC config is consumed by AttachStream - drop it*/
		if (ch->esd->decoderConfig && ch->esd->decoderConfig->rvc_config) {
			gf_odf_desc_del((GF_Descriptor *)ch->esd->decoderConfig->rvc_config);
			ch->esd->decoderConfig->rvc_config = NULL;
		}

		if (e) {
			GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[Codec] Attach Stream failed %s\n", gf_error_to_string(e) ));
			return e;
		}

		/*ask codec for desired output capacity - note this may be 0 if stream is not yet configured*/
		cap.CapCode = GF_CODEC_OUTPUT_SIZE;
		gf_codec_get_capability(codec, &cap);
		/*unit size changed: destroy the composition buffer, it is rebuilt below*/
		if (codec->CB && (cap.cap.valueInt != codec->CB->UnitSize)) {
			gf_cm_del(codec->CB);
			codec->CB = NULL;
		}
		CUsize = cap.cap.valueInt;

		/*get desired amount of units and minimal fullness (used for scheduling)*/
		switch(codec->type) {
		case GF_STREAM_VISUAL:
		case GF_STREAM_AUDIO:
			cap.CapCode = GF_CODEC_BUFFER_MIN;
			gf_codec_get_capability(codec, &cap);
			min = cap.cap.valueInt;
			cap.CapCode = GF_CODEC_BUFFER_MAX;
			gf_codec_get_capability(codec, &cap);
			max = cap.cap.valueInt;
			break;
		case GF_STREAM_ND_SUBPIC:
			max = 1;
			min = 0;
			break;
		default:
			min = max = 0;
		}
		/*audio needs at least 2 units (one decoding, one rendering)*/
		if ((codec->type==GF_STREAM_AUDIO) && (max<2)) max = 2;

		/*setup CB*/
		if (!codec->CB && max) {
			if (codec->flags & GF_ESM_CODEC_IS_RAW_MEDIA) {
				max = 1;
				/*create a semaphore in non-notified stage*/
				codec->odm->raw_frame_sema = gf_sema_new(1, 0);
			}
			GF_LOG(GF_LOG_DEBUG, GF_LOG_CODEC, ("[ODM] Creating composition buffer for codec %s - %d units %d bytes each\n", codec->decio->module_name, max, CUsize));
			codec->CB = gf_cm_new(CUsize, max, (codec->flags & GF_ESM_CODEC_IS_RAW_MEDIA) ? 1 : 0);
			codec->CB->Min = min;
			codec->CB->odm = codec->odm;
		}

		if (codec->CB) {
			/*check re-ordering - set by default on all codecs*/
			codec->is_reordering = 1;
			cap.CapCode = GF_CODEC_REORDER;
			if (gf_codec_get_capability(codec, &cap) == GF_OK) codec->is_reordering = cap.cap.valueInt;
		}

		if (codec->flags & GF_ESM_CODEC_IS_RAW_MEDIA) {
			ch->is_raw_channel = 1;
		}

		/*setup net channel config*/
		if (ch->service) {
			memset(&com, 0, sizeof(GF_NetworkCommand));
			com.command_type = GF_NET_CHAN_CONFIG;
			com.base.on_channel = ch;

			com.cfg.priority = ch->esd->streamPriority;
			com.cfg.sync_id = ch->clock->clockID;
			memcpy(&com.cfg.sl_config, ch->esd->slConfig, sizeof(GF_SLConfig));
			/*get the frame duration if audio (used by some network stack)*/
			if (ch->odm->codec && (ch->odm->codec->type==GF_STREAM_AUDIO) ) {
				cap.CapCode = GF_CODEC_SAMPLERATE;
				gf_codec_get_capability(ch->odm->codec, &cap);
				com.cfg.sample_rate = cap.cap.valueInt;
				cap.CapCode = GF_CODEC_CU_DURATION;
				gf_codec_get_capability(ch->odm->codec, &cap);
				com.cfg.frame_duration = cap.cap.valueInt;
			}
			gf_term_service_command(ch->service, &com);

			/*pick the carousel mode from the service reply / stream type*/
			ch->carousel_type = GF_ESM_CAROUSEL_NONE;
			if (com.cfg.use_m2ts_sections) {
				ch->carousel_type = GF_ESM_CAROUSEL_MPEG2;
			} else {
				switch (ch->esd->decoderConfig->streamType) {
				case GF_STREAM_OD:
				case GF_STREAM_SCENE:
					ch->carousel_type = ch->esd->slConfig->AUSeqNumLength ? GF_ESM_CAROUSEL_MPEG4 : GF_ESM_CAROUSEL_NONE;
					break;
				}
			}
		}
	}

	/*assign the first base layer as the codec clock by default, or current channel clock if no clock set
	Also assign codec priority here*/
	if (!ch->esd->dependsOnESID || !codec->ck) {
		codec->ck = ch->clock;
		codec->Priority = ch->esd->streamPriority;
		/*insert base layer first - note we are sure this is a stream of the same type as the codec
		(other streams - OCI, MPEG7, MPEGJ - are not added that way)*/
		return gf_list_insert(codec->inChannels, ch, 0);
	}
	else {
		/*make sure all channels are in order*/
		i=0;
		while ((a_ch = (GF_Channel*)gf_list_enum(codec->inChannels, &i))) {
			/*new channel depends on a_ch: insert after it*/
			if (ch->esd->dependsOnESID == a_ch->esd->ESID) {
				return gf_list_insert(codec->inChannels, ch, i);
			}
			/*a_ch depends on the new channel: insert before it*/
			if (a_ch->esd->dependsOnESID == ch->esd->ESID) {
				return gf_list_insert(codec->inChannels, ch, i-1);
			}
		}
		/*by default append*/
		return gf_list_add(codec->inChannels, ch);
	}
}