/*
 * X11_InitOverlay
 * Ensures an XVideo overlay image of at least VideoWidth x VideoHeight is available.
 * Reuses the current overlay when it is already large enough; otherwise tears it
 * down, (re)acquires an XV port and creates a new XvImage.
 * Returns GF_OK, GF_NOT_SUPPORTED if no XV port handles I420/YUY2, GF_IO_ERR on
 * XvCreateImage failure.
 */
static GF_Err X11_InitOverlay(GF_VideoOutput *vout, u32 VideoWidth, u32 VideoHeight)
{
	XWindow *xwin = (XWindow *)vout->opaque;

	/* existing overlay already big enough: keep it */
	if (xwin->overlay && (VideoWidth <= xwin->overlay->width) && (VideoHeight <= xwin->overlay->height))
		return GF_OK;

	X11_DestroyOverlay(xwin);

	/* prefer planar I420, fall back to packed YUY2 */
	xwin->xvport = X11_GetXVideoPort(vout, GF_PIXEL_I420, 0);
	if (xwin->xvport < 0)
		xwin->xvport = X11_GetXVideoPort(vout, GF_PIXEL_YUY2, 0);

	if (xwin->xvport < 0)
		return GF_NOT_SUPPORTED;

	/* Create overlay image */
	xwin->overlay = XvCreateImage(xwin->display, xwin->xvport, xwin->xv_pf_format, NULL, VideoWidth, VideoHeight);
	if (!xwin->overlay) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_MMIO, ("[X11] Xv Overlay Creation Failure\n"));
		return GF_IO_ERR;
	}

	GF_LOG(GF_LOG_INFO, GF_LOG_MMIO, ("[X11] Overlay init %d x %d - pixel format %s - XV port %d\n",
	                                  VideoWidth, VideoHeight, gf_4cc_to_str(vout->yuv_pixel_format), xwin->xvport ));
	return GF_OK;
}
/*
 * Sets (creates), retypes or removes a meta box.
 * @param file      target ISO file (must be writable)
 * @param root_meta if set, operate on the file-level meta; otherwise movie- or track-level
 * @param track_num track number (1-based) when root_meta is 0; 0 means movie-level meta
 * @param metaType  handler 4CC for the meta; 0 requests removal of the existing meta
 * @return GF_OK, or GF_BAD_PARAM if the track does not exist, or the access error
 */
GF_EXPORT
GF_Err gf_isom_set_meta_type(GF_ISOFile *file, Bool root_meta, u32 track_num, u32 metaType)
{
	/* 20 bytes fits "GPAC " + 4CC (4) + " Handler" + NUL = 18 */
	char szName[20];
	GF_MetaBox *meta;
	GF_Err e = CanAccessMovie(file, GF_ISOM_OPEN_WRITE);
	if (e) return e;

	meta = gf_isom_get_meta(file, root_meta, track_num);
	if (!meta) {
		/* no meta yet and removal requested: nothing to do */
		if (!metaType) return GF_OK;
		/* create and attach a new meta at the requested level */
		meta = (GF_MetaBox *) meta_New();
		if (root_meta) {
			/* file-level meta is both referenced directly and listed in the top boxes */
			file->meta = meta;
			gf_list_add(file->TopBoxes, meta);
		} else {
			/* make sure a moov exists before hanging a meta off it */
			gf_isom_insert_moov(file);
			if (!track_num) {
				file->moov->meta = meta;
			} else {
				GF_TrackBox *tk = (GF_TrackBox *)gf_list_get(file->moov->trackList, track_num-1);
				if (!tk) {
					/* invalid track: release the freshly created box before bailing out */
					gf_isom_box_del((GF_Box *)meta);
					return GF_BAD_PARAM;
				}
				tk->meta = meta;
			}
		}
	} else if (!metaType) {
		/* meta exists and metaType==0: remove it from its level */
		if (root_meta) {
			/* unlink from top boxes first, then free */
			gf_list_del_item(file->TopBoxes, meta);
			gf_isom_box_del((GF_Box *)file->meta);
			file->meta = NULL;
		} else if (file->moov) {
			if (!track_num) {
				gf_isom_box_del((GF_Box *)file->moov->meta);
				file->moov->meta = NULL;
			} else {
				GF_TrackBox *tk = (GF_TrackBox *)gf_list_get(file->moov->trackList, track_num-1);
				if (!tk) return GF_BAD_PARAM;
				gf_isom_box_del((GF_Box *)tk->meta);
				tk->meta = NULL;
			}
		}
		return GF_OK;
	}

	/* (re)write the handler box with the requested type and a default name */
	if (!meta->handler)
		meta->handler = (GF_HandlerBox *)hdlr_New();
	if (meta->handler->nameUTF8) gf_free(meta->handler->nameUTF8);
	meta->handler->handlerType = metaType;
	sprintf(szName, "GPAC %s Handler", gf_4cc_to_str(metaType));
	meta->handler->nameUTF8 = gf_strdup(szName);
	return GF_OK;
}
// Refreshes the YUV-format label from the terminal's current GF_OPT_YUV_FORMAT
// setting: shows "(No YUV used)" when none, else the format's 4CC.
void wxGPACControl::SetYUVLabel()
{
	u32 yuv_format = gf_term_get_option(m_pApp->m_term, GF_OPT_YUV_FORMAT);
	if (!yuv_format) {
		m_yuvtxt->SetLabel(wxT("(No YUV used)"));
		return;
	}
	char str[100];
	sprintf(str, "(%s used)", gf_4cc_to_str(yuv_format));
	m_yuvtxt->SetLabel(wxString(str, wxConvUTF8) );
}
/*
 * Loads an interface of the given family from the whichplug-th registered module.
 * Consults/builds the "PluginsCache" section of the config so libraries that do
 * not export the requested family are never dlopen'ed twice.
 * @param pm              module manager (must be non-NULL with a config attached)
 * @param whichplug       0-based index in the plugin list
 * @param InterfaceFamily 4CC of the wanted interface family
 * @return the loaded interface, or NULL (not found / not exported / invalid lib)
 */
GF_EXPORT
GF_BaseInterface *gf_modules_load_interface(GF_ModuleManager *pm, u32 whichplug, u32 InterfaceFamily)
{
	const char *opt;
	char szKey[32];
	ModuleInstance *inst;
	GF_BaseInterface *ifce;

	if (!pm) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_CORE, ("[Core] gf_modules_load_interface() : No Module Manager set\n"));
		return NULL;
	}
	inst = (ModuleInstance *) gf_list_get(pm->plug_list, whichplug);
	if (!inst) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_CORE, ("[Core] gf_modules_load_interface() : no module %d exist.\n", whichplug));
		return NULL;
	}
	GF_LOG(GF_LOG_DEBUG, GF_LOG_CORE, ("[Core] Load interface...%s\n", inst->name));

	/*look in cache*/
	if (!pm->cfg) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_CORE, ("[Core] No pm->cfg has been set !!!\n"));
		return NULL;
	}
	opt = gf_cfg_get_key(pm->cfg, "PluginsCache", inst->name);
	if (opt) {
		/* cache entry is a space-separated list of "XXXX:yes" tokens */
		const char * ifce_str = gf_4cc_to_str(InterfaceFamily);
		snprintf(szKey, 32, "%s:yes", ifce_str ? ifce_str : "(null)");
		if (!strstr(opt, szKey)) {
			return NULL;
		}
	}

	if (!gf_modules_load_library(inst)) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_CORE, ("[Core] Cannot load library %s\n", inst->name));
		gf_cfg_set_key(pm->cfg, "PluginsCache", inst->name, "Invalid Plugin");
		return NULL;
	}
	if (!inst->query_func) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_CORE, ("[Core] Library %s missing GPAC export symbols\n", inst->name));
		gf_cfg_set_key(pm->cfg, "PluginsCache", inst->name, "Invalid Plugin");
		goto err_exit;
	}

	/*build cache*/
	if (!opt) {
		u32 i;
		Bool found = 0;
		char *key;
		const u32 *si = inst->query_func();
		if (!si) {
			GF_LOG(GF_LOG_WARNING, GF_LOG_CORE, ("[Core] GPAC module %s has no supported interfaces - disabling\n", inst->name));
			gf_cfg_set_key(pm->cfg, "PluginsCache", inst->name, "Invalid Plugin");
			goto err_exit;
		}
		i=0;
		while (si[i]) i++;

		/* each entry needs at most 9 chars ("XXXX:yes "), +1 for the terminating NUL.
		   Previous size (10*i) was a zero-byte allocation when the module exports no
		   interface (i==0), making key[0]=0 a heap overflow. */
		key = gf_malloc(sizeof(char) * (10*i + 1));
		key[0] = 0;
		i=0;
		while (si[i]) {
			snprintf(szKey, 32, "%s:yes ", gf_4cc_to_str(si[i]));
			strcat(key, szKey);
			if (InterfaceFamily==si[i]) found = 1;
			i++;
		}
		gf_cfg_set_key(pm->cfg, "PluginsCache", inst->name, key);
		gf_free(key);
		if (!found) goto err_exit;
	}

	if (!inst->query_func || !inst->query_func(InterfaceFamily) ) goto err_exit;
	ifce = (GF_BaseInterface *) inst->load_func(InterfaceFamily);
	/*sanity check*/
	if (!ifce) goto err_exit;

#if defined(TARGET_OS_IPHONE) || defined(TARGET_IPHONE_SIMULATOR)
	/* iOS static-link hack: SDL interfaces come from the statically linked module */
	if (!strcmp(inst->name, "gm_sdl_out.dylib")) {
		if (InterfaceFamily == GF_VIDEO_OUTPUT_INTERFACE) {
			ifce = SDL_Module_Load_Video();
			fprintf(stderr, "*** Loading SDL Video: %p ***\n", ifce);
		} else if (InterfaceFamily == GF_AUDIO_OUTPUT_INTERFACE) {
			ifce = SDL_Module_Load_Audio();
			fprintf(stderr, "*** Loading SDL Audio: %p ***\n", ifce);
		}
	}
#endif

	if (!ifce->module_name || (ifce->InterfaceType != InterfaceFamily)) {
		inst->destroy_func(ifce);
		goto err_exit;
	}
	gf_list_add(inst->interfaces, ifce);
	/*keep track of parent*/
	ifce->HPLUG = inst;
	GF_LOG(GF_LOG_DEBUG, GF_LOG_CORE, ("[Core] Load interface %s DONE.\n", inst->name));
	return ifce;

err_exit:
	GF_LOG(GF_LOG_DEBUG, GF_LOG_CORE, ("[Core] Load interface %s exit label, freing library...\n", inst->name));
	gf_modules_unload_library(inst);
	GF_LOG(GF_LOG_DEBUG, GF_LOG_CORE, ("[Core] Load interface %s EXIT.\n", inst->name));
	return NULL;
}
/*
 * Updates an offscreen composite texture (CompositeTexture2D/3D): (re)allocates
 * the backing surface when size or pixel format changed, then traverses and
 * redraws the sub-scene into it.
 * Early-outs when the node is clean and no global rebuild/text-edit refresh is
 * pending. Negative pixelWidth/pixelHeight values are internal test hooks that
 * force alternate pixel formats.
 */
static void composite_update(GF_TextureHandler *txh)
{
	s32 w, h;
	GF_STENCIL stencil;
	M_Background2D *back;
	GF_TraverseState *tr_state;
	Bool invalidate_all;
	u32 new_pixel_format;
	GF_Compositor *compositor = (GF_Compositor *)txh->compositor;
	CompositeTextureStack *st = (CompositeTextureStack *) gf_node_get_private(txh->owner);
	GF_Raster2D *raster = st->visual->compositor->rasterizer;

	if (st->unsupported) return;

	/*
	if (compositor->recompute_ar) {
		gf_node_dirty_set(txh->owner, 0, 0);
		return;
	}
	*/

	/* nothing dirty and no global refresh pending: skip the whole redraw */
	if (!compositor->rebuild_offscreen_textures && (!compositor->text_edit_changed || !st->visual->has_text_edit ) && !gf_node_dirty_get(txh->owner)) {
		txh->needs_refresh = 0;
		return;
	}
	gf_node_dirty_clear(st->txh.owner, 0);

	/* opaque RGB when a background is bound, RGBA otherwise */
	new_pixel_format = 0;
	back = gf_list_get(st->visual->back_stack, 0);
	if (back && back->isBound) new_pixel_format = GF_PIXEL_RGB_24;
	else new_pixel_format = GF_PIXEL_RGBA;

#ifdef GPAC_USE_TINYGL
	/*TinyGL pixel format is fixed at compile time, we cannot override it !*/
	if (st->visual->type_3d) new_pixel_format = GF_PIXEL_RGBA;
#else

#ifndef GPAC_DISABLE_3D
	/*no alpha support in offscreen rendering*/
	if ( (st->visual->type_3d) && !(compositor->video_out->hw_caps & GF_VIDEO_HW_OPENGL_OFFSCREEN_ALPHA)) new_pixel_format = GF_PIXEL_RGB_24;
#endif

	/*in OpenGL_ES, only RGBA can be safelly used with glReadPixels*/
#ifdef GPAC_USE_OGL_ES
	new_pixel_format = GF_PIXEL_RGBA;
#endif

#endif

	/*FIXME - we assume RGB+Depth+bitshape, we should check with the video out module*/
#if defined(GF_SR_USE_DEPTH) && !defined(GPAC_DISABLE_3D)
	if (st->visual->type_3d && (compositor->video_out->hw_caps & GF_VIDEO_HW_HAS_DEPTH) ) new_pixel_format = GF_PIXEL_RGBDS;
#endif

	/* pick requested size from the 2D or 3D node variant */
#ifndef GPAC_DISABLE_3D
	if (st->visual->type_3d>1) {
		w = ((M_CompositeTexture3D*)txh->owner)->pixelWidth;
		h = ((M_CompositeTexture3D*)txh->owner)->pixelHeight;
	} else
#endif
	{
		w = ((M_CompositeTexture2D*)txh->owner)->pixelWidth;
		h = ((M_CompositeTexture2D*)txh->owner)->pixelHeight;
	}

	/*internal GPAC hacks for testing color spaces*/
	if (w<-1) {
		w = -w;
		if (h<0) {
			h = -h;
			if (new_pixel_format==GF_PIXEL_RGBA) {
				new_pixel_format=GF_PIXEL_ARGB;
			} else {
				new_pixel_format=GF_PIXEL_BGR_24;
			}
		} else {
			if (new_pixel_format==GF_PIXEL_RGB_24) {
				new_pixel_format=GF_PIXEL_RGB_32;
			}
		}
	}
	else if (h<-1) {
		h = -h;
		if (new_pixel_format==GF_PIXEL_RGB_24) {
			new_pixel_format=GF_PIXEL_RGB_32;
		}
	}
	if (w<0) w = 0;
	if (h<0) h = 0;

	/* degenerate size: release any existing surface and stop */
	if (!w || !h) {
		if (txh->tx_io) {
#ifdef GPAC_USE_TINYGL
			if (st->tgl_ctx) ostgl_delete_context(st->tgl_ctx);
#endif
			gf_sc_texture_release(txh);
			if (txh->data) gf_free(txh->data);
			txh->data = NULL;
			txh->width = txh->height = txh->stride = 0;
		}
		return;
	}

	invalidate_all = compositor->rebuild_offscreen_textures;

	/*rebuild stencil*/
	if (!txh->tx_io || (w != (s32) txh->width) || ( h != (s32) txh->height) || (new_pixel_format != txh->pixelformat) ) {
		Bool needs_stencil = 1;

		/* drop the previous surface before reallocating */
		if (txh->tx_io) {
#ifdef GPAC_USE_TINYGL
			if (st->tgl_ctx) ostgl_delete_context(st->tgl_ctx);
#endif
			gf_sc_texture_release(txh);
			if (txh->data) gf_free(txh->data);
			txh->data = NULL;
		}

		/*we don't use rect ext because of no support for texture transforms*/
		if (1
#ifndef GPAC_DISABLE_3D
		        || compositor->gl_caps.npot_texture
#endif
		   ) {
			st->txh.width = w;
			st->txh.height = h;
			st->sx = st->sy = FIX_ONE;
		} else {
			/* round up to powers of two and remember the scale factors */
			st->txh.width = 2;
			while (st->txh.width<(u32)w) st->txh.width*=2;
			st->txh.height = 2;
			while (st->txh.height<(u32)h) st->txh.height*=2;
			st->sx = INT2FIX(st->txh.width) / w;
			st->sy = INT2FIX(st->txh.height) / h;
		}

		gf_sc_texture_allocate(txh);
		txh->pixelformat = new_pixel_format;
		switch (new_pixel_format) {
		case GF_PIXEL_RGBA:
		case GF_PIXEL_ARGB:
			txh->stride = txh->width * 4;
			txh->transparent = 1;
			break;
		case GF_PIXEL_RGB_565:
			txh->stride = txh->width * 2;
			txh->transparent = 0;
			break;
		case GF_PIXEL_RGBDS:
			txh->stride = txh->width * 4;
			txh->transparent = 1;
			break;
		case GF_PIXEL_RGB_24:
			txh->stride = txh->width * 3;
			txh->transparent = 0;
			break;
		}

		st->visual->width = txh->width;
		st->visual->height = txh->height;

		stencil = raster->stencil_new(raster, GF_STENCIL_TEXTURE);
		/*TODO - add support for compositeTexture3D when root is 2D visual*/
#ifndef GPAC_DISABLE_3D
		if (st->visual->type_3d) {
			GF_Compositor *compositor = st->visual->compositor;
			/*figure out what to do if main visual (eg video out) is not in OpenGL ...*/
			if (!compositor->visual->type_3d) {
				/*create an offscreen window for OpenGL rendering*/
				if ((compositor->offscreen_width < st->txh.width) || (compositor->offscreen_height < st->txh.height)) {
#ifndef GPAC_USE_TINYGL
					GF_Err e;
					GF_Event evt;
					compositor->offscreen_width = MAX(compositor->offscreen_width, st->txh.width);
					compositor->offscreen_height = MAX(compositor->offscreen_height, st->txh.height);

					evt.type = GF_EVENT_VIDEO_SETUP;
					evt.setup.width = compositor->offscreen_width;
					evt.setup.height = compositor->offscreen_height;
					evt.setup.back_buffer = 0;
					evt.setup.opengl_mode = 2;
					e = compositor->video_out->ProcessEvent(compositor->video_out, &evt);
					if (e) {
						/* video out cannot do offscreen GL: mark node unsupported for good */
						gf_sc_texture_release(txh);
						st->unsupported = 1;
						return;
					}
					/*reload openGL ext*/
					gf_sc_load_opengl_extensions(compositor, 1);
#endif
				}
			} else {
				needs_stencil = 0;
			}
		}
#endif

		if (needs_stencil) {
			txh->data = (char*)gf_malloc(sizeof(unsigned char) * txh->stride * txh->height);
			/* NOTE(review): gf_malloc result used unchecked — confirm allocator aborts on OOM */
			memset(txh->data, 0, sizeof(unsigned char) * txh->stride * txh->height);

			/*set stencil texture - we don't check error as an image could not be supported by the rasterizer
			  but still supported by the blitter (case of RGBD/RGBDS)*/
			raster->stencil_set_texture(stencil, txh->data, txh->width, txh->height, txh->stride, txh->pixelformat, txh->pixelformat, 0);

#ifdef GPAC_USE_TINYGL
			if (st->visual->type_3d && !compositor->visual->type_3d) {
				st->tgl_ctx = ostgl_create_context(txh->width, txh->height, txh->transparent ? 32 : 24, &txh->data, 1);
				/* NOTE(review): width is logged twice below — second arg likely meant txh->height */
				GF_LOG(GF_LOG_DEBUG, GF_LOG_COMPOSE, ("[CompositeTexture] Creating TinyGL Offscreen context %p (%d %d - pf %s)\n", st->tgl_ctx, txh->width, txh->width, gf_4cc_to_str(txh->pixelformat)));
			}
#endif
		}
		invalidate_all = 1;
		gf_sc_texture_set_stencil(txh, stencil);
	}
	if (!txh->tx_io) return;

	stencil = gf_sc_texture_get_stencil(txh);
	if (!stencil) return;

#ifdef GPAC_USE_TINYGL
	if (st->tgl_ctx) ostgl_make_current(st->tgl_ctx, 0);
#endif

	/* build a fresh traverse state for the sub-scene redraw */
	GF_SAFEALLOC(tr_state, GF_TraverseState);
	tr_state->vrml_sensors = gf_list_new();
	tr_state->visual = st->visual;
	tr_state->invalidate_all = invalidate_all;
	tr_state->immediate_draw = st->visual->compositor->traverse_state->immediate_draw;

	gf_mx2d_init(tr_state->transform);
	gf_cmx_init(&tr_state->color_mat);
	tr_state->backgrounds = st->visual->back_stack;
	tr_state->viewpoints = st->visual->view_stack;
	tr_state->pixel_metrics = gf_sg_use_pixel_metrics(gf_node_get_graph(st->txh.owner));
	tr_state->min_hsize = INT2FIX( MIN(txh->width, txh->height) ) / 2;
	tr_state->vp_size.x = INT2FIX(txh->width);
	tr_state->vp_size.y = INT2FIX(txh->height);

	composite_do_bindable(st->txh.owner, tr_state, st->first);
	st->first = 0;

	GF_LOG(GF_LOG_DEBUG, GF_LOG_COMPOSE, ("[CompositeTexture] Entering draw cycle\n"));
	txh->needs_refresh = visual_draw_frame(st->visual, st->txh.owner, tr_state, 0);
	/* a fully opaque background (last_had_back==2) makes the texture opaque */
	txh->transparent = (st->visual->last_had_back==2) ? 0 : 1;

	if (!compositor->edited_text && st->visual->has_text_edit) st->visual->has_text_edit = 0;

	/*set active viewport in image coordinates top-left=(0, 0), not in BIFS*/
	if (0 && gf_list_count(st->visual->view_stack)) {
		M_Viewport *vp = (M_Viewport *)gf_list_get(st->visual->view_stack, 0);

		if (vp->isBound) {
			SFVec2f size = vp->size;
			if (size.x >=0 && size.y>=0) {
				/*FIXME - we need tracking of VP changes*/
				txh->needs_refresh = 1;
			}
		}
	}

	if (txh->needs_refresh) {
#ifndef GPAC_DISABLE_3D
		if (st->visual->camera.is_3D) {
			if (st->visual->compositor->visual->type_3d) {
#ifndef GPAC_USE_TINYGL
				gf_sc_copy_to_texture(&st->txh);
#else
				/*in TinyGL we only need to push associated bitmap to the texture*/
				gf_sc_texture_push_image(&st->txh, 0, 0);
#endif
			} else {
#ifndef GPAC_USE_TINYGL
				gf_sc_copy_to_stencil(&st->txh);
#else
				if (txh->pixelformat==GF_PIXEL_RGBDS)
					gf_get_tinygl_depth(&st->txh);
#endif
			}
		} else
#endif
		{
			if (raster->stencil_texture_modified) raster->stencil_texture_modified(stencil);
			gf_sc_texture_set_stencil(txh, stencil);
		}
		gf_sc_invalidate(st->txh.compositor, NULL);
	}

	gf_list_del(tr_state->vrml_sensors);
	gf_free(tr_state);
	GF_LOG(GF_LOG_DEBUG, GF_LOG_COMPOSE, ("[CompositeTexture] Leaving draw cycle\n"));
}
/*
 * Parses a 3GPP timed-text (tx3g) sample entry from the bitstream.
 * Reads the fixed header (18 bytes + default box + default style), then any
 * trailing child boxes; an 'ftab' child becomes the font table, everything
 * else goes through the default box-add path.
 * @return GF_OK, GF_ISOM_INVALID_FILE if the entry is too small, or a parse error
 */
GF_Err tx3g_Read(GF_Box *s, GF_BitStream *bs)
{
	GF_Err e;
	GF_Box *a;
	GF_Tx3gSampleEntryBox *ptr = (GF_Tx3gSampleEntryBox*)s;

	/* must at least hold the fixed fields + default text box + default style */
	if (ptr->size < 18 + GPP_BOX_SIZE + GPP_STYLE_SIZE) return GF_ISOM_INVALID_FILE;

	gf_bs_read_data(bs, ptr->reserved, 6);
	ptr->dataReferenceIndex = gf_bs_read_u16(bs);
	ptr->displayFlags = gf_bs_read_u32(bs);
	ptr->horizontal_justification = gf_bs_read_u8(bs);
	ptr->vertical_justification = gf_bs_read_u8(bs);
	ptr->back_color = gpp_read_rgba(bs);
	gpp_read_box(bs, &ptr->default_box);
	gpp_read_style(bs, &ptr->default_style);
	ptr->size -= 18 + GPP_BOX_SIZE + GPP_STYLE_SIZE;

	/* remaining payload is a sequence of child boxes (8 bytes = minimal box header) */
	while (ptr->size>=8) {
		e = gf_isom_parse_box(&a, bs);
		if (e) return e;
		if (ptr->size<a->size) {
			/* child claims more bytes than remain: drop it and stop, keeping what we have */
			GF_LOG(GF_LOG_WARNING, GF_LOG_CONTAINER, ("[iso file] Box \"%s\" larger than remaining bytes in tx3g - ignoring box\n", gf_4cc_to_str(a->type)));
			ptr->size = 0;
			gf_isom_box_del(a);
			return GF_OK;
		}
		ptr->size -= a->size;
		if (a->type==GF_ISOM_BOX_TYPE_FTAB) {
			/* keep only the last font table encountered */
			if (ptr->font_table) gf_isom_box_del((GF_Box *) ptr->font_table);
			ptr->font_table = (GF_FontTableBox *)a;
		} else {
			e = gf_isom_box_add_default(s, a);
			if (e) return e;
		}
	}
	return GF_OK;
}
/*
 * Creates a WebVTT-related string box (vttC/ctim/iden/sttg/payl/vtta) holding a
 * copy of the given string with trailing whitespace stripped.
 * @param type   4CC of the box to create (must be one of the string-box types)
 * @param string text payload; NULL or all-whitespace yields no box
 * @return the new box, or NULL (unsupported type, empty payload, or alloc failure)
 */
GF_Box *boxstring_new_with_data(u32 type, const char *string)
{
	GF_Box *a=NULL;

	switch (type) {
	case GF_ISOM_BOX_TYPE_VTTC_CONFIG:
	case GF_ISOM_BOX_TYPE_CTIM:
	case GF_ISOM_BOX_TYPE_IDEN:
	case GF_ISOM_BOX_TYPE_STTG:
	case GF_ISOM_BOX_TYPE_PAYL:
	case GF_ISOM_BOX_TYPE_VTTA:
		if (string) {
			/* remove trailing spaces; spec. \r, \n; skip if empty.
			   Index-based scan avoids forming a before-begin pointer on empty input,
			   and the unsigned char cast keeps isspace() defined for 8-bit chars. */
			size_t len = strlen(string);
			while (len && isspace((unsigned char)string[len-1])) --len;

			if (len && (a = gf_isom_box_new(type))) {
				// strndup
				char* str = ((GF_StringBox *)a)->string = gf_malloc(len + 1);
				memcpy(str, string, len);
				str[len] = '\0';
			}
		}
		break;
	default:
		GF_LOG(GF_LOG_ERROR, GF_LOG_CONTAINER, ("[iso file] Box type %s is not a boxstring, cannot initialize with data\n", gf_4cc_to_str(type) ));
		break;
	}
	return a;
}
/**
 * function DirectFBVid_Blit
 * - blit a surface
 **/
static GF_Err DirectFBVid_Blit(GF_VideoOutput *driv, GF_VideoSurface *video_src, GF_Window *src_wnd, GF_Window *dst_wnd, u32 overlay_type)
{
	u32 res;
	DirectFBVID();

	/* hand the whole blit description over to the DirectFB wrapper */
	res = DirectFBVid_BlitWrapper(ctx,
	                              video_src->width, video_src->height, video_src->pixel_format,
	                              video_src->video_buffer, video_src->pitch_y,
	                              src_wnd->x, src_wnd->y, src_wnd->w, src_wnd->h,
	                              dst_wnd->x, dst_wnd->y, dst_wnd->w, dst_wnd->h,
	                              overlay_type);
	if (res) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_MMIO, ("[DirectFB] cannot create blit source surface for pixel format %s\n", gf_4cc_to_str(video_src->pixel_format)));
		return GF_NOT_SUPPORTED;
	}

	GF_LOG(GF_LOG_DEBUG, GF_LOG_MMIO, ("[DirectFB] blit successful\n"));
	return GF_OK;
}
/*
 * Decides whether MPEG-4 Systems processing should apply to this file.
 * Returns 1 when the major or an alternate brand is mp41/mp42 and no brand
 * matches the user's "IgnoreMPEG-4ForBrands" pattern list; 0 otherwise.
 * Patterns are space-separated, with a trailing '*' acting as a prefix
 * wildcard ("nd*" matches Nero Digital brands); a bare "*" ignores everything.
 */
static Bool check_mpeg4_systems(GF_InputService *plug, GF_ISOFile *mov)
{
	char *opt, *bname, *br, *next;
	u32 i, count, brand, has_mpeg4;
	GF_Err e;
	e = gf_isom_get_brand_info(mov, &brand, &i, &count);
	/*no brand == MP4 v1*/
	if (e || !brand) return 1;

	has_mpeg4 = 0;
	if ((brand==GF_ISOM_BRAND_MP41) || (brand==GF_ISOM_BRAND_MP42)) has_mpeg4 = 1;

	/* default the ignore list to "nd*" on first run */
	opt = (char*) gf_modules_get_option((GF_BaseInterface *)plug, "ISOReader", "IgnoreMPEG-4ForBrands");
	if (!opt) {
		gf_modules_set_option((GF_BaseInterface *)plug, "ISOReader", "IgnoreMPEG-4ForBrands", "nd*");
		opt = (char*) gf_modules_get_option((GF_BaseInterface *)plug, "ISOReader", "IgnoreMPEG-4ForBrands");
	}

	for (i=0; i<count; i++) {
		e = gf_isom_get_alternate_brand(mov, i+1, &brand);
		if (e) return 0;
		if ((brand==GF_ISOM_BRAND_MP41) || (brand==GF_ISOM_BRAND_MP42)) {
			has_mpeg4 = 1;
			continue;
		}
		bname = (char*)gf_4cc_to_str(brand);
		br = opt;
		/* walk the pattern list; the buffer is patched in place (separators and
		   trailing '*' temporarily zeroed) and restored before moving on */
		while (br) {
			Bool ignore = 0;
			u32 orig_len, len;
			next = strchr(br, ' ');
			if (next) next[0] = 0;

			/* strip trailing wildcards to get the literal prefix length */
			len = orig_len = strlen(br);
			while (len) {
				if (br[len-1]=='*') {
					br[len-1]=0;
					len--;
				} else {
					break;
				}
			}
			/*ignore all brands*/
			if (!len) ignore = 1;
			/* prefix match against this brand */
			else if (!strncmp(bname, br, len)) ignore = 1;

			/* restore the '*' characters we zeroed above */
			while (len<orig_len) {
				br[len] = '*';
				len++;
			}
			/* restore the separator and advance */
			if (next) {
				next[0] = ' ';
				br = next + 1;
			} else {
				br = NULL;
			}
			if (ignore) return 0;
		}
	}
	return has_mpeg4;
}
/*
 * Returns a human-readable description of an elementary stream, derived from
 * its streamType and objectTypeIndication (plus decoder config where needed).
 * @param esd elementary stream descriptor; must have a decoderConfig
 * @return a static string (never NULL); "Bad parameter" on invalid input
 */
GF_EXPORT
const char *gf_esd_get_textual_description(GF_ESD *esd)
{
	if (!esd || !esd->decoderConfig) return "Bad parameter";

	switch (esd->decoderConfig->streamType) {
	case GF_STREAM_OD:
		return "MPEG-4 Object Descriptor";
	case GF_STREAM_OCR:
		return "MPEG-4 Object Clock Reference";
	case GF_STREAM_SCENE:
		switch (esd->decoderConfig->objectTypeIndication) {
		case 0x0:
		case 0x1:
		case 0x2:
		case 0x3:
		case 0xFF:
			return "MPEG-4 BIFS Scene Description";
		case GPAC_OTI_SCENE_BIFS_EXTENDED:
			return "MPEG-4 Extended BIFS Scene Description";
		case GPAC_OTI_SCENE_AFX:
			/* AFX subtype lives in the first byte of the decoder specific info */
			if (!esd->decoderConfig->decoderSpecificInfo || !esd->decoderConfig->decoderSpecificInfo->data)
				return "AFX Unknown";
			return gf_afx_get_type_description(esd->decoderConfig->decoderSpecificInfo->data[0]);
		case GPAC_OTI_SCENE_LASER:
		{
			GF_LASERConfig l_cfg;
			gf_odf_get_laser_config(esd->decoderConfig->decoderSpecificInfo, &l_cfg);
			if (! l_cfg.newSceneIndicator ) return "LASeR Scene Segment Description";
		}
		return "LASeR Scene Description";
		case GPAC_OTI_SCENE_SYNTHESIZED_TEXTURE:
			return "MPEG-4 Synthesized Texture";
		case GPAC_OTI_SCENE_SAF:
			return "MPEG-4 SAF";
		case GPAC_OTI_3GPP2_CMF:
			return "3GPP2 CMF";
		default:
			return "Unknown Scene Type";
		}
		break;
	case GF_STREAM_VISUAL:
		switch (esd->decoderConfig->objectTypeIndication) {
		case GPAC_OTI_VIDEO_MPEG2_SIMPLE:
			return "MPEG-2 Visual Simple Profile";
		case GPAC_OTI_VIDEO_MPEG2_MAIN:
			return "MPEG-2 Visual Main Profile";
		case GPAC_OTI_VIDEO_MPEG2_SNR:
			return "MPEG-2 Visual SNR Profile";
		/* the three entries below previously all returned the SNR string (copy-paste bug) */
		case GPAC_OTI_VIDEO_MPEG2_SPATIAL:
			return "MPEG-2 Visual Spatial Profile";
		case GPAC_OTI_VIDEO_MPEG2_HIGH:
			return "MPEG-2 Visual High Profile";
		case GPAC_OTI_VIDEO_MPEG2_422:
			return "MPEG-2 Visual 422 Profile";
		case GPAC_OTI_VIDEO_MPEG1:
			return "MPEG-1 Video";
		case GPAC_OTI_IMAGE_JPEG:
			return "JPEG Image";
		case GPAC_OTI_IMAGE_PNG:
			return "PNG Image";
		case GPAC_OTI_IMAGE_JPEG_2000:
			return "JPEG2000 Image";
		case GPAC_OTI_VIDEO_MPEG4_PART2:
			return "MPEG-4 Part 2 Video";
		case GPAC_OTI_VIDEO_AVC:
			return "MPEG-4 AVC|H264 Video";
		case GPAC_OTI_VIDEO_AVC_PS:
			return "MPEG-4 AVC|H264 Parameter Set";
		case GPAC_OTI_MEDIA_FFMPEG:
			return "GPAC FFMPEG Private Video";
		case GPAC_OTI_VIDEO_SMPTE_VC1:
			return "SMPTE VC-1 Video";
		case GPAC_OTI_VIDEO_DIRAC:
			return "Dirac Video";
		default:
			return "Unknown Video type";
		}
		break;
	case GF_STREAM_AUDIO:
		switch (esd->decoderConfig->objectTypeIndication) {
		case GPAC_OTI_AUDIO_AAC_MPEG2_MP:
			return "MPEG-2 AAC Main Profile";
		case GPAC_OTI_AUDIO_AAC_MPEG2_LCP:
			return "MPEG-2 AAC Low Complexity Profile";
		case GPAC_OTI_AUDIO_AAC_MPEG2_SSRP:
			return "MPEG-2 AAC Scaleable Sampling Rate Profile";
		case GPAC_OTI_AUDIO_MPEG2_PART3:
			return "MPEG-2 Audio Part 3";
		case GPAC_OTI_AUDIO_MPEG1:
			return "MPEG-1 Audio";
		case GPAC_OTI_AUDIO_AAC_MPEG4:
		{
#ifdef GPAC_DISABLE_AV_PARSERS
			return "MPEG-4 AAC";
#else
			GF_M4ADecSpecInfo a_cfg;
			if (!esd->decoderConfig->decoderSpecificInfo) return "MPEG-4 AAC";
			gf_m4a_get_config(esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength, &a_cfg);
			return gf_m4a_object_type_name(a_cfg.base_object_type);
#endif
		}
		break;
		case GPAC_OTI_MEDIA_FFMPEG:
			return "GPAC FFMPEG Private Audio";
		case GPAC_OTI_AUDIO_EVRC_VOICE:
			return "EVRC Voice";
		case GPAC_OTI_AUDIO_SMV_VOICE:
			return "SMV Voice";
		case GPAC_OTI_AUDIO_AC3:
			return "AC-3 audio";
		case GPAC_OTI_AUDIO_AC3_ENHANCED:
			return "Enhanced AC-3 Audio";
		case GPAC_OTI_AUDIO_DRA:
			return "DRA Audio";
		case GPAC_OTI_AUDIO_ITU_G719:
			return "ITU G719 Audio";
		case GPAC_OTI_AUDIO_DTS_CA:
			return "DTS Coherent Acoustics audio";
		case GPAC_OTI_AUDIO_DTS_HD_HR:
			return "DTS-HD High Resolution audio";
		case GPAC_OTI_AUDIO_DTS_HD_MASTER:
			return "DTS-HD Master audios";
		default:
			return "Unknown Audio Type";
		}
		break;
	case GF_STREAM_MPEG7:
		return "MPEG-7 Description";
	case GF_STREAM_IPMP:
		return "MPEG-4 IPMP";
	case GF_STREAM_OCI:
		return "MPEG-4 OCI";
	case GF_STREAM_MPEGJ:
		return "MPEG-4 MPEG-J";
	case GF_STREAM_INTERACT:
		return "MPEG-4 User Interaction";
	case GF_STREAM_IPMP_TOOL:
		return "MPEG-4 IPMP Tool";
	case GF_STREAM_FONT:
		return "MPEG-4 Font Data";
	case GF_STREAM_TEXT:
		return "MPEG-4 Streaming Text";
	case GF_STREAM_ND_SUBPIC:
		return "Nero Digital Subpicture";
	case GF_STREAM_PRIVATE_SCENE:
		switch (esd->decoderConfig->objectTypeIndication) {
		case GPAC_OTI_PRIVATE_SCENE_GENERIC:
		{
			char *ext;
			/* dsi carries a 4-byte header followed by the original file name;
			   guard against a missing dsi (previously dereferenced unchecked) */
			if (!esd->decoderConfig->decoderSpecificInfo || !esd->decoderConfig->decoderSpecificInfo->data)
				return "GPAC Internal Scene Description";
			ext = strchr(esd->decoderConfig->decoderSpecificInfo->data + 4, '.');
			if (!ext) return "GPAC Internal Scene Description";
			ext += 1;
			if (!strnicmp(ext, "bt", 2)) return "BT Scene Description";
			if (!strnicmp(ext, "xmt", 2)) return "XMT Scene Description";
			if (!strnicmp(ext, "wrl", 3)) return "VRML Scene Description";
			/* check the longer "x3dv" extension first, otherwise the "x3d"
			   prefix test shadows it and X3D-V files report as W3D */
			if (!strnicmp(ext, "x3dv", 4)) return "X3D Scene Description";
			if (!strnicmp(ext, "x3d", 3)) return "W3D Scene Description";
			if (!strnicmp(ext, "swf", 3)) return "Flash (SWF) Scene Description";
			if (!strnicmp(ext, "xsr", 3)) return "LASeR-ML Scene Description";
			if (!strnicmp(ext, "wgt", 3)) return "W3C Widget Package";
			if (!strnicmp(ext, "mgt", 3)) return "MPEG-U Widget Package";
		}
		return "GPAC Internal Scene Description";
		case GPAC_OTI_PRIVATE_SCENE_SVG:
			return "SVG";
		case GPAC_OTI_PRIVATE_SCENE_LASER:
			return "LASeR (XML)";
		case GPAC_OTI_PRIVATE_SCENE_XBL:
			return "XBL";
		case GPAC_OTI_PRIVATE_SCENE_EPG:
			return "DVB Event Information";
		case GPAC_OTI_PRIVATE_SCENE_WGT:
			return "W3C/MPEG-U Widget";
		case GPAC_OTI_SCENE_SVG:
			return "SVG over RTP";
		case GPAC_OTI_SCENE_SVG_GZ:
			return "SVG+gz over RTP";
		case GPAC_OTI_SCENE_DIMS:
			return "3GPP DIMS";
		default:
			return "Unknown Scene Description";
		}
		break;
	case GF_STREAM_PRIVATE_MEDIA:
		return "Opaque Decoder";
	case GF_STREAM_4CC:
		return gf_4cc_to_str(esd->decoderConfig->objectTypeIndication);
	default:
		return "Unknown Media Type";
	}
}
static GF_Err EVID_InitSurface(GF_VideoOutput *dr) { TInt gl_buffer_size; TInt e; TDisplayMode disp_mode; TSize s; EPOCVideo *ctx = (EPOCVideo *)dr->opaque; GF_LOG(GF_LOG_DEBUG, GF_LOG_MMIO, ("[EPOC Video] Reseting video\n")); EVID_ResetSurface(dr, 0); GF_LOG(GF_LOG_DEBUG, GF_LOG_MMIO, ("[EPOC Video] Video reset OK\n")); ctx->screen = new CWsScreenDevice(*ctx->session); if (!ctx->screen) { GF_LOG(GF_LOG_ERROR, GF_LOG_MMIO, ("[EPOC Video] Cannot create screen device for session\n")); return GF_IO_ERR; } e = ctx->screen->Construct(); if (e != KErrNone) { GF_LOG(GF_LOG_ERROR, GF_LOG_MMIO, ("[EPOC Video] Cannot construct screen device for session - error %d\n", e)); return GF_IO_ERR; } e = ctx->screen->CreateContext(ctx->gc); if (e != KErrNone) { GF_LOG(GF_LOG_ERROR, GF_LOG_MMIO, ("[EPOC Video] Cannot create graphical context - error %d\n", e)); return GF_IO_ERR; } ctx->surface = new CFbsBitmap(); if (!ctx->surface) { GF_LOG(GF_LOG_ERROR, GF_LOG_MMIO, ("[EPOC Video] Cannot allocate backbuffer surface\n")); return GF_IO_ERR; } s = ctx->window->Size(); disp_mode = ctx->screen->DisplayMode(); e = ctx->surface->Create(s, disp_mode); if (e != KErrNone) { GF_LOG(GF_LOG_ERROR, GF_LOG_MMIO, ("[EPOC Video] Cannot create backbuffer surface - error %d\n", e)); return GF_IO_ERR; } gl_buffer_size = 0; switch (disp_mode) { case EGray256: ctx->pixel_format = GF_PIXEL_GREYSCALE; ctx->bpp = 1; break; case EColor64K: ctx->pixel_format = GF_PIXEL_RGB_565; ctx->bpp = 2; gl_buffer_size = 16; break; case EColor16M: ctx->pixel_format = GF_PIXEL_RGB_24; ctx->bpp = 3; gl_buffer_size = 24; break; /** 4096 colour display (12 bpp). 
*/ case EColor4K: ctx->pixel_format = GF_PIXEL_RGB_444; ctx->bpp = 2; gl_buffer_size = 12; break; /** True colour display mode (32 bpp, but top byte is unused and unspecified) */ case EColor16MU: ctx->pixel_format = GF_PIXEL_RGB_32; ctx->bpp = 4; gl_buffer_size = 32; break; #if defined(__SERIES60_3X__) /** Display mode with alpha (24bpp colour plus 8bpp alpha) */ case EColor16MA: ctx->pixel_format = GF_PIXEL_ARGB; ctx->bpp = 4; gl_buffer_size = 32; break; #endif default: GF_LOG(GF_LOG_ERROR, GF_LOG_MMIO, ("[EPOC Video] Unsupported display type %d\n", disp_mode)); return GF_NOT_SUPPORTED; } ctx->width = s.iWidth; ctx->height = s.iHeight; #ifdef GPAC_USE_OGL_ES if (ctx->output_3d_type==1) { if (!gl_buffer_size) { GF_LOG(GF_LOG_ERROR, GF_LOG_MMIO, ("[EPOC Video] Display mode not supported by OpenGL\n")); return GF_IO_ERR; } ctx->egl_display = eglGetDisplay(EGL_DEFAULT_DISPLAY); if (ctx->egl_display == NULL) { GF_LOG(GF_LOG_ERROR, GF_LOG_MMIO, ("[EPOC Video] Cannot open OpenGL display\n")); return GF_IO_ERR; } if (eglInitialize(ctx->egl_display, NULL, NULL) == EGL_FALSE) { GF_LOG(GF_LOG_ERROR, GF_LOG_MMIO, ("[EPOC Video] Cannot initialize OpenGL display\n")); return GF_IO_ERR; } EGLConfig *configList = NULL; EGLint numOfConfigs = 0; EGLint configSize = 0; if (eglGetConfigs(ctx->egl_display, configList, configSize, &numOfConfigs) == EGL_FALSE) { GF_LOG(GF_LOG_ERROR, GF_LOG_MMIO, ("[EPOC Video] Cannot retrieve OpenGL configurations\n")); return GF_IO_ERR; } configSize = numOfConfigs; configList = (EGLConfig*) gf_malloc(sizeof(EGLConfig)*configSize); // Define properties for the wanted EGLSurface EGLint atts[7]; const char *opt = gf_modules_get_option((GF_BaseInterface *)dr, "Video", "GLNbBitsDepth"); atts[0] = EGL_BUFFER_SIZE; atts[1] = gl_buffer_size; atts[2] = EGL_DEPTH_SIZE; atts[3] = opt ? 
atoi(opt) : 16; atts[4] = EGL_SURFACE_TYPE; atts[5] = EGL_PIXMAP_BIT; atts[6] = EGL_NONE; if (eglChooseConfig(ctx->egl_display, atts, configList, configSize, &numOfConfigs) == EGL_FALSE) { GF_LOG(GF_LOG_ERROR, GF_LOG_MMIO, ("[EPOC Video] Cannot choose OpenGL configuration\n")); return GF_IO_ERR; } EGLConfig gl_cfg = configList[0]; gf_free(configList); ctx->egl_surface = eglCreatePixmapSurface(ctx->egl_display, gl_cfg, ctx->surface, NULL); if (ctx->egl_surface == NULL) { GF_LOG(GF_LOG_ERROR, GF_LOG_MMIO, ("[EPOC Video] Cannot create OpenGL surface\n")); return GF_IO_ERR; } ctx->egl_context = eglCreateContext(ctx->egl_display, gl_cfg, EGL_NO_CONTEXT, NULL); if (ctx->egl_context == NULL) { GF_LOG(GF_LOG_ERROR, GF_LOG_MMIO, ("[EPOC Video] Cannot create OpenGL context\n")); return GF_IO_ERR; } if (eglMakeCurrent(ctx->egl_display, ctx->egl_surface, ctx->egl_surface, ctx->egl_context) == EGL_FALSE) { GF_LOG(GF_LOG_ERROR, GF_LOG_MMIO, ("[EPOC Video] Cannot bind OpenGL context to current thread\n")); return GF_IO_ERR; } GF_LOG(GF_LOG_DEBUG, GF_LOG_MMIO, ("[EPOC Video] Video OpenGL setup\n")); } #endif GF_LOG(GF_LOG_DEBUG, GF_LOG_MMIO, ("[EPOC Video] Video setup OK - %d x %d @ PixelFormat %s\n", ctx->width, ctx->height, gf_4cc_to_str(ctx->pixel_format) )); return GF_OK; }
/*
 * Parses a hint sample (RTP/SRTP/RRTP/RTCP packet list, or an FDP 'fdsa' box)
 * from the bitstream.
 * For RTP-family samples, reads packetCount packets then stores any trailing
 * bytes of the sample as AdditionalData.
 * @param ptr        hint sample to fill (hint_subtype selects the layout)
 * @param bs         source bitstream, positioned at the start of the sample
 * @param sampleSize declared size of the sample in bytes
 * @return GF_OK, GF_NOT_SUPPORTED for unknown subtypes, or GF_ISOM_INVALID_MEDIA
 */
GF_Err gf_isom_hint_sample_read(GF_HintSample *ptr, GF_BitStream *bs, u32 sampleSize)
{
	u16 i;
	u32 type;
	GF_HintPacket *pck;
	GF_Err e;
	char *szName = (ptr->hint_subtype==GF_ISOM_BOX_TYPE_RTCP_STSD) ? "RTCP" : "RTP";
	u64 sizeIn, sizeOut;

	/* remember how much data was available before parsing, to compute the
	   number of bytes actually consumed by the packet table */
	sizeIn = gf_bs_available(bs);

	switch (ptr->hint_subtype) {
	case GF_ISOM_BOX_TYPE_RTP_STSD:
	case GF_ISOM_BOX_TYPE_SRTP_STSD:
	case GF_ISOM_BOX_TYPE_RRTP_STSD:
	case GF_ISOM_BOX_TYPE_RTCP_STSD:
		break;
	case GF_ISOM_BOX_TYPE_FDP_STSD:
		/* FDP samples are a single 'fdsa' box, parsed through the box reader */
		ptr->size = gf_bs_read_u32(bs);
		type = gf_bs_read_u32(bs);
		if (type != GF_ISOM_BOX_TYPE_FDSA) {
			GF_LOG(GF_LOG_ERROR, GF_LOG_CONTAINER, ("[iso] invalid FDT sample, top box type %s not fdsa\n", gf_4cc_to_str(type) ));
			return GF_ISOM_INVALID_MEDIA;
		}
		return gf_isom_box_read((GF_Box*)ptr, bs);
	default:
		return GF_NOT_SUPPORTED;
	}

	ptr->packetCount = gf_bs_read_u16(bs);
	ptr->reserved = gf_bs_read_u16(bs);
	/* each packet needs at least one byte beyond the 4-byte header */
	if (ptr->packetCount>=sampleSize) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_CONTAINER, ("[iso] broken %s sample: %d packet_count indicated but only %d bytes in samples\n", szName, ptr->packetCount, sampleSize));
		return GF_ISOM_INVALID_MEDIA;
	}

	for (i = 0; i < ptr->packetCount; i++) {
		if (! gf_bs_available(bs) ) {
			GF_LOG(GF_LOG_ERROR, GF_LOG_CONTAINER, ("[iso] %s hint sample has no more data but still %d entries to read\n", szName, ptr->packetCount-i));
			return GF_ISOM_INVALID_MEDIA;
		}
		pck = gf_isom_hint_pck_new(ptr->hint_subtype);
		pck->trackID = ptr->trackID;
		pck->sampleNumber = ptr->sampleNumber;
		gf_list_add(ptr->packetTable, pck);

		e = gf_isom_hint_pck_read(pck, bs);
		if (e) return e;
	}

	if (ptr->hint_subtype==GF_ISOM_BOX_TYPE_RTCP_STSD) return GF_OK;

	/* bytes consumed so far = initial available - current available.
	   The subtraction was previously reversed (available - sizeIn), which
	   underflows to a huge u64 and corrupts the AdditionalData accounting. */
	sizeOut = sizeIn - gf_bs_available(bs);

	//do we have some more data after the packets ??
	if ((u32)sizeOut < sampleSize) {
		ptr->dataLength = sampleSize - (u32)sizeOut;
		ptr->AdditionalData = (char*)gf_malloc(sizeof(char) * ptr->dataLength);
		gf_bs_read_data(bs, ptr->AdditionalData, ptr->dataLength);
	}
	return GF_OK;
}