/* Attributes from the timed elements are not easy to use during runtime: the runtime
 * info is a set of easy-to-use structures. This function initializes them (intervals,
 * status, ...) and registers the timed element with the rootmost scene graph so that
 * it is notified of scene-time updates. */
GF_EXPORT
void gf_smil_timing_init_runtime_info(GF_Node *timed_elt)
{
	GF_SceneGraph *sg;
	SMIL_Timing_RTI *rti;
	SMILTimingAttributesPointers *timingp = NULL;
	u32 tag = gf_node_get_tag(timed_elt);
	SVGAllAttributes all_atts;
	SVGTimedAnimBaseElement *e = (SVGTimedAnimBaseElement *)timed_elt;

	gf_svg_flatten_attributes((SVG_Element *)e, &all_atts);
	e->timingp = gf_malloc(sizeof(SMILTimingAttributesPointers));
	/* FIX: check the allocation BEFORE using it - the original filled the structure
	 * first and only tested the (already dereferenced) pointer afterwards */
	if (!e->timingp) return;
	e->timingp->begin = all_atts.begin;
	e->timingp->clipBegin = all_atts.clipBegin;
	e->timingp->clipEnd = all_atts.clipEnd;
	e->timingp->dur = all_atts.dur;
	e->timingp->end = all_atts.end;
	e->timingp->fill = all_atts.smil_fill;
	e->timingp->max = all_atts.max;
	e->timingp->min = all_atts.min;
	e->timingp->repeatCount = all_atts.repeatCount;
	e->timingp->repeatDur = all_atts.repeatDur;
	e->timingp->restart = all_atts.restart;
	timingp = e->timingp;

	if (tag == TAG_SVG_audio || tag == TAG_SVG_video) {
		/* if the dur attribute is not set, then it should be set to media as this is
		   the default for media elements - see
		   http://www.w3.org/TR/2005/REC-SMIL2-20051213/smil-timing.html#Timing-DurValueSemantics
		   "For simple media elements that specify continuous media (i.e. media with an
		   inherent notion of time), the implicit duration is the intrinsic duration of
		   the media itself - e.g. video and audio files have a defined duration."
		   TODO: Check if this should work with the animation element */
		if (!e->timingp->dur) {
			SVGAttribute *att = gf_xml_create_attribute((GF_Node *)e, TAG_SVG_ATT_dur);
			e->timingp->dur = (SMIL_Duration *)att->data;
			e->timingp->dur->type = SMIL_DURATION_MEDIA;
		}
	}

	GF_SAFEALLOC(rti, SMIL_Timing_RTI)
	/* FIX: GF_SAFEALLOC can fail; bail out instead of dereferencing NULL */
	if (!rti) return;
	timingp->runtime = rti;
	rti->timed_elt = timed_elt;
	GF_LOG(GF_LOG_DEBUG, GF_LOG_SMIL, ("[SMIL Timing ] Time %f - Timed element %s - Initialization\n", gf_node_get_scene_time((GF_Node *)rti->timed_elt), gf_node_get_log_name((GF_Node *)rti->timed_elt)));
	rti->timingp = timingp;
	rti->status = SMIL_STATUS_WAITING_TO_BEGIN;
	rti->evaluate_status = SMIL_TIMING_EVAL_NONE;
	rti->evaluate = gf_smil_timing_null_timed_function;
	rti->scene_time = -1;
	rti->force_reevaluation = 0;
	rti->media_duration = -1;

	GF_SAFEALLOC(rti->current_interval, SMIL_Interval);
	if (!rti->current_interval) return;
	gf_smil_timing_get_first_interval(rti);
	GF_SAFEALLOC(rti->next_interval, SMIL_Interval);
	if (!rti->next_interval) return;
	gf_smil_timing_get_next_interval(rti, 0, rti->next_interval, rti->current_interval->begin);

	/* Now that the runtime info for this timed element is initialized, we can tell the
	   scene graph that it can start notifying the scene time to this element. Because of
	   the 'animation' element, we can have many scene graphs sharing the same scene
	   time; we therefore add this timed element to the rootmost scene graph. */
	sg = timed_elt->sgprivate->scenegraph;
	while (sg->parent_scene) sg = sg->parent_scene;
	gf_smil_timing_add_to_sg(sg, rti);
}
DownloadedCacheEntry gf_cache_create_entry ( GF_DownloadManager * dm, const char * cache_directory, const char * url , u64 start_range, u64 end_range, Bool mem_storage) { char tmp[_CACHE_TMP_SIZE]; u8 hash[_CACHE_HASH_SIZE]; int sz; char ext[_CACHE_MAX_EXTENSION_SIZE]; DownloadedCacheEntry entry = NULL; if ( !dm || !url || !cache_directory) { GF_LOG(GF_LOG_WARNING, GF_LOG_NETWORK, ("[CACHE] gf_cache_create_entry :%d, dm=%p, url=%s cache_directory=%s, aborting.\n", __LINE__, dm, url, cache_directory)); return entry; } sz = (u32) strlen ( url ); if ( sz > _CACHE_TMP_SIZE ) { GF_LOG(GF_LOG_WARNING, GF_LOG_NETWORK, ("[CACHE] gf_cache_create_entry:%d : ERROR, URL is too long (%d chars), more than %d chars.\n", __LINE__, sz, _CACHE_TMP_SIZE )); return entry; } tmp[0] = '\0'; /*generate hash of the full url*/ if (start_range && end_range) { sprintf(tmp, "%s_"LLD"-"LLD, url, start_range, end_range ); } else { strcpy ( tmp, url ); } gf_sha1_csum ((u8*) tmp, (u32) strlen ( tmp ), hash ); tmp[0] = 0; { int i; for ( i=0; i<20; i++ ) { char t[3]; t[2] = 0; sprintf ( t, "%02X", hash[i] ); strcat ( tmp, t ); } } assert ( strlen ( tmp ) == (_CACHE_HASH_SIZE * 2) ); GF_SAFEALLOC(entry, struct __DownloadedCacheEntryStruct); if ( !entry ) { GF_LOG(GF_LOG_WARNING, GF_LOG_NETWORK, ("gf_cache_create_entry:%d : OUT of memory !\n", __LINE__)); return NULL; } GF_LOG(GF_LOG_DEBUG, GF_LOG_NETWORK, ("[CACHE] gf_cache_create_entry:%d, entry=%p\n", __LINE__, entry)); entry->url = gf_strdup ( url ); entry->hash = gf_strdup ( tmp ); entry->memory_stored = mem_storage; entry->cacheSize = 0; entry->contentLength = 0; entry->serverETag = NULL; entry->diskETag = NULL; entry->flags = NO_VALIDATION; entry->validity = 0; entry->diskLastModified = NULL; entry->serverLastModified = NULL; entry->dm = dm; entry->range_start = start_range; entry->range_end = end_range; #ifdef ENABLE_WRITE_MX { char name[1024]; snprintf(name, sizeof(name)-1, "CachedEntryWriteMx=%p, url=%s", (void*) entry, url); 
entry->write_mutex = gf_mx_new(name); assert( entry->write_mutex); } #endif entry->deletableFilesOnDelete = 0; entry->write_session = NULL; entry->sessions = gf_list_new(); if (entry->memory_stored) { entry->cache_filename = gf_malloc ( strlen ("gmem://") + 8 + strlen("@") + 16 + 1); } else { /* Sizeof cache directory + hash + possible extension */ entry->cache_filename = gf_malloc ( strlen ( cache_directory ) + strlen(cache_file_prefix) + strlen(tmp) + _CACHE_MAX_EXTENSION_SIZE + 1); } if ( !entry->hash || !entry->url || !entry->cache_filename || !entry->sessions) { GF_Err err; /* Probably out of memory */ GF_LOG(GF_LOG_WARNING, GF_LOG_NETWORK, ("[CACHE] gf_cache_create_entry:%d, aborting due to OUT of MEMORY !\n", __LINE__)); err = gf_cache_delete_entry ( entry ); assert ( err == GF_OK ); return NULL; } if (entry->memory_stored) { sprintf(entry->cache_filename, "gmem://%d@%p", entry->contentLength, entry->mem_storage); return entry; } tmp[0] = '\0'; strcpy ( entry->cache_filename, cache_directory ); strcat( entry->cache_filename, cache_file_prefix ); strcat ( entry->cache_filename, entry->hash ); strcpy ( tmp, url ); { char * parser; parser = strrchr ( tmp, '?' ); if ( parser ) parser[0] = '\0'; parser = strrchr ( tmp, '#' ); if ( parser ) parser[0] = '\0'; parser = strrchr ( tmp, '.' ); if ( parser && ( strlen ( parser ) < _CACHE_MAX_EXTENSION_SIZE ) ) strncpy(ext, parser, _CACHE_MAX_EXTENSION_SIZE); else strncpy(ext, default_cache_file_suffix, _CACHE_MAX_EXTENSION_SIZE); assert (strlen(ext)); strcat( entry->cache_filename, ext); } tmp[0] = '\0'; strcpy( tmp, cache_file_prefix); strcat( tmp, entry->hash ); strcat( tmp , ext); strcat ( tmp, cache_file_info_suffix ); entry->properties = gf_cfg_force_new ( cache_directory, tmp ); if ( !entry->properties ) { GF_Err err; /* OUT of memory ? 
*/ GF_LOG(GF_LOG_WARNING, GF_LOG_NETWORK, ("[CACHE] gf_cache_create_entry:%d, aborting due to OUT of MEMORY !\n", __LINE__)); err = gf_cache_delete_entry ( entry ); assert ( err == GF_OK ); return NULL; } gf_cache_set_etag_on_disk(entry, gf_cfg_get_key(entry->properties, CACHE_SECTION_NAME, CACHE_SECTION_NAME_ETAG)); gf_cache_set_etag_on_server(entry, gf_cfg_get_key(entry->properties, CACHE_SECTION_NAME, CACHE_SECTION_NAME_ETAG)); gf_cache_set_mime_type(entry, gf_cfg_get_key(entry->properties, CACHE_SECTION_NAME, CACHE_SECTION_NAME_MIME_TYPE)); gf_cache_set_last_modified_on_disk(entry, gf_cfg_get_key(entry->properties, CACHE_SECTION_NAME, CACHE_SECTION_NAME_LAST_MODIFIED)); gf_cache_set_last_modified_on_server(entry, gf_cfg_get_key(entry->properties, CACHE_SECTION_NAME, CACHE_SECTION_NAME_LAST_MODIFIED)); { const char * keyValue = gf_cfg_get_key ( entry->properties, CACHE_SECTION_NAME, CACHE_SECTION_NAME_URL ); if ( keyValue == NULL || stricmp ( url, keyValue ) ) entry->flags |= CORRUPTED; keyValue = gf_cfg_get_key(entry->properties, CACHE_SECTION_NAME, CACHE_SECTION_NAME_RANGE); if (keyValue) { u64 s, e; sscanf(keyValue, LLD"-"LLD, &s, &e); /*mark as corrupted if not same range (we don't support this for the time being ...*/ if ((s!=entry->range_start) || (e!=entry->range_end)) entry->flags |= CORRUPTED; } } gf_cache_check_if_cache_file_is_corrupted(entry); return entry; }
/* Accepts a pending connection on a listening socket. Non-blocking: the socket is
 * first polled with select() (except on Symbian), so the call returns
 * GF_IP_NETWORK_EMPTY when no connection is pending. On success a newly allocated
 * GF_Socket (owned by the caller) is stored in *newConnection. */
GF_Err gf_sk_accept(GF_Socket *sock, GF_Socket **newConnection)
{
	u32 client_address_size;
	SOCKET sk;
#ifndef __SYMBIAN32__
	s32 ready;
	struct timeval timeout;
	fd_set Group;
#endif
	*newConnection = NULL;
	if (!sock || !(sock->flags & GF_SOCK_IS_LISTENING) ) return GF_BAD_PARAM;

#ifndef __SYMBIAN32__
	//can we read?
	FD_ZERO(&Group);
	FD_SET(sock->socket, &Group);
	timeout.tv_sec = 0;
	timeout.tv_usec = SOCK_MICROSEC_WAIT;

	//TODO - check if this is correct
	ready = select((int) sock->socket+1, &Group, NULL, NULL, &timeout);
	if (ready == SOCKET_ERROR) {
		switch (LASTSOCKERROR) {
		case EAGAIN:
			return GF_IP_SOCK_WOULD_BLOCK;
		default:
			return GF_IP_NETWORK_FAILURE;
		}
	}
	if (!ready || !FD_ISSET(sock->socket, &Group)) return GF_IP_NETWORK_EMPTY;
#endif

#ifdef GPAC_HAS_IPV6
	client_address_size = sizeof(struct sockaddr_in6);
#else
	client_address_size = sizeof(struct sockaddr_in);
#endif
	sk = accept(sock->socket, (struct sockaddr *) &sock->dest_addr, &client_address_size);
	//we either have an error or we have no connections
	if (sk == INVALID_SOCKET) {
		switch (LASTSOCKERROR) {
		case EAGAIN:
			return GF_IP_SOCK_WOULD_BLOCK;
		default:
			return GF_IP_NETWORK_FAILURE;
		}
	}

	(*newConnection) = (GF_Socket *) gf_malloc(sizeof(GF_Socket));
	/* FIX: the allocation was dereferenced unchecked */
	if (!(*newConnection)) return GF_OUT_OF_MEM;
	/* FIX: only some members were assigned below, leaving the rest of the new
	 * GF_Socket uninitialized - zero the whole structure first */
	memset((*newConnection), 0, sizeof(GF_Socket));
	(*newConnection)->socket = sk;
	(*newConnection)->flags = sock->flags & ~GF_SOCK_IS_LISTENING;
#ifdef GPAC_HAS_IPV6
	memcpy( &(*newConnection)->dest_addr, &sock->dest_addr, client_address_size);
	memset(&sock->dest_addr, 0, sizeof(struct sockaddr_in6));
#else
	memcpy( &(*newConnection)->dest_addr, &sock->dest_addr, client_address_size);
	memset(&sock->dest_addr, 0, sizeof(struct sockaddr_in));
#endif

#if defined(WIN32) || defined(_WIN32_WCE)
	wsa_init++;
#endif
	(*newConnection)->dest_addr_len = client_address_size;
	return GF_OK;
}
void bifs_to_vid(GF_ISOFile *file, char *szConfigFile, u32 width, u32 height, char *rad_name, u32 dump_type, char *out_dir, Double fps, s32 frameID, s32 dump_time) { GF_User user; BIFSVID b2v; u16 es_id; Bool first_dump, needs_raw; u32 i, j, di, count, timescale, frameNum; u32 duration, cur_time; GF_VideoSurface fb; GF_Err e; char old_driv[1024]; const char *test; char config_path[GF_MAX_PATH]; avi_t *avi_out; Bool reset_fps; GF_ESD *esd; char comp[5]; char *conv_buf; memset(&user, 0, sizeof(GF_User)); if (szConfigFile && strlen(szConfigFile)) { user.config = gf_cfg_init(config_path, NULL); } else { user.config = gf_cfg_init(NULL, NULL); } if (!user.config) { fprintf(stdout, "Error: Configuration File \"%s\" not found in %s\n", GPAC_CFG_FILE, config_path); return; } avi_out = NULL; conv_buf = NULL; esd = NULL; needs_raw = 0; test = gf_cfg_get_key(user.config, "General", "ModulesDirectory"); user.modules = gf_modules_new((const unsigned char *) test, user.config); strcpy(old_driv, "raw_out"); if (!gf_modules_get_count(user.modules)) { printf("Error: no modules found\n"); goto err_exit; } /*switch driver to raw_driver*/ test = gf_cfg_get_key(user.config, "Video", "DriverName"); if (test) strcpy(old_driv, test); test = gf_cfg_get_key(user.config, "Compositor", "RendererName"); /*since we only support RGB24 for MP42AVI force using RAW out with 2D driver*/ if (test && strstr(test, "2D")) { gf_cfg_set_key(user.config, "Video", "DriverName", "Raw Video Output"); needs_raw = 1; } needs_raw = 0; user.init_flags = GF_TERM_NO_AUDIO | GF_TERM_FORCE_3D; b2v.sr = gf_sc_new(&user, 0, NULL); gf_sc_set_option(b2v.sr, GF_OPT_VISIBLE, 0); b2v.sg = gf_sg_new(); gf_sg_set_scene_time_callback(b2v.sg, get_scene_time, &b2v); gf_sg_set_init_callback(b2v.sg, node_init, &b2v); gf_sg_set_modified_callback(b2v.sg, node_modif, &b2v); /*load config*/ gf_sc_set_option(b2v.sr, GF_OPT_RELOAD_CONFIG, 1); b2v.bifs = gf_bifs_decoder_new(b2v.sg, 0); if (needs_raw) { test = gf_cfg_get_key(user.config, 
"Video", "DriverName"); if (stricmp(test, "raw_out") && stricmp(test, "Raw Video Output")) { printf("couldn't load raw output driver (%s used)\n", test); goto err_exit; } } strcpy(config_path, ""); if (out_dir) { strcat(config_path, out_dir); if (config_path[strlen(config_path)-1] != '\\') strcat(config_path, "\\"); } strcat(config_path, rad_name); strcat(config_path, "_bifs"); if (!dump_type) { strcat(config_path, ".avi"); avi_out = AVI_open_output_file(config_path); comp[0] = comp[1] = comp[2] = comp[3] = comp[4] = 0; if (!avi_out) goto err_exit; } for (i=0; i<gf_isom_get_track_count(file); i++) { esd = gf_isom_get_esd(file, i+1, 1); if (!esd) continue; if (!esd->dependsOnESID && (esd->decoderConfig->streamType == GF_STREAM_SCENE)) break; gf_odf_desc_del((GF_Descriptor *) esd); esd = NULL; } if (!esd) { printf("no bifs track found\n"); goto err_exit; } b2v.duration = gf_isom_get_media_duration(file, i+1); timescale = gf_isom_get_media_timescale(file, i+1); es_id = (u16) gf_isom_get_track_id(file, i+1); e = gf_bifs_decoder_configure_stream(b2v.bifs, es_id, esd->decoderConfig->decoderSpecificInfo->data, esd->decoderConfig->decoderSpecificInfo->dataLength, esd->decoderConfig->objectTypeIndication); if (e) { printf("BIFS init error %s\n", gf_error_to_string(e)); gf_odf_desc_del((GF_Descriptor *) esd); esd = NULL; goto err_exit; } if (dump_time>=0) dump_time = dump_time *1000 / timescale; gf_sc_set_scene(b2v.sr, b2v.sg); count = gf_isom_get_sample_count(file, i+1); reset_fps = 0; if (!fps) { fps = (Float) (count * timescale); fps /= (Double) (s64) b2v.duration; printf("Estimated BIFS FrameRate %g\n", fps); reset_fps = 1; } if (!width || !height) { gf_sg_get_scene_size_info(b2v.sg, &width, &height); } /*we work in RGB24, and we must make sure the pitch is %4*/ if ((width*3)%4) { printf("Adjusting width (%d) to have a stride multiple of 4\n", width); while ((width*3)%4) width--; } gf_sc_set_size(b2v.sr, width, height); gf_sc_draw_frame(b2v.sr); 
gf_sc_get_screen_buffer(b2v.sr, &fb); width = fb.width; height = fb.height; if (avi_out) { AVI_set_video(avi_out, width, height, fps, comp); conv_buf = gf_malloc(sizeof(char) * width * height * 3); } printf("Dumping at BIFS resolution %d x %d\n\n", width, height); gf_sc_release_screen_buffer(b2v.sr, &fb); cur_time = 0; duration = (u32)(timescale / fps); if (reset_fps) fps = 0; frameNum = 1; first_dump = 1; for (j=0; j<count; j++) { GF_ISOSample *samp = gf_isom_get_sample(file, i+1, j+1, &di); b2v.cts = samp->DTS + samp->CTS_Offset; /*apply command*/ gf_bifs_decode_au(b2v.bifs, es_id, samp->data, samp->dataLength, ((Double)(s64)b2v.cts)/1000.0); gf_isom_sample_del(&samp); if ((frameID>=0) && (j<(u32)frameID)) continue; if ((dump_time>=0) && ((u32) dump_time>b2v.cts)) continue; /*render frame*/ gf_sc_draw_frame(b2v.sr); /*needed for background2D !!*/ if (first_dump) { gf_sc_draw_frame(b2v.sr); first_dump = 0; } if (fps) { if (cur_time > b2v.cts) continue; while (1) { printf("dumped frame time %f (frame %d - sample %d)\r", ((Float)cur_time)/timescale, frameNum, j+1); dump_frame(b2v, conv_buf, config_path, dump_type, avi_out, frameNum); frameNum++; cur_time += duration; if (cur_time > b2v.cts) break; } } else { dump_frame(b2v, conv_buf, config_path, dump_type, avi_out, (frameID>=0) ? frameID : frameNum); if (frameID>=0 || dump_time>=0) break; frameNum++; printf("dumped frame %d / %d\r", j+1, count); } } gf_odf_desc_del((GF_Descriptor *) esd); /*destroy everything*/ gf_bifs_decoder_del(b2v.bifs); gf_sg_del(b2v.sg); gf_sc_set_scene(b2v.sr, NULL); gf_sc_del(b2v.sr); err_exit: if (avi_out) AVI_close(avi_out); if (conv_buf) gf_free(conv_buf); if (user.modules) gf_modules_del(user.modules); if (needs_raw) gf_cfg_set_key(user.config, "Video", "DriverName", old_driv); gf_cfg_del(user.config); }
/* Fetches and parses the next RTSP response on the session.
 * Takes the session mutex for the whole call. Returns GF_IP_NETWORK_EMPTY when the
 * pending data is interleaved RTP or when no (relevant) response is available, and
 * resets the underlying connection on "Connection: Close" or on hard errors. */
GF_EXPORT
GF_Err gf_rtsp_get_response(GF_RTSPSession *sess, GF_RTSPResponse *rsp)
{
	GF_Err e;
	Bool force_reset = GF_FALSE;
	u32 BodyStart, size;

	if (!sess || !rsp) return GF_BAD_PARAM;

	gf_rtsp_response_reset(rsp);

	//LOCK
	gf_mx_p(sess->mx);

	e = gf_rtsp_check_connection(sess);
	if (e) goto exit;

	//push data in our queue
	e = gf_rtsp_fill_buffer(sess);
	if (e) goto exit;

	//this is interleaved data - hand it to the RTP path instead of parsing a reply
	if (!IsRTSPMessage(sess->TCPBuffer+sess->CurrentPos) ) {
		gf_rtsp_session_read(sess);
		e = GF_IP_NETWORK_EMPTY;
		goto exit;
	}
	e = gf_rtsp_read_reply(sess);
	if (e) goto exit;

	//get the reply
	gf_rtsp_get_body_info(sess, &BodyStart, &size);
	e = RTSP_ParseResponseHeader(sess, rsp, BodyStart);

	//copy the body if any
	//NOTE(review): gf_malloc result is used unchecked here
	if (!e && rsp->Content_Length) {
		rsp->body = (char *)gf_malloc(sizeof(char) * (rsp->Content_Length));
		memcpy(rsp->body, sess->TCPBuffer+sess->CurrentPos + BodyStart, rsp->Content_Length);
	}

	GF_LOG(GF_LOG_INFO, GF_LOG_RTP, ("[RTSP] Got Response:\n%s\n", sess->TCPBuffer+sess->CurrentPos));

	//reset TCP buffer - consume header + body even if parsing failed
	sess->CurrentPos += BodyStart + rsp->Content_Length;

	if (e) goto exit;

	//update RTSP aggregation info
	if (sess->NbPending) sess->NbPending -= 1;

	if (sess->RTSP_State == GF_RTSP_STATE_WAITING) sess->RTSP_State = GF_RTSP_STATE_INIT;
	//control, and everything is received
	else if (sess->RTSP_State == GF_RTSP_STATE_WAIT_FOR_CONTROL) {
		if (!sess->NbPending) sess->RTSP_State = GF_RTSP_STATE_INIT;
	}
	//this is a late reply to an aggregated control - signal nothing
	if (!strcmp(sess->RTSPLastRequest, "RESET") && sess->CSeq > rsp->CSeq) {
		e = GF_IP_NETWORK_EMPTY;
		goto exit;
	}

	//reset last request
	if (sess->RTSP_State == GF_RTSP_STATE_INIT) strcpy(sess->RTSPLastRequest, "");

	//check the CSeq is in the right range. The server should ALWAYS reply in sequence
	//to an aggregated sequence of requests.
	//if we have reseted the connection (due to an APP error) return empty:
	//the reply is stale, skip it and recurse to fetch the next one
	if (rsp->CSeq && sess->CSeq > rsp->CSeq + sess->NbPending) {
		gf_mx_v(sess->mx);
		return gf_rtsp_get_response(sess, rsp);
	}

	if (sess->CSeq != rsp->CSeq + sess->NbPending) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_RTP, ("[RTSP] Invalid sequence number - got %d but expected %d\n", sess->CSeq, rsp->CSeq + sess->NbPending));
		e = GF_REMOTE_SERVICE_ERROR;
		goto exit;
	}

	/*check session ID*/
	if (rsp->Session && sess->last_session_id && strcmp(sess->last_session_id, rsp->Session)) {
		e = GF_REMOTE_SERVICE_ERROR;
		goto exit;
	}

	//destroy sessionID if needed - real doesn't close the connection when destroying
	//session
	if (!strcmp(sess->RTSPLastRequest, GF_RTSP_TEARDOWN)) {
		sess->last_session_id = NULL;
	}

exit:
	if (rsp->Connection && !stricmp(rsp->Connection, "Close"))
		force_reset = GF_TRUE;
	else if (e && (e != GF_IP_NETWORK_EMPTY))
		force_reset = GF_TRUE;

	if (force_reset) {
		gf_rtsp_session_reset(sess, GF_FALSE);
		//destroy the socket
		if (sess->connection) gf_sk_del(sess->connection);
		sess->connection = NULL;
		//destroy the http tunnel if any
		if (sess->HasTunnel && sess->http) {
			gf_sk_del(sess->http);
			sess->http = NULL;
		}
	}
	gf_mx_v(sess->mx);
	return e;
}
GF_EXPORT char *gf_url_concatenate(const char *parentName, const char *pathName) { u32 pathSepCount, i, prot_type; char *outPath, *name, *rad; char tmp[GF_MAX_PATH]; if (!pathName && !parentName) return NULL; if (!pathName) return gf_strdup(parentName); if (!parentName) return gf_strdup(pathName); if ( (strlen(parentName) > GF_MAX_PATH) || (strlen(pathName) > GF_MAX_PATH) ) return NULL; prot_type = URL_GetProtocolType(pathName); if (prot_type != GF_URL_TYPE_RELATIVE) { char *sep = NULL; if (pathName[0]=='/') sep = strstr(parentName, "://"); if (sep) sep = strchr(sep+3, '/'); if (sep) { u32 len; sep[0] = 0; len = (u32) strlen(parentName); outPath = (char*)gf_malloc(sizeof(char)*(len+1+strlen(pathName))); strcpy(outPath, parentName); strcat(outPath, pathName); sep[0] = '/'; } else { outPath = gf_strdup(pathName); } goto check_spaces; } /*old upnp addressing a la Platinum*/ rad = strstr(parentName, "%3fpath="); if (!rad) rad = strstr(parentName, "%3Fpath="); if (!rad) rad = strstr(parentName, "?path="); if (rad) { char *the_path; rad = strchr(rad, '='); rad[0] = 0; the_path = gf_strdup(rad+1); i=0; while (1) { if (the_path[i]==0) break; if (!strnicmp(the_path+i, "%5c", 3) || !strnicmp(the_path+i, "%2f", 3) ) { the_path[i] = '/'; memmove(the_path+i+1, the_path+i+3, strlen(the_path+i+3)+1); } else if (!strnicmp(the_path+i, "%05c", 4) || !strnicmp(the_path+i, "%02f", 4) ) { the_path[i] = '/'; memmove(the_path+i+1, the_path+i+4, strlen(the_path+i+4)+1); } i++; } name = gf_url_concatenate(the_path, pathName); outPath = gf_malloc(strlen(parentName) + strlen(name) + 2); sprintf(outPath, "%s=%s", parentName, name); rad[0] = '='; gf_free(name); gf_free(the_path); return outPath; } /*rewrite path to use / not % encoding*/ rad = strchr(parentName, '%'); if (rad && (!strnicmp(rad, "%5c", 3) || !strnicmp(rad, "%05c", 4) || !strnicmp(rad, "%2f", 3) || !strnicmp(rad, "%02f", 4))) { char *the_path = gf_strdup(parentName); i=0; while (1) { if (the_path[i]==0) break; if 
(!strnicmp(the_path+i, "%5c", 3) || !strnicmp(the_path+i, "%2f", 3) ) { the_path[i] = '/'; memmove(the_path+i+1, the_path+i+3, strlen(the_path+i+3)+1); } else if (!strnicmp(the_path+i, "%05c", 4) || !strnicmp(the_path+i, "%02f", 4) ) { the_path[i] = '/'; memmove(the_path+i+1, the_path+i+4, strlen(the_path+i+4)+1); } i++; } name = gf_url_concatenate(the_path, pathName); gf_free(the_path); return name; } pathSepCount = 0; name = NULL; if (pathName[0] == '.') { if (!strcmp(pathName, "..")) { pathSepCount = 1; name = ""; } if (!strcmp(pathName, "./")) { pathSepCount = 0; name = ""; } for (i = 0; i< strlen(pathName) - 2; i++) { /*current dir*/ if ( (pathName[i] == '.') && ( (pathName[i+1] == GF_PATH_SEPARATOR) || (pathName[i+1] == '/') ) ) { i++; continue; } /*parent dir*/ if ( (pathName[i] == '.') && (pathName[i+1] == '.') && ( (pathName[i+2] == GF_PATH_SEPARATOR) || (pathName[i+2] == '/') ) ) { pathSepCount ++; i+=2; name = (char *) &pathName[i+1]; } else { name = (char *) &pathName[i]; break; } } } if (!name) name = (char *) pathName; strcpy(tmp, parentName); while (strchr(" \r\n\t", tmp[strlen(tmp)-1])) { tmp[strlen(tmp)-1] = 0; } /*remove the last /*/ for (i = (u32) strlen(parentName); i > 0; i--) { //break our path at each separator if ((parentName[i-1] == GF_PATH_SEPARATOR) || (parentName[i-1] == '/')) { tmp[i-1] = 0; if (!pathSepCount) break; pathSepCount--; } } //if i==0, the parent path was relative, just return the pathName if (!i) { tmp[i] = 0; while (pathSepCount) { strcat(tmp, "../"); pathSepCount--; } } else { strcat(tmp, "/"); } i = (u32) strlen(tmp); outPath = (char *) gf_malloc(i + strlen(name) + 1); sprintf(outPath, "%s%s", tmp, name); /*cleanup paths sep for win32*/ for (i = 0; i<strlen(outPath); i++) if (outPath[i]=='\\') outPath[i] = '/'; check_spaces: i=0; while (outPath[i]) { if (outPath[i] == '?') break; if (outPath[i] != '%') { i++; continue; } if (!strnicmp(outPath+i, "%3f", 3)) break; if (!strnicmp(outPath+i, "%20", 3)) { outPath[i]=' '; 
memmove(outPath + i+1, outPath+i+3, strlen(outPath+i)-2); } i++; } return outPath; }
static Bool ft_enum_fonts(void *cbck, char *file_name, char *file_path) { char *szfont; FT_Face face; u32 num_faces, i; GF_FontReader *dr = cbck; FTBuilder *ftpriv = dr->udta; GF_LOG(GF_LOG_DEBUG, GF_LOG_PARSER, ("[FreeType] Enumerating font %s (%s)\n", file_name, file_path)); if (FT_New_Face(ftpriv->library, file_path, 0, & face )) return 0; if (!face || !face->family_name) return 0; num_faces = face->num_faces; /*locate right font in collection if several*/ for (i=0; i<num_faces; i++) { /*only scan scalable fonts*/ if (face->face_flags & FT_FACE_FLAG_SCALABLE) { Bool bold, italic; szfont = gf_malloc(sizeof(char)* (strlen(face->family_name)+100)); if (!szfont) continue; strcpy(szfont, face->family_name); /*remember first font found which looks like a alphabetical one*/ if (!ftpriv->font_dir) { u32 gidx; FT_Select_Charmap(face, FT_ENCODING_UNICODE); gidx = FT_Get_Char_Index(face, (u32) 'a'); if (gidx) gidx = FT_Get_Char_Index(face, (u32) 'z'); if (gidx) gidx = FT_Get_Char_Index(face, (u32) '1'); if (gidx) gidx = FT_Get_Char_Index(face, (u32) '@'); if (gidx) ftpriv->font_dir = gf_strdup(szfont); } bold = italic = 0; if (face->style_name) { char *name = gf_strdup(face->style_name); strupr(name); if (strstr(name, "BOLD")) bold = 1; if (strstr(name, "ITALIC")) italic = 1; /*if font is not regular style, append all styles blindly*/ if (!strstr(name, "REGULAR")) { strcat(szfont, " "); strcat(szfont, face->style_name); } gf_free(name); } else { if (face->style_flags & FT_STYLE_FLAG_BOLD) bold = 1; if (face->style_flags & FT_STYLE_FLAG_ITALIC) italic = 1; if (bold) strcat(szfont, " Bold"); if (italic) strcat(szfont, " Italic"); } gf_modules_set_option((GF_BaseInterface *)dr, "FontEngine", szfont, file_path); /*try to assign default fixed fonts*/ if (!bold && !italic) { strcpy(szfont, face->family_name); strlwr(szfont); if (face->face_flags & FT_FACE_FLAG_FIXED_WIDTH){ setBestFont(BEST_FIXED_FONTS, &(ftpriv->font_fixed), face->family_name); } setBestFont(BEST_SERIF_FONTS, 
&(ftpriv->font_serif), face->family_name); setBestFont(BEST_SANS_FONTS, &(ftpriv->font_sans), face->family_name); } gf_free(szfont); } FT_Done_Face(face); if (i+1==num_faces) return 0; /*load next font in collection*/ if (FT_New_Face(ftpriv->library, file_path, i+1, & face )) return 0; if (!face) return 0; } return 0; }
/* Parses one QoS qualifier descriptor from the bitstream: 8-bit tag, MPEG-4
 * expandable size field, then a tag-specific payload. On success stores the new
 * qualifier (caller owns it) in *qos_qual and the total number of bytes consumed in
 * *qual_size. */
GF_Err gf_odf_parse_qos(GF_BitStream *bs, GF_QoS_Default **qos_qual, u32 *qual_size)
{
	u32 tag, qos_size, val, bytesParsed, sizeHeader;
	GF_QoS_Default *newQoS;

	//tag
	tag = gf_bs_read_int(bs, 8);
	bytesParsed = 1;
	//size of instance: expandable field, 7 bits per byte, MSB = continuation
	qos_size = 0;
	sizeHeader = 0;
	do {
		val = gf_bs_read_int(bs, 8);
		sizeHeader++;
		qos_size <<= 7;
		qos_size |= val & 0x7F;
	} while ( val & 0x80 );
	bytesParsed += sizeHeader;

	//Payload
	switch (tag) {
	case QoSMaxDelayTag:
		newQoS = (GF_QoS_Default *) gf_malloc(sizeof(GF_QoS_MaxDelay));
		if (!newQoS) return GF_OUT_OF_MEM;
		((GF_QoS_MaxDelay *)newQoS)->MaxDelay = gf_bs_read_int(bs, 32);
		bytesParsed += 4;
		break;

	case QoSPrefMaxDelayTag:
		newQoS = (GF_QoS_Default *) gf_malloc(sizeof(GF_QoS_PrefMaxDelay));
		if (!newQoS) return GF_OUT_OF_MEM;
		((GF_QoS_PrefMaxDelay *)newQoS)->PrefMaxDelay = gf_bs_read_int(bs, 32);
		bytesParsed += 4;
		break;

	case QoSLossProbTag:
		newQoS = (GF_QoS_Default *) gf_malloc(sizeof(GF_QoS_LossProb));
		if (!newQoS) return GF_OUT_OF_MEM;
		((GF_QoS_LossProb *)newQoS)->LossProb = gf_bs_read_float(bs);
		bytesParsed += 4;
		break;

	case QoSMaxGapLossTag:
		newQoS = (GF_QoS_Default *) gf_malloc(sizeof(GF_QoS_MaxGapLoss));
		if (!newQoS) return GF_OUT_OF_MEM;
		((GF_QoS_MaxGapLoss *)newQoS)->MaxGapLoss = gf_bs_read_int(bs, 32);
		bytesParsed += 4;
		break;

	case QoSMaxAUSizeTag:
		newQoS = (GF_QoS_Default *) gf_malloc(sizeof(GF_QoS_MaxAUSize));
		if (!newQoS) return GF_OUT_OF_MEM;
		((GF_QoS_MaxAUSize *)newQoS)->MaxAUSize = gf_bs_read_int(bs, 32);
		bytesParsed += 4;
		break;

	case QoSAvgAUSizeTag:
		newQoS = (GF_QoS_Default *) gf_malloc(sizeof(GF_QoS_AvgAUSize));
		if (!newQoS) return GF_OUT_OF_MEM;
		((GF_QoS_AvgAUSize *)newQoS)->AvgAUSize = gf_bs_read_int(bs, 32);
		bytesParsed += 4;
		break;

	case QoSMaxAURateTag:
		newQoS = (GF_QoS_Default *) gf_malloc(sizeof(GF_QoS_MaxAURate));
		if (!newQoS) return GF_OUT_OF_MEM;
		((GF_QoS_MaxAURate *)newQoS)->MaxAURate = gf_bs_read_int(bs, 32);
		bytesParsed += 4;
		break;

	case 0x00:
	case 0xFF:
		return GF_ODF_FORBIDDEN_DESCRIPTOR;

	default:
		//we defined the private qos...
		newQoS = (GF_QoS_Default *) gf_malloc(sizeof(GF_QoS_Private));
		if (!newQoS) return GF_OUT_OF_MEM;
		((GF_QoS_Private *)newQoS)->DataLength = qos_size;
		/* FIX: Data was read through an uninitialized pointer - allocate the payload
		 * buffer before gf_bs_read_data */
		((GF_QoS_Private *)newQoS)->Data = (char *) gf_malloc(qos_size);
		if (!((GF_QoS_Private *)newQoS)->Data) {
			gf_free(newQoS);
			return GF_OUT_OF_MEM;
		}
		gf_bs_read_data(bs, ((GF_QoS_Private *)newQoS)->Data, ((GF_QoS_Private *)newQoS)->DataLength);
		bytesParsed += ((GF_QoS_Private *)newQoS)->DataLength;
		break;
	}
	newQoS->size = qos_size;
	newQoS->tag = tag;
	//sanity: everything announced by the size field must have been consumed
	if (bytesParsed != 1 + qos_size + sizeHeader) {
		gf_odf_delete_qos_qual(newQoS);
		return GF_ODF_INVALID_DESCRIPTOR;
	}
	*qos_qual = newQoS;
	*qual_size = bytesParsed;
	return GF_OK;
}
/* Allocates a zero-valued QoS qualifier for the given tag. Returns NULL for the
 * forbidden tags (0x00, 0xFF) or on allocation failure; unknown tags yield an empty
 * private qualifier. Caller owns the returned descriptor. */
GF_QoS_Default *NewQoS(u8 tag)
{
	GF_QoS_Default *newQoS;
	switch (tag) {
	case QoSMaxDelayTag:
		newQoS = (GF_QoS_Default *) gf_malloc(sizeof(GF_QoS_MaxDelay));
		if (!newQoS) return NULL;
		((GF_QoS_MaxDelay *)newQoS)->MaxDelay = 0;
		((GF_QoS_MaxDelay *)newQoS)->size = 4;
		break;

	case QoSPrefMaxDelayTag:
		newQoS = (GF_QoS_Default *) gf_malloc(sizeof(GF_QoS_PrefMaxDelay));
		if (!newQoS) return NULL;
		((GF_QoS_PrefMaxDelay *)newQoS)->PrefMaxDelay = 0;
		((GF_QoS_PrefMaxDelay *)newQoS)->size = 4;
		break;

	case QoSLossProbTag:
		newQoS = (GF_QoS_Default *) gf_malloc(sizeof(GF_QoS_LossProb));
		if (!newQoS) return NULL;
		((GF_QoS_LossProb *)newQoS)->LossProb = 0;
		((GF_QoS_LossProb *)newQoS)->size = 4;
		break;

	case QoSMaxGapLossTag:
		newQoS = (GF_QoS_Default *) gf_malloc(sizeof(GF_QoS_MaxGapLoss));
		if (!newQoS) return NULL;
		((GF_QoS_MaxGapLoss *)newQoS)->MaxGapLoss = 0;
		((GF_QoS_MaxGapLoss *)newQoS)->size = 4;
		break;

	case QoSMaxAUSizeTag:
		newQoS = (GF_QoS_Default *) gf_malloc(sizeof(GF_QoS_MaxAUSize));
		if (!newQoS) return NULL;
		((GF_QoS_MaxAUSize *)newQoS)->MaxAUSize = 0;
		/* FIX: was 0 - like every other fixed 32-bit qualifier above/below, the
		 * payload of MaxAUSize is 4 bytes */
		((GF_QoS_MaxAUSize *)newQoS)->size = 4;
		break;

	case QoSAvgAUSizeTag:
		newQoS = (GF_QoS_Default *) gf_malloc(sizeof(GF_QoS_AvgAUSize));
		if (!newQoS) return NULL;
		((GF_QoS_AvgAUSize *)newQoS)->AvgAUSize = 0;
		((GF_QoS_AvgAUSize *)newQoS)->size = 4;
		break;

	case QoSMaxAURateTag:
		newQoS = (GF_QoS_Default *) gf_malloc(sizeof(GF_QoS_MaxAURate));
		if (!newQoS) return NULL;
		((GF_QoS_MaxAURate *)newQoS)->MaxAURate = 0;
		((GF_QoS_MaxAURate *)newQoS)->size = 4;
		break;

	case 0x00:
	case 0xFF:
		return NULL;

	default:
		//we defined the private qos...
		newQoS = (GF_QoS_Default *) gf_malloc(sizeof(GF_QoS_Private));
		if (!newQoS) return NULL;
		((GF_QoS_Private *)newQoS)->DataLength = 0;
		((GF_QoS_Private *)newQoS)->Data = NULL;
		/* FIX: size was left uninitialized for private qualifiers */
		((GF_QoS_Private *)newQoS)->size = 0;
		break;
	}
	newQoS->tag = tag;
	return newQoS;
}
/* Builds an MPEG-4 ES descriptor for the demuxed audio or video stream.
 * When FFMPEG's codec maps to a standard MPEG OTI (and a decoder config is
 * available) that OTI is used; otherwise the stream is tagged GPAC_OTI_MEDIA_FFMPEG
 * and the decoder-specific info carries a private serialization of the
 * AVCodecContext fields needed to reopen the FFMPEG decoder.
 * Caller owns the returned ESD. */
static GF_ESD *FFD_GetESDescriptor(FFDemux *ffd, Bool for_audio)
{
	GF_BitStream *bs;
	Bool dont_use_sl;
	GF_ESD *esd = (GF_ESD *) gf_odf_desc_esd_new(0);
	esd->ESID = 1 + (for_audio ? ffd->audio_st : ffd->video_st);
	esd->decoderConfig->streamType = for_audio ? GF_STREAM_AUDIO : GF_STREAM_VISUAL;
	esd->decoderConfig->avgBitrate = esd->decoderConfig->maxBitrate = 0;

	/*remap std object types - depending on input formats, FFMPEG may not have
	separate DSI from initial frame. In this case we have no choice but using
	FFMPEG decoders*/
	if (for_audio) {
		AVCodecContext *dec = ffd->ctx->streams[ffd->audio_st]->codec;
		esd->slConfig->timestampResolution = ffd->audio_tscale.den;
		switch (dec->codec_id) {
		case CODEC_ID_MP2:
			esd->decoderConfig->objectTypeIndication = GPAC_OTI_AUDIO_MPEG1;
			break;
		case CODEC_ID_MP3:
			esd->decoderConfig->objectTypeIndication = GPAC_OTI_AUDIO_MPEG2_PART3;
			break;
		case CODEC_ID_AAC:
			//no AudioSpecificConfig available: fall back to the opaque FFMPEG path
			if (!dec->extradata_size) goto opaque_audio;
			esd->decoderConfig->objectTypeIndication = GPAC_OTI_AUDIO_AAC_MPEG4;
			esd->decoderConfig->decoderSpecificInfo->dataLength = dec->extradata_size;
			esd->decoderConfig->decoderSpecificInfo->data = gf_malloc(sizeof(char)*dec->extradata_size);
			memcpy(esd->decoderConfig->decoderSpecificInfo->data,
			       dec->extradata,
			       sizeof(char)*dec->extradata_size);
			break;
		default:
opaque_audio:
			//private GPAC/FFMPEG DSI: codec id + audio format, then codec extradata
			esd->decoderConfig->objectTypeIndication = GPAC_OTI_MEDIA_FFMPEG;
			bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);
			gf_bs_write_u32(bs, dec->codec_id);
			gf_bs_write_u32(bs, dec->sample_rate);
			gf_bs_write_u16(bs, dec->channels);
			gf_bs_write_u16(bs, dec->frame_size);
			gf_bs_write_u8(bs, 16);
			gf_bs_write_u8(bs, 0);
			/*ffmpeg specific*/
			gf_bs_write_u16(bs, dec->block_align);
			gf_bs_write_u32(bs, dec->bit_rate);
			gf_bs_write_u32(bs, dec->codec_tag);
			if (dec->extradata_size) {
				gf_bs_write_data(bs, (char *) dec->extradata, dec->extradata_size);
			}
			gf_bs_get_content(bs, (char **) &esd->decoderConfig->decoderSpecificInfo->data, &esd->decoderConfig->decoderSpecificInfo->dataLength);
			gf_bs_del(bs);
			break;
		}
		dont_use_sl = ffd->unreliable_audio_timing;
	} else {
		AVCodecContext *dec = ffd->ctx->streams[ffd->video_st]->codec;
		esd->slConfig->timestampResolution = ffd->video_tscale.den;
		switch (dec->codec_id) {
		case CODEC_ID_MPEG4:
		/*there is a bug in fragmentation of raw H264 in ffmpeg, the NALU startcode
		(0x00000001) is split across two frames - we therefore force internal ffmpeg
		codec ID to avoid NALU size recompute at the decoder level*/
//		case CODEC_ID_H264:
			/*if dsi not detected force use ffmpeg*/
			if (!dec->extradata_size) goto opaque_video;
			/*otherwise use any MPEG-4 Visual*/
			/* NOTE(review): CODEC_ID_H264 is commented out above, so the AVC branch
			 * of this ternary appears unreachable here - kept for when it is
			 * re-enabled; confirm */
			esd->decoderConfig->objectTypeIndication = (dec->codec_id==CODEC_ID_H264) ? GPAC_OTI_VIDEO_AVC : GPAC_OTI_VIDEO_MPEG4_PART2;
			esd->decoderConfig->decoderSpecificInfo->dataLength = dec->extradata_size;
			esd->decoderConfig->decoderSpecificInfo->data = gf_malloc(sizeof(char)*dec->extradata_size);
			memcpy(esd->decoderConfig->decoderSpecificInfo->data,
			       dec->extradata,
			       sizeof(char)*dec->extradata_size);
			break;
		case CODEC_ID_MPEG1VIDEO:
			esd->decoderConfig->objectTypeIndication = GPAC_OTI_VIDEO_MPEG1;
			break;
		case CODEC_ID_MPEG2VIDEO:
			esd->decoderConfig->objectTypeIndication = GPAC_OTI_VIDEO_MPEG2_422;
			break;
		default:
opaque_video:
			//private GPAC/FFMPEG DSI: codec id + video format, then codec extradata
			esd->decoderConfig->objectTypeIndication = GPAC_OTI_MEDIA_FFMPEG;
			bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);
			gf_bs_write_u32(bs, dec->codec_id);
			gf_bs_write_u16(bs, dec->width);
			gf_bs_write_u16(bs, dec->height);
			/*ffmpeg specific*/
			gf_bs_write_u32(bs, dec->bit_rate);
			gf_bs_write_u32(bs, dec->codec_tag);
			gf_bs_write_u32(bs, dec->pix_fmt);
			if (dec->extradata_size) {
				gf_bs_write_data(bs, (char *) dec->extradata, dec->extradata_size);
			}
			gf_bs_get_content(bs, (char **) &esd->decoderConfig->decoderSpecificInfo->data, &esd->decoderConfig->decoderSpecificInfo->dataLength);
			gf_bs_del(bs);
			break;
		}
		dont_use_sl = 0;
	}

	if (dont_use_sl) {
		esd->slConfig->predefined = SLPredef_SkipSL;
	} else {
		/*only send full AUs*/
		esd->slConfig->useAccessUnitStartFlag = esd->slConfig->useAccessUnitEndFlag = 0;
		esd->slConfig->hasRandomAccessUnitsOnlyFlag = 1;
		esd->slConfig->useTimestampsFlag = 1;
	}
	return esd;
}
/* GF_InputService.ConnectService handler for the FFMPEG demuxer.
 * Opens the URL with libavformat (directly for local files, through a GPAC
 * download session plus in-memory probe buffer for remote ones), selects at
 * most one audio and one video stream (honoring a trailing "#audio"/"#video"
 * fragment of the URL), then reports the result to the terminal.
 * Note: this function always returns GF_OK - failures are reported through
 * gf_term_on_connect(serv, NULL, e) at err_exit, per the service callback model. */
static GF_Err FFD_ConnectService(GF_InputService *plug, GF_ClientService *serv, const char *url)
{
	GF_Err e;
	s64 last_aud_pts;
	u32 i;
	s32 res;
	Bool is_local;
	const char *sOpt;
	char *ext, szName[1024];
	FFDemux *ffd = plug->priv;
	AVInputFormat *av_in = NULL;
	char szExt[20];

	/*already connected*/
	if (ffd->ctx) return GF_SERVICE_ERROR;

	assert( url && strlen(url) < 1024);
	strcpy(szName, url);
	/*"#audio" / "#video" fragment restricts the service to one media type*/
	ext = strrchr(szName, '#');
	ffd->service_type = 0;
	e = GF_NOT_SUPPORTED;
	ffd->service = serv;

	if (ext) {
		if (!stricmp(&ext[1], "video")) ffd->service_type = 1;
		else if (!stricmp(&ext[1], "audio")) ffd->service_type = 2;
		ext[0] = 0;
	}

	/*some extensions not supported by ffmpeg, overload input format*/
	ext = strrchr(szName, '.');
	strcpy(szExt, ext ? ext+1 : "");
	strlwr(szExt);
	if (!strcmp(szExt, "cmp")) av_in = av_find_input_format("m4v");

	/*file:// or a scheme-less path is local, any other scheme is remote*/
	is_local = (strnicmp(url, "file://", 7) && strstr(url, "://")) ? 0 : 1;

	GF_LOG(GF_LOG_DEBUG, GF_LOG_CONTAINER, ("[FFMPEG] opening file %s - local %d - av_in %08x\n", url, is_local, av_in));

	if (!is_local) {
		AVProbeData pd;
		/*setup wraper for FFMPEG I/O*/
		ffd->buffer_size = 8192;
		sOpt = gf_modules_get_option((GF_BaseInterface *)plug, "FFMPEG", "IOBufferSize");
		if (sOpt) ffd->buffer_size = atoi(sOpt);
		ffd->buffer = gf_malloc(sizeof(char)*ffd->buffer_size);
#ifdef FFMPEG_DUMP_REMOTE
		ffd->outdbg = gf_f64_open("ffdeb.raw", "wb");
#endif
#ifdef USE_PRE_0_7
		init_put_byte(&ffd->io, ffd->buffer, ffd->buffer_size, 0, ffd, ff_url_read, NULL, NULL);
		ffd->io.is_streamed = 1;
#else
		ffd->io.seekable = 1;
#endif

		ffd->dnload = gf_term_download_new(ffd->service, url, GF_NETIO_SESSION_NOT_THREADED | GF_NETIO_SESSION_NOT_CACHED, NULL, ffd);
		if (!ffd->dnload) return GF_URL_ERROR;
		/*fill the probe buffer: stop on EOS (fully downloaded) or when the buffer is full*/
		while (1) {
			u32 read;
			e = gf_dm_sess_fetch_data(ffd->dnload, ffd->buffer + ffd->buffer_used, ffd->buffer_size - ffd->buffer_used, &read);
			if (e==GF_EOS) break;
			/*we're sync!!*/
			if (e==GF_IP_NETWORK_EMPTY) continue;
			if (e) goto err_exit;
			ffd->buffer_used += read;
			if (ffd->buffer_used == ffd->buffer_size) break;
		}
		if
		/*full download finished: open straight from the cache file*/
		(e==GF_EOS) {
			const char *cache_file = gf_dm_sess_get_cache_name(ffd->dnload);
			res = open_file(&ffd->ctx, cache_file, av_in);
		} else {
			/*still streaming: probe the format from the buffered prefix*/
			pd.filename = szName;
			pd.buf_size = ffd->buffer_used;
			pd.buf = (u8 *) ffd->buffer;
			av_in = av_probe_input_format(&pd, 1);
			if (!av_in) {
				GF_LOG(GF_LOG_ERROR, GF_LOG_CONTAINER, ("[FFMPEG] error probing file %s - probe start with %c %c %c %c\n", url, ffd->buffer[0], ffd->buffer[1], ffd->buffer[2], ffd->buffer[3]));
				return GF_NOT_SUPPORTED;
			}
			/*setup downloader*/
			av_in->flags |= AVFMT_NOFILE;
#if FF_API_FORMAT_PARAMETERS
			/*commit ffmpeg 603b8bc2a109978c8499b06d2556f1433306eca7*/
			res = avformat_open_input(&ffd->ctx, szName, av_in, NULL);
#else
			res = av_open_input_stream(&ffd->ctx, &ffd->io, szName, av_in, NULL);
#endif
		}
	} else {
		res = open_file(&ffd->ctx, szName, av_in);
	}
	/*map ffmpeg open result to a GPAC error code*/
	switch (res) {
#ifndef _WIN32_WCE
	case 0: e = GF_OK; break;
	case AVERROR_IO: e = GF_URL_ERROR; goto err_exit;
	case AVERROR_INVALIDDATA: e = GF_NON_COMPLIANT_BITSTREAM; goto err_exit;
	case AVERROR_NOMEM: e = GF_OUT_OF_MEM; goto err_exit;
	case AVERROR_NOFMT: e = GF_NOT_SUPPORTED; goto err_exit;
#endif
	/*NOTE(review): on _WIN32_WCE builds the case 0 branch above is compiled
	  out, so a successful open (res==0) falls into default and is reported as
	  GF_SERVICE_ERROR - confirm this is intended*/
	default: e = GF_SERVICE_ERROR; goto err_exit;
	}

	GF_LOG(GF_LOG_DEBUG, GF_LOG_CONTAINER, ("[FFMPEG] looking for streams in %s - %d streams - type %s\n", ffd->ctx->filename, ffd->ctx->nb_streams, ffd->ctx->iformat->name));
	res = av_find_stream_info(ffd->ctx);
	if (res <0) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_CONTAINER, ("[FFMPEG] cannot locate streams - error %d\n", res));
		e = GF_NOT_SUPPORTED;
		goto err_exit;
	}
	GF_LOG(GF_LOG_DEBUG, GF_LOG_CONTAINER, ("[FFMPEG] file %s opened - %d streams\n", url, ffd->ctx->nb_streams));

	/*figure out if we can use codecs or not*/
	ffd->audio_st = ffd->video_st = -1;
	for (i = 0; i < ffd->ctx->nb_streams; i++) {
		AVCodecContext *enc = ffd->ctx->streams[i]->codec;
		switch(enc->codec_type) {
		case AVMEDIA_TYPE_AUDIO:
			/*keep the first audio stream, unless a video-only (#video) service was requested*/
			if ((ffd->audio_st<0) && (ffd->service_type!=1)) {
				ffd->audio_st = i;
				ffd->audio_tscale = ffd->ctx->streams[i]->time_base;
			}
			break;
		case
		/*keep the first video stream, unless an audio-only (#audio) service was requested*/
		AVMEDIA_TYPE_VIDEO:
			if ((ffd->video_st<0) && (ffd->service_type!=2)) {
				ffd->video_st = i;
				ffd->video_tscale = ffd->ctx->streams[i]->time_base;
			}
			break;
		default:
			break;
		}
	}
	if ((ffd->service_type==1) && (ffd->video_st<0)) goto err_exit;
	if ((ffd->service_type==2) && (ffd->audio_st<0)) goto err_exit;
	if ((ffd->video_st<0) && (ffd->audio_st<0)) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_CONTAINER, ("[FFMPEG] No supported streams in file\n"));
		goto err_exit;
	}

	sOpt = gf_modules_get_option((GF_BaseInterface *)plug, "FFMPEG", "DataBufferMS");
	ffd->data_buffer_ms = 0;
	if (sOpt) ffd->data_buffer_ms = atoi(sOpt);
	if (!ffd->data_buffer_ms) ffd->data_buffer_ms = FFD_DATA_BUFFER;

	/*build seek*/
	if (is_local) {
		/*check we do have increasing pts. If not we can't rely on pts, we must skip SL
		we assume video pts is always present*/
		if (ffd->audio_st>=0) {
			last_aud_pts = 0;
			/*heuristic: inspect up to 20 packets and compare the last audio pts
			  against the stream time base - assumes this detects flat/absent audio
			  pts; confirm the intended unit of the 10x threshold*/
			for (i=0; i<20; i++) {
				AVPacket pkt;
				pkt.stream_index = -1;
				if (av_read_frame(ffd->ctx, &pkt) <0) break;
				if (pkt.pts == AV_NOPTS_VALUE) pkt.pts = pkt.dts;
				if (pkt.stream_index==ffd->audio_st) last_aud_pts = pkt.pts;
			}
			if (last_aud_pts*ffd->audio_tscale.den<10*ffd->audio_tscale.num) ffd->unreliable_audio_timing = 1;
		}
		/*probe seekability; if not seekable, reopen the file to rewind after the packet scan above*/
		ffd->seekable = (av_seek_frame(ffd->ctx, -1, 0, AVSEEK_FLAG_BACKWARD)<0) ? 0 : 1;
		if (!ffd->seekable) {
			av_close_input_file(ffd->ctx);
			ffd->ctx = NULL;
			open_file(&ffd->ctx, szName, av_in);
			av_find_stream_info(ffd->ctx);
		}
	}

	/*let's go*/
	gf_term_on_connect(serv, NULL, GF_OK);
	/*if (!ffd->service_type)*/
	FFD_SetupObjects(ffd);
	ffd->service_type = 0;
	return GF_OK;

err_exit:
	/*report the error through the service callback; the function itself still returns GF_OK*/
	GF_LOG(GF_LOG_ERROR, GF_LOG_CONTAINER, ("[FFMPEG] Error opening file %s: %s\n", url, gf_error_to_string(e)));
	if (ffd->ctx) av_close_input_file(ffd->ctx);
	ffd->ctx = NULL;
	gf_term_on_connect(serv, NULL, e);
	return GF_OK;
}
/* GF_InputService.CanHandleURL probe for the FFMPEG demuxer.
 * Returns 1 when libavformat can open the URL and it contains at least one
 * audio or video stream, 0 otherwise. Streaming protocols handled by other
 * GPAC modules (RTSP/RTP/UDP/TCP/data:) and file extensions natively
 * supported by GPAC are explicitly refused. On success, the format's MIME
 * type / extensions are registered with the terminal.
 * Fixes vs previous revision:
 *  - szExt could be read uninitialized when the URL extension was exactly "."
 *    (strlen(ext)==1 skipped the copy but the later "cmp" test still read it);
 *  - the MIME-registration path re-derived the extension without the 19-char
 *    guard, allowing a stack overflow of the 20-byte szExt;
 *  - removed a duplicated "m4a" test and a stray ';;'. */
static Bool FFD_CanHandleURL(GF_InputService *plug, const char *url)
{
	Bool has_audio, has_video;
	s32 i;
	AVFormatContext *ctx;
	AVOutputFormat *fmt_out;
	Bool ret = 0;
	char *ext, szName[1000], szExt[20];
	const char *szExtList;

	if (!plug || !url) return 0;

	/*disable RTP/RTSP from ffmpeg - these schemes are handled elsewhere*/
	if (!strnicmp(url, "rtsp://", 7)) return 0;
	if (!strnicmp(url, "rtspu://", 8)) return 0;
	if (!strnicmp(url, "rtp://", 6)) return 0;
	if (!strnicmp(url, "plato://", 8)) return 0;
	if (!strnicmp(url, "udp://", 6)) return 0;
	if (!strnicmp(url, "tcp://", 6)) return 0;
	if (!strnicmp(url, "data:", 5)) return 0;

	/*strip fragment and query before extracting the extension*/
	strcpy(szName, url);
	ext = strrchr(szName, '#');
	if (ext) ext[0] = 0;
	ext = strrchr(szName, '?');
	if (ext) ext[0] = 0;
	ext = strrchr(szName, '.');
	/*extension too long to fit in szExt: ignore it*/
	if (ext && strlen(ext) > 19) ext = NULL;

	/*fix: make szExt well-defined even when the extension copy below is skipped*/
	szExt[0] = 0;
	if (ext && strlen(ext) > 1) {
		strcpy(szExt, &ext[1]);
		strlwr(szExt);
#ifndef FFMPEG_DEMUX_ENABLE_MPEG2TS
		if (!strcmp(szExt, "ts")) return 0;
#endif
		/*note we forbid ffmpeg to handle files we support*/
		if (!strcmp(szExt, "mp4") || !strcmp(szExt, "mpg4") || !strcmp(szExt, "m4a") || !strcmp(szExt, "m21")
		        || !strcmp(szExt, "m4v") || !strcmp(szExt, "m4s") || !strcmp(szExt, "3gs")
		        || !strcmp(szExt, "3gp") || !strcmp(szExt, "3gpp") || !strcmp(szExt, "3gp2") || !strcmp(szExt, "3g2")
		        || !strcmp(szExt, "mp3") || !strcmp(szExt, "ac3") || !strcmp(szExt, "amr")
		        || !strcmp(szExt, "bt") || !strcmp(szExt, "wrl") || !strcmp(szExt, "x3dv")
		        || !strcmp(szExt, "xmt") || !strcmp(szExt, "xmta") || !strcmp(szExt, "x3d")
		        || !strcmp(szExt, "jpg") || !strcmp(szExt, "jpeg") || !strcmp(szExt, "png")
		   ) return 0;

		/*check any default stuff that should work with ffmpeg*/
		{
			u32 mt;
			for (mt = 0; FFD_MIME_TYPES[mt]; mt += 3) {
				if (gf_term_check_extension(plug, FFD_MIME_TYPES[mt], FFD_MIME_TYPES[mt+1], FFD_MIME_TYPES[mt+2], ext)) return 1;
			}
		}
	}

	ctx = NULL;
	if (open_file(&ctx, szName, NULL)<0) {
		AVInputFormat *av_in = NULL;
		/*some extensions not supported by ffmpeg: retry with a forced input format*/
		if (ext && !strcmp(szExt, "cmp")) av_in = av_find_input_format("m4v");
		if (open_file(&ctx, szName, av_in)<0) {
			return 0;
		}
	}
	if (!ctx || av_find_stream_info(ctx) <0) goto exit;

	/*figure out if we can use codecs or not*/
	has_video = has_audio = 0;
	for (i = 0; i < (s32)ctx->nb_streams; i++) {
		AVCodecContext *enc = ctx->streams[i]->codec;
		switch(enc->codec_type) {
		case AVMEDIA_TYPE_AUDIO:
			if (!has_audio) has_audio = 1;
			break;
		case AVMEDIA_TYPE_VIDEO:
			if (!has_video) has_video = 1;
			break;
		default:
			break;
		}
	}
	if (!has_audio && !has_video) goto exit;
	ret = 1;

	/*register the detected format's MIME type with the terminal*/
#if LIBAVFORMAT_VERSION_MAJOR < 53 && LIBAVFORMAT_VERSION_MINOR < 45
	fmt_out = guess_stream_format(NULL, url, NULL);
#else
	fmt_out = av_guess_format(NULL, url, NULL);
#endif
	if (fmt_out) {
		gf_term_register_mime_type(plug, fmt_out->mime_type, fmt_out->extensions, fmt_out->name);
	} else {
		ext = strrchr(szName, '.');
		/*fix: bound the copy - szExt is 20 bytes and this path previously had no length guard*/
		if (ext && strlen(ext+1) < sizeof(szExt)) {
			strcpy(szExt, &ext[1]);
			strlwr(szExt);
			/*append the extension to the generic ffmpeg MIME entry if not already listed;
			  assumes the stored list starts with a '"' (see &szExtList[1])*/
			szExtList = gf_modules_get_option((GF_BaseInterface *)plug, "MimeTypes", "application/x-ffmpeg");
			if (!szExtList) {
				gf_term_register_mime_type(plug, "application/x-ffmpeg", szExt, "Other Movies (FFMPEG)");
			} else if (!strstr(szExtList, szExt)) {
				u32 len;
				char *buf;
				len = (u32) (strlen(szExtList) + strlen(szExt) + 10);
				buf = gf_malloc(sizeof(char)*len);
				sprintf(buf, "\"%s ", szExt);
				strcat(buf, &szExtList[1]);
				gf_modules_set_option((GF_BaseInterface *)plug, "MimeTypes", "application/x-ffmpeg", buf);
				gf_free(buf);
			}
		}
	}

exit:
	if (ctx) av_close_input_file(ctx);
	return ret;
}
/* SAX start-element callback of the SVG-to-BIFS converter.
 * Creates the SVG node for the opened element, parses its attributes
 * (routing SMIL animation attributes to a deferred-animation record),
 * applies CSS/SVG property inheritance, then mirrors the element into the
 * BIFS scene graph: the root <svg> builds the top-level OrderedGroup
 * (quantizer, viewport, white background, Y-flip transform), and each
 * supported child tag (g, rect, path, polyline, text, ellipse/circle, defs,
 * solidColor, animateTransform) appends the equivalent MPEG-4 node(s) under
 * converter->bifs_parent. Unsupported tags fall back to a dangling
 * Transform2D. The matching end-element handler presumably restores the
 * SVGPropertiesPointers snapshot stashed via gf_node_set_private - confirm. */
static void svg2bifs_node_start(void *sax_cbck, const char *name, const char *name_space, const GF_XMLAttribute *attributes, u32 nb_attributes)
{
	u32 i;
	SVG2BIFS_Converter *converter = (SVG2BIFS_Converter *)sax_cbck;
	SVGPropertiesPointers *backup_props;
	char *id_string = NULL;
	u32 tag;
	SVG_Element *elt;
	SVG_DeferedAnimation *anim = NULL;

	/*create the SVG-side node; the first element ever seen becomes the SVG root*/
	tag = gf_xml_get_element_tag(name, 0);
	elt = (SVG_Element*)gf_node_new(converter->svg_sg, tag);
	if (!gf_sg_get_root_node(converter->svg_sg)) {
		gf_node_register((GF_Node *)elt, NULL);
		gf_sg_set_root_node(converter->svg_sg, (GF_Node *)elt);
	} else {
		gf_node_register((GF_Node *)elt, converter->svg_parent);
		//gf_node_list_add_child(&((GF_ParentNode*)converter->svg_parent)->children, (GF_Node *)elt);
	}
	// fprintf(stdout, "Converting %s\n", gf_node_get_class_name((GF_Node *)elt));
	// if (converter->bifs_parent) fprintf(stdout, "%s\n", gf_node_get_class_name(converter->bifs_parent));

	/*animation elements get a deferred record, resolved after attribute parsing*/
	if (gf_svg_is_animation_tag(tag)) {
		GF_SAFEALLOC(anim, SVG_DeferedAnimation);
		/*default anim target is parent node*/
		anim->animation_elt = elt;
		if (converter->svg_parent) {
			anim->target = anim->anim_parent = (SVG_Element*) converter->svg_parent;
		}
	}

	/*parse attributes: style/id are special-cased, SMIL animation values are
	  stored as strings on the deferred record, everything else goes through
	  the generic field parser*/
	for (i=0; i<nb_attributes; i++) {
		GF_XMLAttribute *att = (GF_XMLAttribute *)&attributes[i];
		if (!att->value || !strlen(att->value)) continue;

		if (!stricmp(att->name, "style")) {
			gf_svg_parse_style((GF_Node *)elt, att->value);
		} else if (!stricmp(att->name, "id") || !stricmp(att->name, "xml:id")) {
			gf_svg_parse_element_id((GF_Node *)elt, att->value, 0);
			id_string = att->value;
		} else if (anim && !stricmp(att->name, "to")) {
			anim->to = gf_strdup(att->value);
		} else if (anim && !stricmp(att->name, "from")) {
			anim->from = gf_strdup(att->value);
		} else if (anim && !stricmp(att->name, "by")) {
			anim->by = gf_strdup(att->value);
		} else if (anim && !stricmp(att->name, "values")) {
			anim->values = gf_strdup(att->value);
		} else if (anim && (tag == TAG_SVG_animateTransform) && !stricmp(att->name, "type")) {
			anim->type =
			/*animateTransform type (rotate/scale/translate) kept as raw string*/
			gf_strdup(att->value);
		} else {
			GF_FieldInfo info;
			if (gf_node_get_field_by_name((GF_Node *)elt, att->name, &info)==GF_OK) {
				gf_svg_parse_attribute((GF_Node *)elt, &info, att->value, 0);
			} else {
				fprintf(stdout, "Skipping attribute %s\n", att->name);
			}
		}
	}

	if (anim) {
		svg_parse_animation(converter->svg_sg, anim);
	}

	/*flatten attributes and apply inheritance; the previous property set is
	  snapshotted on the node's private data (backup_props) for later restore*/
	memset(&converter->all_atts, 0, sizeof(SVGAllAttributes));
	gf_svg_flatten_attributes(elt, &converter->all_atts);
	backup_props = gf_malloc(sizeof(SVGPropertiesPointers));
	memcpy(backup_props, &converter->svg_props, sizeof(SVGPropertiesPointers));
	gf_node_set_private((GF_Node *)elt, backup_props);
	gf_svg_apply_inheritance(&converter->all_atts, &converter->svg_props);

	fprintf(stdout, "START\t%s\t%s\t%s", converter->svg_parent ? gf_node_get_class_name(converter->svg_parent) : "none", converter->bifs_parent ? gf_node_get_class_name(converter->bifs_parent) : "none", name);
	converter->svg_parent = (GF_Node *)elt;

	if (!gf_sg_get_root_node(converter->bifs_sg)) {
		/*first element: only <svg> bootstraps the BIFS scene*/
		if (tag == TAG_SVG_svg) {
			GF_Node *node, *child;

			converter->bifs_sg->usePixelMetrics = 1;
			/*scene size from width/height attributes, defaulting to 320x200*/
			if (converter->all_atts.width && converter->all_atts.width->type == SVG_NUMBER_VALUE) {
				converter->bifs_sg->width = FIX2INT(converter->all_atts.width->value);
			} else {
				converter->bifs_sg->width = 320;
			}
			if (converter->all_atts.height && converter->all_atts.height->type == SVG_NUMBER_VALUE) {
				converter->bifs_sg->height = FIX2INT(converter->all_atts.height->value);
			} else {
				converter->bifs_sg->height = 200;
			}

			node = gf_node_new(converter->bifs_sg, TAG_MPEG4_OrderedGroup);
			gf_node_register(node, NULL);
			gf_sg_set_root_node(converter->bifs_sg, node);

			/*efficient-coding quantizer under the root*/
			child = gf_node_new(converter->bifs_sg, TAG_MPEG4_QuantizationParameter);
			gf_node_register(child, node);
			gf_node_list_add_child(&((GF_ParentNode*)node)->children, child);
			{
				M_QuantizationParameter *qp = (M_QuantizationParameter *)child;
				qp->useEfficientCoding = 1;
			}

			/* SVG to BIFS coordinate transformation */
			child = gf_node_new(converter->bifs_sg, TAG_MPEG4_Viewport);
			/*viewport taken from the SVG viewBox when present, otherwise the scene size;
			  y is negated to account for the flipped axis*/
			gf_node_register(child, node);
			gf_node_list_add_child(&((GF_ParentNode*)node)->children, child);
			{
				M_Viewport *vp = (M_Viewport*)child;
				if (converter->all_atts.viewBox) {
					vp->size.x = converter->all_atts.viewBox->width;
					vp->size.y = converter->all_atts.viewBox->height;
					vp->position.x = converter->all_atts.viewBox->x+converter->all_atts.viewBox->width/2;
					vp->position.y = -(converter->all_atts.viewBox->y+converter->all_atts.viewBox->height/2);
				} else {
					vp->size.x = INT2FIX(converter->bifs_sg->width);
					vp->size.y = INT2FIX(converter->bifs_sg->height);
					vp->position.x = INT2FIX(converter->bifs_sg->width)/2;
					vp->position.y = -INT2FIX(converter->bifs_sg->height)/2;
				}
			}

			/*white background*/
			child = gf_node_new(converter->bifs_sg, TAG_MPEG4_Background2D);
			gf_node_register(child, node);
			gf_node_list_add_child(&((GF_ParentNode*)node)->children, child);
			{
				M_Background2D *b = (M_Background2D *)child;
				b->backColor.red = FIX_ONE;
				b->backColor.green = FIX_ONE;
				b->backColor.blue = FIX_ONE;
			}

			/*global Y-flip (SVG y grows down, BIFS y grows up); this Transform2D
			  becomes the BIFS parent of all converted content*/
			child = gf_node_new(converter->bifs_sg, TAG_MPEG4_Transform2D);
			gf_node_register(child, node);
			gf_node_list_add_child(&((GF_ParentNode*)node)->children, child);
			node = child;
			child = NULL;
			{
				M_Transform2D *tr = (M_Transform2D *)node;
				tr->scale.y = -FIX_ONE;
			}
			converter->bifs_parent = node;
		}
	} else {
		GF_Node *node, *child;
		node = converter->bifs_parent;

		switch(tag) {
		case TAG_SVG_g:
		{
			/*<g> maps to a Transform2D (when a transform is present) or a plain Group*/
			if (converter->all_atts.transform) {
				node = add_transform_matrix(converter, node);
				converter->bifs_parent = node;
			} else {
				M_Group *g = (M_Group*)gf_node_new(converter->bifs_sg, TAG_MPEG4_Group);
				gf_node_register((GF_Node *)g, node);
				gf_node_list_add_child(&((GF_ParentNode*)node)->children, (GF_Node *)g);
				node = (GF_Node *)g;
				converter->bifs_parent = node;
			}
		}
		break;
		case TAG_SVG_rect:
		{
			/*is_parent_set tracks which wrapper node becomes the BIFS parent for children*/
			Bool is_parent_set = 0;
			if (converter->all_atts.transform) {
				node = add_transform_matrix(converter, node);
				converter->bifs_parent = node;
				is_parent_set = 1;
			}
			if (converter->force_transform) {
				node = add_transform2d(converter, node);
				if (!is_parent_set) {
					converter->bifs_parent = node;
					is_parent_set = 1;
				}
			}
			/*x/y offset wrapper; BIFS rectangles are centered, hence the +width/2, +height/2*/
			if (converter->all_atts.x || converter->all_atts.y) {
				child = gf_node_new(converter->bifs_sg, TAG_MPEG4_Transform2D);
				gf_node_register(child, node);
				gf_node_list_add_child(&((GF_ParentNode*)node)->children, child);
				node = child;
				child = NULL;
				if (!is_parent_set) {
					converter->bifs_parent = node;
					is_parent_set = 1;
				}
				{
					M_Transform2D *tr = (M_Transform2D *)node;
					if (converter->all_atts.x) tr->translation.x = converter->all_atts.x->value + (converter->all_atts.width?converter->all_atts.width->value/2:0);
					if (converter->all_atts.y) tr->translation.y = converter->all_atts.y->value + (converter->all_atts.height?converter->all_atts.height->value/2:0);
				}
			}
			child = gf_node_new(converter->bifs_sg, TAG_MPEG4_Shape);
			gf_node_register(child, node);
			gf_node_list_add_child(&((GF_ParentNode*)node)->children, child);
			node = child;
			child = NULL;
			if (!is_parent_set) converter->bifs_parent = node;
			{
				M_Shape *shape = (M_Shape *)node;
				shape->geometry = gf_node_new(converter->bifs_sg, TAG_MPEG4_Rectangle);
				gf_node_register(shape->geometry, (GF_Node *)shape);
				{
					M_Rectangle *rect = (M_Rectangle *)shape->geometry;
					if (converter->all_atts.width) rect->size.x = converter->all_atts.width->value;
					if (converter->all_atts.height) rect->size.y = converter->all_atts.height->value;
				}
				shape->appearance = create_appearance(&converter->svg_props, converter->bifs_sg);
				gf_node_register(shape->appearance, (GF_Node *)shape);
			}
		}
		break;
		case TAG_SVG_path:
		{
			Bool is_parent_set = 0;
			if (converter->all_atts.transform) {
				node = add_transform_matrix(converter, node);
				converter->bifs_parent = node;
				is_parent_set = 1;
			}
			if (converter->force_transform) {
				node = add_transform2d(converter, node);
				if
				(!is_parent_set) {
					converter->bifs_parent = node;
					is_parent_set = 1;
				}
			}
			if (converter->all_atts.x || converter->all_atts.y) {
				child = gf_node_new(converter->bifs_sg, TAG_MPEG4_Transform2D);
				gf_node_register(child, node);
				gf_node_list_add_child(&((GF_ParentNode*)node)->children, child);
				node = child;
				child = NULL;
				if (!is_parent_set) {
					converter->bifs_parent = node;
					is_parent_set = 1;
				}
				{
					M_Transform2D *tr = (M_Transform2D *)node;
					if (converter->all_atts.x) tr->translation.x = converter->all_atts.x->value;
					if (converter->all_atts.y) tr->translation.y = converter->all_atts.y->value;
				}
			}
			child = gf_node_new(converter->bifs_sg, TAG_MPEG4_Shape);
			gf_node_register(child, node);
			gf_node_list_add_child(&((GF_ParentNode*)node)->children, child);
			node = child;
			child = NULL;
			if (!is_parent_set) converter->bifs_parent = node;
			{
				/*translate the parsed SVG path (points + per-point tags) into an
				  XCurve2D: k indexes the coordinate array, j the segment-type array*/
				M_Shape *shape = (M_Shape *)node;
				shape->geometry = gf_node_new(converter->bifs_sg, TAG_MPEG4_XCurve2D);
				gf_node_register(shape->geometry, (GF_Node *)shape);
				if (converter->all_atts.d) {
					M_Coordinate2D *c2d;
					M_XCurve2D *xc = (M_XCurve2D *)shape->geometry;
					u32 i, j, c, k;

					xc->point = gf_node_new(converter->bifs_sg, TAG_MPEG4_Coordinate2D);
					c2d = (M_Coordinate2D *)xc->point;
					gf_node_register(xc->point, (GF_Node *)xc);

					gf_sg_vrml_mf_alloc(&c2d->point, GF_SG_VRML_MFVEC2F, converter->all_atts.d->n_points);
					gf_sg_vrml_mf_alloc(&xc->type, GF_SG_VRML_MFINT32, converter->all_atts.d->n_points);
					c = 0;
					k = 0;
					j = 0;
					c2d->point.vals[k] = converter->all_atts.d->points[0];
					k++;
					xc->type.vals[0] = 0;
					for (i = 1; i < converter->all_atts.d->n_points; ) {
						switch(converter->all_atts.d->tags[i]) {
						case GF_PATH_CURVE_ON:
							/*line-to, or move-to (type 0) when a new contour starts*/
							c2d->point.vals[k] = converter->all_atts.d->points[i];
							k++;
							if (i-1 == converter->all_atts.d->contours[c]) {
								xc->type.vals[j] = 0;
								c++;
							} else {
								xc->type.vals[j] = 1;
							}
							i++;
							break;
						case GF_PATH_CURVE_CUBIC:
							/*cubic bezier: 3 control points, type 2 (+ close type 6 if flagged)*/
							c2d->point.vals[k] = converter->all_atts.d->points[i];
							c2d->point.vals[k+1] = converter->all_atts.d->points[i+1];
							c2d->point.vals[k+2] = converter->all_atts.d->points[i+2];
							k+=3;
							xc->type.vals[j] = 2;
							if (converter->all_atts.d->tags[i+2]==GF_PATH_CLOSE) {
								j++;
								xc->type.vals[j] = 6;
							}
							i+=3;
							break;
						case GF_PATH_CLOSE:
							xc->type.vals[j] = 6;
							i++;
							break;
						case
						/*quadratic bezier: 2 control points, type 7*/
						GF_PATH_CURVE_CONIC:
							c2d->point.vals[k] = converter->all_atts.d->points[i];
							c2d->point.vals[k+1] = converter->all_atts.d->points[i+1];
							k+=2;
							xc->type.vals[j] = 7;
							if (converter->all_atts.d->tags[i+1]==GF_PATH_CLOSE) {
								j++;
								xc->type.vals[j] = 6;
							}
							i+=2;
							break;
						}
						j++;
					}
					xc->type.count = j;
					c2d->point.count = k;
				}
				shape->appearance = create_appearance(&converter->svg_props, converter->bifs_sg);
				gf_node_register(shape->appearance, (GF_Node *)shape);
			}
		}
		break;
		case TAG_SVG_polyline:
		{
			/*polyline maps to an IndexedFaceSet2D holding the point list*/
			Bool is_parent_set = 0;
			if (converter->all_atts.transform) {
				node = add_transform_matrix(converter, node);
				converter->bifs_parent = node;
				is_parent_set = 1;
			}
			if (converter->force_transform) {
				node = add_transform2d(converter, node);
				if (!is_parent_set) {
					converter->bifs_parent = node;
					is_parent_set = 1;
				}
			}
			child = gf_node_new(converter->bifs_sg, TAG_MPEG4_Shape);
			gf_node_register(child, node);
			gf_node_list_add_child(&((GF_ParentNode*)node)->children, child);
			node = child;
			child = NULL;
			if (!is_parent_set) converter->bifs_parent = node;
			{
				M_Shape *shape = (M_Shape *)node;
				shape->geometry = gf_node_new(converter->bifs_sg, TAG_MPEG4_IndexedFaceSet2D);
				gf_node_register(shape->geometry, (GF_Node *)shape);
				if (converter->all_atts.points) {
					M_Coordinate2D *c2d;
					M_IndexedFaceSet2D *ifs = (M_IndexedFaceSet2D *)shape->geometry;
					u32 i;
					ifs->coord = gf_node_new(converter->bifs_sg, TAG_MPEG4_Coordinate2D);
					c2d = (M_Coordinate2D *)ifs->coord;
					gf_node_register(ifs->coord, (GF_Node *)ifs);
					gf_sg_vrml_mf_alloc(&c2d->point, GF_SG_VRML_MFVEC2F, gf_list_count(*converter->all_atts.points));
					for (i = 0; i < gf_list_count(*converter->all_atts.points); i++) {
						SVG_Point *p = (SVG_Point *)gf_list_get(*converter->all_atts.points, i);
						c2d->point.vals[i].x = p->x;
						c2d->point.vals[i].y = p->y;
					}
				}
				shape->appearance = create_appearance(&converter->svg_props, converter->bifs_sg);
				gf_node_register(shape->appearance, (GF_Node *)shape);
			}
		}
		break;
		case TAG_SVG_text:
		{
			Bool is_parent_set = 0;
			if
			/*text: position via x/y lists (first value only), re-flip y locally,
			  and emit Text + XFontStyle from the inherited font properties*/
			(converter->all_atts.transform) {
				node = add_transform_matrix(converter, node);
				converter->bifs_parent = node;
				is_parent_set = 1;
			}
			if (converter->force_transform) {
				node = add_transform2d(converter, node);
				if (!is_parent_set) {
					converter->bifs_parent = node;
					is_parent_set = 1;
				}
			}
			child = gf_node_new(converter->bifs_sg, TAG_MPEG4_Transform2D);
			gf_node_register(child, node);
			gf_node_list_add_child(&((GF_ParentNode*)node)->children, child);
			{
				M_Transform2D *tr = (M_Transform2D *)child;
				if (converter->all_atts.text_x) tr->translation.x = ((SVG_Coordinate *)gf_list_get(*converter->all_atts.text_x, 0))->value;
				if (converter->all_atts.text_y) tr->translation.y = ((SVG_Coordinate *)gf_list_get(*converter->all_atts.text_y, 0))->value;
				tr->scale.y = -FIX_ONE;
			}
			node = child;
			child = NULL;
			if (!is_parent_set) {
				converter->bifs_parent = node;
				is_parent_set = 1;
			}
			child = gf_node_new(converter->bifs_sg, TAG_MPEG4_Shape);
			gf_node_register(child, node);
			gf_node_list_add_child(&((GF_ParentNode*)node)->children, child);
			node = child;
			child = NULL;
			if (!is_parent_set) converter->bifs_parent = node;
			{
				M_FontStyle *fs;
				M_Text *text;
				M_Shape *shape = (M_Shape *)node;
				text = (M_Text *)gf_node_new(converter->bifs_sg, TAG_MPEG4_Text);
				shape->geometry = (GF_Node *)text;
				/*remembered so character data can be attached to this Text node later*/
				converter->bifs_text_node = shape->geometry;
				gf_node_register(shape->geometry, (GF_Node *)shape);
				fs = (M_FontStyle *)gf_node_new(converter->bifs_sg, TAG_MPEG4_XFontStyle);
				gf_node_register((GF_Node *)fs, (GF_Node*)text);
				text->fontStyle = (GF_Node *)fs;
				gf_sg_vrml_mf_alloc(&fs->family, GF_SG_VRML_MFSTRING, 1);
				fs->family.vals[0] = gf_strdup(converter->svg_props.font_family->value);
				fs->size = converter->svg_props.font_size->value;
				shape->appearance = create_appearance(&converter->svg_props, converter->bifs_sg);
				gf_node_register(shape->appearance, (GF_Node *)shape);
			}
		}
		break;
		case TAG_SVG_ellipse:
		case TAG_SVG_circle:
		{
			Bool is_parent_set = 0;
			if (converter->all_atts.transform) {
				node = add_transform_matrix(converter,
				node);
				converter->bifs_parent = node;
				is_parent_set = 1;
			}
			if (converter->force_transform) {
				node = add_transform2d(converter, node);
				if (!is_parent_set) {
					converter->bifs_parent = node;
					is_parent_set = 1;
				}
			}
			/*cx/cy center wrapper*/
			if (converter->all_atts.cx || converter->all_atts.cy) {
				child = gf_node_new(converter->bifs_sg, TAG_MPEG4_Transform2D);
				gf_node_register(child, node);
				gf_node_list_add_child(&((GF_ParentNode*)node)->children, child);
				{
					M_Transform2D *tr = (M_Transform2D *)child;
					if (converter->all_atts.cx) tr->translation.x = converter->all_atts.cx->value;
					if (converter->all_atts.cy) tr->translation.y = converter->all_atts.cy->value;
				}
				node = child;
				child = NULL;
				if (!is_parent_set) {
					converter->bifs_parent = node;
					is_parent_set = 1;
				}
			}
			child = gf_node_new(converter->bifs_sg, TAG_MPEG4_Shape);
			/*NOTE(review): rx/ry (resp. r) are dereferenced below without a NULL
			  check - elements missing those attributes would crash; confirm the
			  parser guarantees them*/
			gf_node_register(child, node);
			gf_node_list_add_child(&((GF_ParentNode*)node)->children, child);
			node = child;
			child = NULL;
			if (!is_parent_set) converter->bifs_parent = node;
			{
				M_Shape *shape = (M_Shape *)node;
				if (tag == TAG_SVG_ellipse) {
					M_Ellipse *e = (M_Ellipse *)gf_node_new(converter->bifs_sg, TAG_MPEG4_Ellipse);
					shape->geometry = (GF_Node *)e;
					e->radius.x = converter->all_atts.rx->value;
					e->radius.y = converter->all_atts.ry->value;
				} else {
					M_Circle *c = (M_Circle *)gf_node_new(converter->bifs_sg, TAG_MPEG4_Circle);
					shape->geometry = (GF_Node *)c;
					c->radius = converter->all_atts.r->value;
				}
				gf_node_register(shape->geometry, (GF_Node *)shape);
				shape->appearance = create_appearance(&converter->svg_props, converter->bifs_sg);
				gf_node_register(shape->appearance, (GF_Node *)shape);
			}
		}
		break;
		case TAG_SVG_defs:
		{
			/*defs content is kept but hidden inside a Switch with whichChoice=-1*/
			child = gf_node_new(converter->bifs_sg, TAG_MPEG4_Switch);
			gf_node_register(child, node);
			gf_node_list_add_child(&((GF_ParentNode*)node)->children, child);
			node = child;
			child = NULL;
			{
				M_Switch *sw = (M_Switch *)node;
				sw->whichChoice = -1;
			}
			converter->bifs_parent = node;
		}
		break;
		case TAG_SVG_solidColor:
		{
			child = gf_node_new(converter->bifs_sg, TAG_MPEG4_Shape);
			gf_node_register(child, node);
			gf_node_list_add_child(&((GF_ParentNode*)node)->children, child);
			node = child;
			child = NULL;
			converter->bifs_parent = node;
		}
		break;
		case TAG_SVG_animateTransform:
		{
			/*animateTransform: a TimeSensor drives interpolators routed to the
			  parent transform's fields; all involved nodes get IDs since routes
			  require identified nodes*/
			GF_Node *child_ts;
			if (!gf_node_get_id(node)) {
				gf_node_set_id(node, gf_sg_get_next_available_node_id(converter->bifs_sg), NULL);
			}
			child_ts = gf_node_new(converter->bifs_sg, TAG_MPEG4_TimeSensor);
			if (!gf_node_get_id(child_ts)) {
				gf_node_set_id(child_ts, gf_sg_get_next_available_node_id(converter->bifs_sg), NULL);
			}
			gf_node_register(child_ts, node);
			gf_node_list_add_child(&((GF_ParentNode *)node)->children, child_ts);
			{
				M_TimeSensor *ts = (M_TimeSensor *)child_ts;
				if (converter->all_atts.dur) {
					ts->cycleInterval = converter->all_atts.dur->clock_value;
				}
				if (converter->all_atts.repeatCount && converter->all_atts.repeatCount->type == SMIL_REPEATCOUNT_INDEFINITE) {
					ts->loop = 1;
				}
			}
			if (converter->all_atts.transform_type) {
				GF_FieldInfo fromField, toField;
				switch (*converter->all_atts.transform_type) {
				case SVG_TRANSFORM_ROTATE:
					/*rotate: PositionInterpolator2D -> rotation center, plus
					  ScalarInterpolator -> rotationAngle*/
					child = gf_node_new(converter->bifs_sg, TAG_MPEG4_PositionInterpolator2D);
					if (!gf_node_get_id(child)) {
						gf_node_set_id(child, gf_sg_get_next_available_node_id(converter->bifs_sg), NULL);
					}
					gf_node_register(child, node);
					gf_node_list_add_child(&((GF_ParentNode *)node)->children, child);
					gf_node_get_field_by_name(child_ts, "fraction_changed", &fromField);
					gf_node_get_field_by_name(child, "set_fraction", &toField);
					gf_sg_route_new(converter->bifs_sg, child_ts, fromField.fieldIndex, child, toField.fieldIndex);
					gf_node_get_field_by_name(child, "value_changed", &fromField);
					gf_node_get_field_by_name(node, "rotationAngle", &toField);
					gf_sg_route_new(converter->bifs_sg, child, fromField.fieldIndex, node, toField.fieldIndex);
					{
						M_PositionInterpolator2D *pi2d = (M_PositionInterpolator2D *)child;
						if (converter->all_atts.keyTimes) {
							SFFloat *g;
							u32 count, i;
							count = gf_list_count(*converter->all_atts.keyTimes);
							for (i = 0; i < count; i++) {
								Fixed *f =
								gf_list_get(*converter->all_atts.keyTimes, i);
								gf_sg_vrml_mf_append(&pi2d->key, GF_SG_VRML_MFFLOAT, &g);
								*g = *f;
							}
						}
						if (converter->all_atts.values) {
							SFVec2f *g;
							u32 count, i;
							count = gf_list_count(converter->all_atts.values->values);
							for (i = 0; i < count; i++) {
								SVG_Point_Angle *p;
								p = gf_list_get(converter->all_atts.values->values, i);
								gf_sg_vrml_mf_append(&pi2d->keyValue, GF_SG_VRML_MFVEC2F, &g);
								g->x = p->x;
								g->y = p->y;
							}
						}
					}
					child = gf_node_new(converter->bifs_sg, TAG_MPEG4_ScalarInterpolator);
					if (!gf_node_get_id(child)) {
						gf_node_set_id(child, gf_sg_get_next_available_node_id(converter->bifs_sg), NULL);
					}
					gf_node_register(child, node);
					gf_node_list_add_child(&((GF_ParentNode *)node)->children, child);
					gf_node_get_field_by_name(child_ts, "fraction_changed", &fromField);
					gf_node_get_field_by_name(child, "set_fraction", &toField);
					gf_sg_route_new(converter->bifs_sg, child_ts, fromField.fieldIndex, child, toField.fieldIndex);
					gf_node_get_field_by_name(child, "value_changed", &fromField);
					gf_node_get_field_by_name(node, "center", &toField);
					gf_sg_route_new(converter->bifs_sg, child, fromField.fieldIndex, node, toField.fieldIndex);
					{
						M_ScalarInterpolator *si = (M_ScalarInterpolator *)child;
						if (converter->all_atts.keyTimes) {
							SFFloat *g;
							u32 count, i;
							count = gf_list_count(*converter->all_atts.keyTimes);
							for (i = 0; i < count; i++) {
								Fixed *f = gf_list_get(*converter->all_atts.keyTimes, i);
								gf_sg_vrml_mf_append(&si->key, GF_SG_VRML_MFFLOAT, &g);
								*g = *f;
							}
						}
						if (converter->all_atts.values) {
							SFFloat *g;
							u32 count, i;
							count = gf_list_count(converter->all_atts.values->values);
							for (i = 0; i < count; i++) {
								SVG_Point_Angle *p;
								p = gf_list_get(converter->all_atts.values->values, i);
								gf_sg_vrml_mf_append(&si->keyValue, GF_SG_VRML_MFFLOAT, &g);
								*g = p->angle;
							}
						}
					}
					break;
				case SVG_TRANSFORM_SCALE:
				case SVG_TRANSFORM_TRANSLATE:
					/*scale/translate: single PositionInterpolator2D routed to the matching field*/
					child = gf_node_new(converter->bifs_sg, TAG_MPEG4_PositionInterpolator2D);
					if (!gf_node_get_id(child)) {
						gf_node_set_id(child,
						gf_sg_get_next_available_node_id(converter->bifs_sg), NULL);
					}
					gf_node_register(child, node);
					gf_node_list_add_child(&((GF_ParentNode *)node)->children, child);
					gf_node_get_field_by_name(child_ts, "fraction_changed", &fromField);
					gf_node_get_field_by_name(child, "set_fraction", &toField);
					gf_sg_route_new(converter->bifs_sg, child_ts, fromField.fieldIndex, child, toField.fieldIndex);
					gf_node_get_field_by_name(child, "value_changed", &fromField);
					if (*converter->all_atts.transform_type == SVG_TRANSFORM_SCALE) gf_node_get_field_by_name(node, "scale", &toField);
					else gf_node_get_field_by_name(node, "translation", &toField);
					gf_sg_route_new(converter->bifs_sg, child, fromField.fieldIndex, node, toField.fieldIndex);
					{
						M_PositionInterpolator2D *pi2d = (M_PositionInterpolator2D *)child;
						if (converter->all_atts.keyTimes) {
							SFFloat *g;
							u32 count, i;
							count = gf_list_count(*converter->all_atts.keyTimes);
							for (i = 0; i < count; i++) {
								Fixed *f = gf_list_get(*converter->all_atts.keyTimes, i);
								gf_sg_vrml_mf_append(&pi2d->key, GF_SG_VRML_MFFLOAT, &g);
								*g = *f;
							}
						}
						if (converter->all_atts.values) {
							SFVec2f *g;
							u32 count, i;
							count = gf_list_count(converter->all_atts.values->values);
							for (i = 0; i < count; i++) {
								SVG_Point *p;
								p = gf_list_get(converter->all_atts.values->values, i);
								gf_sg_vrml_mf_append(&pi2d->keyValue, GF_SG_VRML_MFVEC2F, &g);
								g->x = p->x;
								g->y = p->y;
							}
						}
					}
					break;
				default:
					fprintf(stdout, "Warning: transformation type not supported \n");
				}
			}
			//converter->bifs_parent = node;
		}
		break;
		default:
		{
			/*unsupported tag: create a placeholder Transform2D (note it is NOT
			  attached to the parent's child list - see commented line)*/
			fprintf(stdout, "Warning: element %s not supported \n", gf_node_get_class_name((GF_Node *)elt));
			child = gf_node_new(converter->bifs_sg, TAG_MPEG4_Transform2D);
			gf_node_register(child, node);
			//gf_node_list_add_child(&((GF_ParentNode*)node)->children, child);
			node = child;
			child = NULL;
			converter->bifs_parent = node;
		}
		break;
		}
		/*if the SVG element had an id, give the BIFS counterpart one too*/
		if (id_string) gf_node_set_id(converter->bifs_parent, gf_sg_get_next_available_node_id(converter->bifs_sg), NULL);//gf_node_get_name((GF_Node *)elt));
	}
	fprintf(stdout, "\t%s\n", converter->bifs_parent ? gf_node_get_class_name(converter->bifs_parent) : "none");
}
//-------------------------------
// dir should end with /
/* Initializes the GPAC runtime for the Android wrapper: copies the directory
 * paths into member buffers, binds the JNI environment/callback to the main
 * thread, loads GPAC.cfg and the module directory, creates the terminal,
 * forces fullscreen and connects to the startup URL.
 * Returns 0 on success, or the result of Quit(KErrGeneral) on any failure.
 * NOTE(review): all strcpy/strcat calls here are unbounded — assumes the
 * caller-provided paths fit in the destination members (presumably
 * GF_MAX_PATH buffers) — TODO confirm and consider snprintf. */
int CNativeWrapper::init(JNIEnv * env, void * bitmap, jobject * callback, int width, int height, const char * cfg_dir, const char * modules_dir, const char * cache_dir, const char * font_dir, const char * gui_dir, const char * urlToLoad) {
	LOGI("Initializing GPAC with URL=%s...", urlToLoad);
	strcpy(m_modules_dir, modules_dir);
	strcpy(m_cache_dir, cache_dir);
	strcpy(m_font_dir, font_dir);
	if (cfg_dir)
		strcpy(m_cfg_dir, cfg_dir);

	/* Full path of the config file; only meaningful when a cfg dir was given.
	 * NOTE(review): shadows nothing but is named like a member — confirm. */
	char m_cfg_filename[GF_MAX_PATH];
	if (m_cfg_dir) {
		LOGI("GPAC.cfg found in %s, force using it.\n", m_cfg_dir);
		strcpy(m_cfg_filename, m_cfg_dir);
		strcat(m_cfg_filename, "GPAC.cfg");
	}

	int m_Width = width;
	int m_Height = height;

	int first_launch = 0; /* NOTE(review): unused in this function */
	const char *opt;

	m_window = env;
	m_session = bitmap;

	/* Bind the JNI environment and a global ref to the Java callback to the
	 * main thread, so GPAC callbacks can reach back into Java. */
	if (!mainJavaEnv) mainJavaEnv = (JavaEnvTh *) gf_malloc(sizeof(JavaEnvTh));
	memset(mainJavaEnv, 0, sizeof(JavaEnvTh));
	setJavaEnv(mainJavaEnv, env, env->NewGlobalRef(*callback));
	if (pthread_setspecific( jni_thread_env_key, mainJavaEnv)) {
		LOGE("Failed to set specific thread data to jni_thread_env_key=%d for main thread !", jni_thread_env_key);
	}

	m_mx = gf_mx_new("Osmo4");

	//load config file
	LOGI("Loading User Config %s...", "GPAC.cfg");
	m_user.config = gf_cfg_init(m_cfg_dir ? m_cfg_filename : NULL, NULL);
	gf_set_progress_callback(this, Osmo4_progress_cbk);

	/* Load plug-in modules; without at least one module the terminal is useless. */
	opt = gf_cfg_get_key(m_user.config, "General", "ModulesDirectory");
	LOGI("loading modules in directory %s...", opt);
	m_user.modules = gf_modules_new(opt, m_user.config);
	if (!m_user.modules || !gf_modules_get_count(m_user.modules)) {
		LOGE("No modules found in directory %s !", opt);
		if (m_user.modules)
			gf_modules_del(m_user.modules);
		gf_cfg_del(m_user.config);
		m_user.config = NULL;
		return Quit(KErrGeneral);
	}

	/*we don't thread the visual compositor to be able to minimize the app and still have audio running*/
	m_user.init_flags = GF_TERM_NO_COMPOSITOR_THREAD;
	m_user.opaque = this;

	m_user.os_window_handler = m_window;
	m_user.os_display = m_session;
	m_user.EventProc = GPAC_EventProc;
	if (!javaVM) {
		LOGE("NO JAVA VM FOUND, m_user=%p !!!!\n", &m_user);
		return Quit(KErrGeneral);
	}

	LOGD("Loading GPAC terminal, m_user=%p...", &m_user);
	gf_sys_init(GF_MemTrackerNone);
	gf_fm_request_set_callback(this, on_fm_request);
	SetupLogs();
	m_term = gf_term_new(&m_user);
	if (!m_term) {
		/* Terminal creation failed: tear down modules and config before bailing. */
		LOGE("Cannot load GPAC Terminal with m_user=%p", &m_user);
		MessageBox("Cannot load GPAC terminal", "Fatal Error", GF_SERVICE_ERROR);
		gf_modules_del(m_user.modules);
		m_user.modules = NULL;
		gf_cfg_del(m_user.config);
		m_user.config = NULL;
		return Quit(KErrGeneral);
	}

	/*force fullscreen*/
	gf_term_set_option(m_term, GF_OPT_FULLSCREEN, 1);
	//setAudioEnvironment(javaVM);

	LOGD("Setting term size m_user=%p...", &m_user);
	gf_term_set_size(m_term, m_Width, m_Height);

	/* Fall back to the configured startup file when no URL was passed in. */
	opt = gf_cfg_get_key(m_user.config, "General", "StartupFile");
	LOGD("File loaded at startup=%s.", opt);

	if (!urlToLoad)
		urlToLoad = opt;
	if (urlToLoad) {
		LOGI("Connecting to %s...", urlToLoad);
		gf_term_connect(m_term, urlToLoad);
	}
	debug_log("init end");
	LOGD("Saving config file...\n");
	gf_cfg_save(m_user.config);
	LOGI("Initialization complete, config file saved.\n");
	return 0;
}
GF_Err Media_GetSample(GF_MediaBox *mdia, u32 sampleNumber, GF_ISOSample **samp, u32 *sIDX, Bool no_data, u64 *out_offset) { GF_Err e; u32 bytesRead; u32 dataRefIndex, chunkNumber; u64 offset, new_size; u8 isEdited; GF_SampleEntryBox *entry; if (!mdia || !mdia->information->sampleTable) return GF_BAD_PARAM; //OK, here we go.... if (sampleNumber > mdia->information->sampleTable->SampleSize->sampleCount) return GF_BAD_PARAM; //get the DTS e = stbl_GetSampleDTS(mdia->information->sampleTable->TimeToSample, sampleNumber, &(*samp)->DTS); if (e) return e; //the CTS offset if (mdia->information->sampleTable->CompositionOffset) { e = stbl_GetSampleCTS(mdia->information->sampleTable->CompositionOffset , sampleNumber, &(*samp)->CTS_Offset); if (e) return e; } else { (*samp)->CTS_Offset = 0; } //the size e = stbl_GetSampleSize(mdia->information->sampleTable->SampleSize, sampleNumber, &(*samp)->dataLength); if (e) return e; //the RAP if (mdia->information->sampleTable->SyncSample) { e = stbl_GetSampleRAP(mdia->information->sampleTable->SyncSample, sampleNumber, &(*samp)->IsRAP, NULL, NULL); if (e) return e; } else { //if no SyncSample, all samples are sync (cf spec) (*samp)->IsRAP = 1; } /*overwrite sync sample with sample dep if any*/ if (mdia->information->sampleTable->SampleDep) { u32 dependsOn, dependedOn, redundant; e = stbl_GetSampleDepType(mdia->information->sampleTable->SampleDep, sampleNumber, &dependsOn, &dependedOn, &redundant); if (!e) { if (dependsOn==1) (*samp)->IsRAP = 0; else if (dependsOn==2) (*samp)->IsRAP = 1; /*if not depended upon and redundant, mark as carousel sample*/ if ((dependedOn==2) && (redundant==1)) (*samp)->IsRAP = 2; /*TODO FIXME - we must enhance the IsRAP semantics to carry disposable info ... 
*/ } } /*get sync shadow*/ if (Media_IsSampleSyncShadow(mdia->information->sampleTable->ShadowSync, sampleNumber)) (*samp)->IsRAP = 2; //the data info if (!sIDX && !no_data) return GF_BAD_PARAM; if (!sIDX && !out_offset) return GF_OK; (*sIDX) = 0; e = stbl_GetSampleInfos(mdia->information->sampleTable, sampleNumber, &offset, &chunkNumber, sIDX, &isEdited); if (e) return e; //then get the DataRef e = Media_GetSampleDesc(mdia, *sIDX, &entry, &dataRefIndex); if (e) return e; // Open the data handler - check our mode, don't reopen in read only if this is //the same entry. In other modes we have no choice because the main data map is //divided into the original and the edition files if (mdia->mediaTrack->moov->mov->openMode == GF_ISOM_OPEN_READ) { //same as last call in read mode if (!mdia->information->dataHandler) { e = gf_isom_datamap_open(mdia, dataRefIndex, isEdited); if (e) return e; } if (mdia->information->dataEntryIndex != dataRefIndex) mdia->information->dataEntryIndex = dataRefIndex; } else { e = gf_isom_datamap_open(mdia, dataRefIndex, isEdited); if (e) return e; } if (out_offset) *out_offset = offset; if (no_data) return GF_OK; /*and finally get the data, include padding if needed*/ (*samp)->data = (char *) gf_malloc(sizeof(char) * ( (*samp)->dataLength + mdia->mediaTrack->padding_bytes) ); if (mdia->mediaTrack->padding_bytes) memset((*samp)->data + (*samp)->dataLength, 0, sizeof(char) * mdia->mediaTrack->padding_bytes); //check if we can get the sample (make sure we have enougth data...) 
new_size = gf_bs_get_size(mdia->information->dataHandler->bs); if (offset + (*samp)->dataLength > new_size) { //always refresh the size to avoid wrong info on http/ftp new_size = gf_bs_get_refreshed_size(mdia->information->dataHandler->bs); if (offset + (*samp)->dataLength > new_size) { mdia->BytesMissing = offset + (*samp)->dataLength - new_size; return GF_ISOM_INCOMPLETE_FILE; } } bytesRead = gf_isom_datamap_get_data(mdia->information->dataHandler, (*samp)->data, (*samp)->dataLength, offset); //if bytesRead != sampleSize, we have an IO err if (bytesRead < (*samp)->dataLength) { return GF_IO_ERR; } mdia->BytesMissing = 0; //finally rewrite the sample if this is an OD Access Unit if (mdia->handler->handlerType == GF_ISOM_MEDIA_OD) { e = Media_RewriteODFrame(mdia, *samp); if (e) return e; } /*FIXME: we don NOT rewrite sample if we have a encrypted track*/ else if (gf_isom_is_nalu_based_entry(mdia, entry) && !gf_isom_is_track_encrypted(mdia->mediaTrack->moov->mov, gf_isom_get_tracknum_from_id(mdia->mediaTrack->moov, mdia->mediaTrack->Header->trackID)) ) { e = gf_isom_nalu_sample_rewrite(mdia, *samp, sampleNumber, (GF_MPEGVisualSampleEntryBox *)entry); if (e) return e; } else if (mdia->mediaTrack->moov->mov->convert_streaming_text && ((mdia->handler->handlerType == GF_ISOM_MEDIA_TEXT) || (mdia->handler->handlerType == GF_ISOM_MEDIA_SUBT)) && (entry->type == GF_ISOM_BOX_TYPE_TX3G || entry->type == GF_ISOM_BOX_TYPE_TEXT) ) { u64 dur; if (sampleNumber == mdia->information->sampleTable->SampleSize->sampleCount) { dur = mdia->mediaHeader->duration - (*samp)->DTS; } else { stbl_GetSampleDTS(mdia->information->sampleTable->TimeToSample, sampleNumber+1, &dur); dur -= (*samp)->DTS; } e = gf_isom_rewrite_text_sample(*samp, *sIDX, (u32) dur); if (e) return e; } return GF_OK; }
/* Parses an SDP document and wires the described RTP session into the terminal.
 * When stream is NULL this is the root session SDP: the service is attached,
 * an IOD is extracted or reconstructed, and objects are set up. When stream is
 * non-NULL this is a per-channel SDP and only that channel is connected.
 * The (possibly rewritten) SDP is finally serialized and cached in
 * rtp->session_state_data ("data:application/sdp," prefix) for later session
 * migration. Parse/setup errors are reported through gf_term_on_connect. */
void RP_LoadSDP(RTPClient *rtp, char *sdp_text, u32 sdp_len, RTPStream *stream)
{
	GF_Err e;
	u32 i;
	GF_SDPInfo *sdp;
	Bool is_isma_1, has_iod;
	char *iod_str;
	GF_X_Attribute *att;

	is_isma_1 = 0;
	iod_str = NULL;
	sdp = gf_sdp_info_new();
	e = gf_sdp_info_parse(sdp, sdp_text, sdp_len);

	if (e == GF_OK) e = RP_SetupSDP(rtp, sdp, stream);

	/*root SDP, attach service*/
	if (! stream) {
		/*look for IOD*/
		if (e==GF_OK) {
			i=0;
			while ((att = (GF_X_Attribute*)gf_list_enum(sdp->Attributes, &i))) {
				/* "mpeg4-iod": base64 IOD carried in the SDP; keep the first one */
				if (!iod_str && !strcmp(att->Name, "mpeg4-iod") ) iod_str = att->Value;
				if (!is_isma_1 && !strcmp(att->Name, "isma-compliance") ) {
					if (!stricmp(att->Value, "1,1.0,1")) is_isma_1 = 1;
				}
			}
			/*force iod reconstruction with ISMA to use proper clock dependencies*/
			if (is_isma_1) iod_str = NULL;

			/*some folks have weird notions of MPEG-4 systems, they use hardcoded IOD
			with AAC ESD even when streaming AMR...*/
			if (iod_str) {
				RTPStream *ch;
				i=0;
				while ((ch = (RTPStream *)gf_list_enum(rtp->channels, &i))) {
					if ((ch->depacketizer->payt==GF_RTP_PAYT_AMR) || (ch->depacketizer->payt==GF_RTP_PAYT_AMR_WB) ) {
						/* AMR stream present: the advertised IOD cannot be trusted */
						iod_str = NULL;
						break;
					}
				}
			}
			if (!iod_str) {
				/* no usable IOD in the SDP: rebuild one if an MPEG-4 scene stream
				   is present (ch left pointing at that stream on break) */
				RTPStream *ch;
				Bool needs_iod = 0;
				i=0;
				while ((ch = (RTPStream *)gf_list_enum(rtp->channels, &i))) {
					if ((ch->depacketizer->payt==GF_RTP_PAYT_MPEG4) && (ch->depacketizer->sl_map.StreamType==GF_STREAM_SCENE)
//						|| ((ch->depacketizer->payt==GF_RTP_PAYT_3GPP_DIMS) && (ch->depacketizer->sl_map.StreamType==GF_STREAM_SCENE))
					) {
						needs_iod = 1;
						break;
					}
				}
				if (needs_iod) {
					rtp->session_desc = (GF_Descriptor *)RP_GetChannelOD(ch, 0);
				}
			}

			if (iod_str) e = RP_SDPLoadIOD(rtp, iod_str);
		}
		/*attach service*/
		has_iod = rtp->session_desc ? 1 : 0;
		gf_term_on_connect(rtp->service, NULL, e);
		/* without an IOD and outside single-media mode, declare objects manually */
		if (!e && !has_iod && !rtp->media_type) RP_SetupObjects(rtp);
		rtp->media_type = 0;
	}
	/*channel SDP */
	else {
		if (e) {
			gf_term_on_connect(rtp->service, stream->channel, e);
			stream->status = RTP_Unavailable;
		} else {
			/*connect*/
			RP_SetupChannel(stream, NULL);
		}
	}
	/*store SDP for later session migration*/
	if (sdp) {
		char *buf=NULL;
		gf_sdp_info_write(sdp, &buf);
		if (buf) {
			/* NOTE(review): assumes session_state_data was NULL before this call —
			   a previous buffer would leak here; confirm callers */
			rtp->session_state_data = gf_malloc(sizeof(char) * (strlen("data:application/sdp,") + strlen(buf) + 1) );
			strcpy(rtp->session_state_data, "data:application/sdp,");
			strcat(rtp->session_state_data, buf);
			gf_free(buf);
		}
		gf_sdp_info_del(sdp);
	}
}
GF_EXPORT
/* Materializes a "data:" URI (base64/base16 encoded jpg/png/svg image) held in
 * iri->string as a file in cache_dir, and rewrites iri to point at that file.
 *
 * iri           - in/out: XMLRI whose string may be a "data:" scheme URI; on
 *                 success its string is replaced by the cached file path and
 *                 its type set to XMLRI_STRING (original string is freed)
 * cache_dir     - directory receiving the decoded file
 * base_filename - document name used to derive the cache file name
 *
 * Returns GF_OK (including when iri is not a data: URI), GF_BAD_PARAM on a
 * malformed data: URI or unsupported mime type, GF_OUT_OF_MEM or GF_IO_ERR.
 */
GF_Err gf_node_store_embedded_data(XMLRI *iri, const char *cache_dir, const char *base_filename)
{
	char szFile[GF_MAX_PATH], buf[20], *sep, *data, *ext;
	u32 data_size, idx;
	Bool existing;
	FILE *f;

	if (!cache_dir || !base_filename || !iri || !iri->string || strncmp(iri->string, "data:", 5)) return GF_OK;

	/*handle "data:" scheme when cache is specified*/
	strcpy(szFile, cache_dir);
	data_size = (u32) strlen(szFile);
	if (szFile[data_size-1] != GF_PATH_SEPARATOR) {
		szFile[data_size] = GF_PATH_SEPARATOR;
		szFile[data_size+1] = 0;
	}
	/* append the document's base name (strip any directory part) */
	if (base_filename) {
		sep = strrchr(base_filename, GF_PATH_SEPARATOR);
#ifdef WIN32
		if (!sep) sep = strrchr(base_filename, '/');
#endif
		if (!sep) sep = (char *) base_filename;
		else sep += 1;
		strcat(szFile, sep);
	}
	sep = strrchr(szFile, '.');
	if (sep) sep[0] = 0;
	strcat(szFile, "_img_");

	/*get mime type*/
	sep = (char *)iri->string + 5;
	if (!strncmp(sep, "image/jpg", 9) || !strncmp(sep, "image/jpeg", 10)) ext = ".jpg";
	else if (!strncmp(sep, "image/png", 9)) ext = ".png";
	else if (!strncmp(sep, "image/svg+xml", 13)) ext = ".svg";
	else return GF_BAD_PARAM;

	data = NULL;
	/* reset: data_size still holds a path length from above; without this a
	   URI with no recognized encoding would fall through with data==NULL */
	data_size = 0;
	sep = strchr(iri->string, ';');
	/* malformed data: URI without an encoding marker (previously a NULL deref) */
	if (!sep) return GF_BAD_PARAM;
	if (!strncmp(sep, ";base64,", 8)) {
		sep += 8;
		data_size = 2 * (u32) strlen(sep);
		data = (char*)gf_malloc(sizeof(char)*data_size);
		if (!data) return GF_OUT_OF_MEM;
		data_size = gf_base64_decode(sep, (u32) strlen(sep), data, data_size);
	}
	else if (!strncmp(sep, ";base16,", 8)) {
		data_size = 2 * (u32) strlen(sep);
		data = (char*)gf_malloc(sizeof(char)*data_size);
		if (!data) return GF_OUT_OF_MEM;
		sep += 8;
		data_size = gf_base16_decode(sep, (u32) strlen(sep), data, data_size);
	}
	if (!data_size) {
		/* nothing decoded (or unsupported encoding): free any buffer (was leaked) */
		if (data) gf_free(data);
		return GF_OK;
	}
	iri->type = XMLRI_STRING;

	/* find a free cache slot, or an identical already-cached copy */
	existing = 0;
	idx = 0;
	while (1) {
		u32 res = check_existing_file(szFile, ext, data, data_size, idx);
		if (!res) break;
		if (res==2) {
			existing = 1;
			break;
		}
		idx++;
	}
	sprintf(buf, "%04X", idx);
	strcat(szFile, buf);
	strcat(szFile, ext);

	if (!existing) {
		f = gf_f64_open(szFile, "wb");
		if (!f) {
			gf_free(data);
			gf_free(iri->string);
			iri->string = NULL;
			return GF_IO_ERR;
		}
		gf_fwrite(data, data_size, 1, f);
		fclose(f);
	}
	gf_free(data);
	/* swap the data: URI for the cache file path */
	gf_free(iri->string);
	iri->string = gf_strdup(szFile);
	return GF_OK;
}
/* Serializes the current RTP session state back into an SDP and stores it in
 * rtp->session_state_data for session migration. For each unicast channel the
 * live transport state (ports, SSRC, NPT, RTP seq/time) is written as an
 * "x-stream-state" attribute; session-level x-session-id/x-session-name
 * attributes are refreshed from the active RTSP session. If a
 * "SessionMigrationServer" is configured, the SDP is pushed there (via the
 * download manager) or written to a local file. */
void RP_SaveSessionState(RTPClient *rtp)
{
	GF_Err e;
	char *sdp_buf;
	const char *opt;
	GF_X_Attribute*att;
	u32 i, j;
	GF_SDPInfo *sdp;
	RTSPSession *sess = NULL;

	if (!rtp->session_state_data) return;

	sdp_buf = rtp->session_state_data + strlen("data:application/sdp,");
	sdp = gf_sdp_info_new();
	e = gf_sdp_info_parse(sdp, sdp_buf, strlen(sdp_buf) );

	for (i=0; i<gf_list_count(rtp->channels); i++) {
		GF_SDPMedia *media = NULL;
		RTPStream *ch = gf_list_get(rtp->channels, i);
		if (!ch->control) continue;

		/* locate the media description matching this channel's control URL */
		for (j=0; j<gf_list_count(sdp->media_desc); j++) {
			u32 k;
			GF_SDPMedia *med = (GF_SDPMedia*)gf_list_get(sdp->media_desc, j);
			for (k=0; k<gf_list_count(med->Attributes); k++) {
				att = (GF_X_Attribute*)gf_list_get(med->Attributes, k);
				if (!stricmp(att->Name, "control") && (strstr(att->Value, ch->control)!=NULL) ) {
					media = med;
					break;
				}
			}
			if (media) break;
		}
		if (!media) continue;

		if (ch->rtp_ch->net_info.IsUnicast) {
			char szPorts[4096];
			u16 porta, portb;
			media->PortNumber = ch->rtp_ch->net_info.client_port_first;

			/*remove stale x-stream-state extension*/
			for (j=0; j<gf_list_count(media->Attributes); j++) {
				att = (GF_X_Attribute*)gf_list_get(media->Attributes, j);
				if (!stricmp(att->Name, "x-stream-state") ) {
					gf_free(att->Name);
					gf_free(att->Value);
					gf_free(att);
					gf_list_rem(media->Attributes, j);
					/* stay on the same index after removal (was skipping one entry) */
					j--;
				}
			}
			ch->current_start += gf_rtp_get_current_time(ch->rtp_ch);
			GF_SAFEALLOC(att, GF_X_Attribute);
			att->Name = gf_strdup("x-stream-state");
			porta = ch->rtp_ch->net_info.port_first ? ch->rtp_ch->net_info.port_first : ch->rtp_ch->net_info.client_port_first;
			portb = ch->rtp_ch->net_info.port_last ? ch->rtp_ch->net_info.port_last : ch->rtp_ch->net_info.client_port_last;
			sprintf(szPorts, "server-port=%d-%d;ssrc=%X;npt=%g;seq=%d;rtptime=%d",
				porta, portb,
				ch->rtp_ch->SenderSSRC,
				ch->current_start,
				ch->rtp_ch->rtp_first_SN,
				ch->rtp_ch->rtp_time
			);
			att->Value = gf_strdup(szPorts);
			gf_list_add(media->Attributes, att);

			if (ch->rtsp) sess = ch->rtsp;
		} else {
			media->PortNumber = ch->rtp_ch->net_info.port_first;
		}
	}
	/*remove stale x-session-id / x-session-name extensions*/
	for (j=0; j<gf_list_count(sdp->Attributes); j++) {
		att = (GF_X_Attribute*)gf_list_get(sdp->Attributes, j);
		if (!stricmp(att->Name, "x-session-id") || !stricmp(att->Name, "x-session-name") ) {
			gf_free(att->Name);
			gf_free(att->Value);
			gf_free(att);
			gf_list_rem(sdp->Attributes, j);
			/* stay on the same index after removal (was skipping one entry) */
			j--;
		}
	}
	if (sess) {
		char szURL[4096];
		if (sess->session_id) {
			GF_SAFEALLOC(att, GF_X_Attribute);
			att->Name = gf_strdup("x-session-id");
			att->Value = gf_strdup(sess->session_id);
			gf_list_add(sdp->Attributes, att);
		}
		GF_SAFEALLOC(att, GF_X_Attribute);
		att->Name = gf_strdup("x-session-name");
		sprintf(szURL, "rtsp://%s:%d/%s", sess->session->Server, sess->session->Port, sess->session->Service);
		att->Value = gf_strdup(szURL);
		gf_list_add(sdp->Attributes, att);
	}

	gf_free(rtp->session_state_data);
	/* avoid a dangling pointer if serialization below fails */
	rtp->session_state_data = NULL;
	sdp_buf = NULL;
	gf_sdp_info_write(sdp, &sdp_buf);
	if (sdp_buf) {
		rtp->session_state_data = gf_malloc(sizeof(char) * (strlen("data:application/sdp,") + strlen(sdp_buf) + 1) );
		strcpy(rtp->session_state_data, "data:application/sdp,");
		strcat(rtp->session_state_data, sdp_buf);
		gf_free(sdp_buf);
	}
	gf_sdp_info_del(sdp);

	opt = (char *) gf_modules_get_option((GF_BaseInterface *) gf_term_get_service_interface(rtp->service), "Streaming", "SessionMigrationServer");
	if (opt) {
		if (rtp->dnload) gf_term_download_del(rtp->dnload);
		rtp->dnload = NULL;

		if (strnicmp(opt, "http://", 7)) {
			rtp->dnload = gf_term_download_new(rtp->service, opt, GF_NETIO_SESSION_NOT_THREADED, MigrateSDP_NetIO, rtp);
			while (1) {
				char buffer[100];
				u32 read;
				e = gf_dm_sess_fetch_data(rtp->dnload, buffer, 100, &read);
				if (e && (e!=GF_IP_NETWORK_EMPTY)) break;
			}
			gf_term_download_del(rtp->dnload);
			rtp->dnload = NULL;
		} else {
			FILE *f = gf_f64_open(opt, "wt");
			if (f) {
				/* guard: serialization above may have failed, leaving no state */
				if (rtp->session_state_data) {
					sdp_buf = rtp->session_state_data + strlen("data:application/sdp,");
					gf_fwrite(sdp_buf, 1, strlen(sdp_buf), f);
				}
				fclose(f);
			} else {
				e = GF_IO_ERR;
			}
		}
		if (e<0) {
			/* report on the client's own service: sess may be NULL here
			   (previously dereferenced sess->owner->service unconditionally) */
			gf_term_on_message(rtp->service, e, "Error saving session state");
		}
	}
}
/* Resolves the scene graph providing an externproto library for the given URL.
 * Lookup order: hardcoded (built-in) protos, then protos already loaded in
 * this scene by OD ID, then (for string URLs) protos loaded anywhere up the
 * scene hierarchy with the same resolved path. If nothing matches, a load of
 * the library is started asynchronously and NULL is returned — the caller is
 * expected to retry once the media object is ready.
 * Returns the proto's scene graph, GF_SG_INTERNAL_PROTO for hardcoded protos,
 * or NULL (not yet available / being loaded). */
GF_SceneGraph *gf_inline_get_proto_lib(void *_is, MFURL *lib_url)
{
	GF_ProtoLink *pl;
	u32 i;
	GF_Scene *scene = (GF_Scene *) _is;
	if (!scene || !lib_url->count) return NULL;

	if (gf_inline_is_hardcoded_proto(lib_url, scene->root_od->term->user->config)) return GF_SG_INTERNAL_PROTO;

	/* already requested in this scene: match by OD ID */
	i=0;
	while ((pl = (GF_ProtoLink*)gf_list_enum(scene->extern_protos, &i))) {
		if (!pl->mo) continue;
		if (gf_mo_get_od_id(pl->url) != GF_MEDIA_EXTERNAL_ID) {
			if (gf_mo_get_od_id(pl->url) == gf_mo_get_od_id(lib_url)) {
				/* requested but its subscene is not loaded yet */
				if (!pl->mo->odm || !pl->mo->odm->subscene) return NULL;
				return pl->mo->odm->subscene->graph;
			}
		}
	}

	/*for string URL based protos, recursively check until top if the proto lib is
	not already present*/
	if (lib_url->vals[0].url) {
		GF_Scene *check_scene = scene;
		while (check_scene) {
			i=0;
			while ((pl = (GF_ProtoLink*)gf_list_enum(check_scene->extern_protos, &i))) {
				char *url1, *url2;
				Bool ok;
				if (!pl->mo) continue;
				if (gf_mo_get_od_id(pl->url) != GF_MEDIA_EXTERNAL_ID) continue;
				/*not the same url*/
				if (!gf_mo_is_same_url(pl->mo, lib_url, NULL, 0)) continue;
				/*check the url path is the same: resolve both against their base
				service URLs and compare*/
				url1 = gf_url_concatenate(pl->mo->odm->net_service->url, lib_url->vals[0].url);
				url2 = gf_url_concatenate(scene->root_od->net_service->url, lib_url->vals[0].url);
				ok = 0;
				if (url1 && url2 && !strcmp(url1, url2)) ok=1;
				if (url1) gf_free(url1);
				if (url2) gf_free(url2);
				if (!ok) continue;
				if (!pl->mo->odm || !pl->mo->odm->subscene) return NULL;
				return pl->mo->odm->subscene->graph;
			}
			check_scene = check_scene->root_od->parentscene;
		}
	}

	/*not found, let's try to load it*/
	/* NOTE(review): redundant — lib_url was already dereferenced and checked at entry */
	if (!lib_url || !lib_url->count) return NULL;

	/*internal, don't waste ressources*/
	if (gf_inline_is_hardcoded_proto(lib_url, scene->root_od->term->user->config)) return NULL;

	/* don't request the same library twice */
	i=0;
	while ((pl = (GF_ProtoLink*)gf_list_enum(scene->extern_protos, &i)) ) {
		if (pl->url == lib_url) return NULL;
		if (pl->url->vals[0].OD_ID && (pl->url->vals[0].OD_ID == lib_url->vals[0].OD_ID)) return NULL;
		if (pl->url->vals[0].url && lib_url->vals[0].url && !stricmp(pl->url->vals[0].url, lib_url->vals[0].url) ) return NULL;
	}
	/* register the pending request and start fetching the library */
	pl = (GF_ProtoLink*)gf_malloc(sizeof(GF_ProtoLink));
	pl->url = lib_url;
	gf_list_add(scene->extern_protos, pl);
	pl->mo = gf_scene_get_media_object(scene, lib_url, GF_MEDIA_OBJECT_SCENE, 0);
	/*this may already be destroyed*/
	if (pl->mo) gf_mo_play(pl->mo, 0, -1, 0);
	/*and return NULL*/
	return NULL;
}
GF_EXPORT
/* Loads interface InterfaceFamily (a 4CC) from plugin #whichplug.
 * A per-plugin cache ("PluginsCache" config section) stores the list of
 * interfaces each module exports so the library is only dlopen'ed when it can
 * actually provide the requested family; the cache is built on first load.
 * On success the interface is tracked in inst->interfaces and its HPLUG back-
 * pointer set. Returns the interface, or NULL on any failure (manager mutex is
 * always released; the library is unloaded on the error path). */
GF_BaseInterface *gf_modules_load_interface(GF_ModuleManager *pm, u32 whichplug, u32 InterfaceFamily)
{
	const char *opt;
	char szKey[32];
	ModuleInstance *inst;
	GF_BaseInterface *ifce;

	if (!pm) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_CORE, ("[Core] gf_modules_load_interface() : No Module Manager set\n"));
		return NULL;
	}
	gf_mx_p(pm->mutex);
	inst = (ModuleInstance *) gf_list_get(pm->plug_list, whichplug);
	if (!inst) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_CORE, ("[Core] gf_modules_load_interface() : no module %d exist.\n", whichplug));
		gf_mx_v(pm->mutex);
		return NULL;
	}
	GF_LOG(GF_LOG_DEBUG, GF_LOG_CORE, ("[Core] Load interface...%s\n", inst->name));
	/*look in cache*/
	if (!pm->cfg) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_CORE, ("[Core] No pm->cfg has been set !!!\n"));
		gf_mx_v(pm->mutex);
		return NULL;
	}
	opt = gf_cfg_get_key(pm->cfg, "PluginsCache", inst->name);
	if (opt) {
		/* cached: bail out early if this module doesn't export the family */
		const char * ifce_str = gf_4cc_to_str(InterfaceFamily);
		snprintf(szKey, 32, "%s:yes", ifce_str ? ifce_str : "(null)");
		if (!strstr(opt, szKey)) {
			gf_mx_v(pm->mutex);
			return NULL;
		}
	}
	if (!gf_modules_load_library(inst)) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_CORE, ("[Core] Cannot load library %s\n", inst->name));
		gf_cfg_set_key(pm->cfg, "PluginsCache", inst->name, "Invalid Plugin");
		gf_mx_v(pm->mutex);
		return NULL;
	}
	if (!inst->query_func) {
		GF_LOG(GF_LOG_ERROR, GF_LOG_CORE, ("[Core] Library %s missing GPAC export symbols\n", inst->name));
		gf_cfg_set_key(pm->cfg, "PluginsCache", inst->name, "Invalid Plugin");
		goto err_exit;
	}

	/*build cache: enumerate all interface 4CCs the module exports and store
	them as "XXXX:yes " entries*/
	if (!opt) {
		u32 i;
		Bool found = GF_FALSE;
		char *key;
		const u32 *si = inst->query_func();
		if (!si) {
			GF_LOG(GF_LOG_WARNING, GF_LOG_CORE, ("[Core] GPAC module %s has no supported interfaces - disabling\n", inst->name));
			gf_cfg_set_key(pm->cfg, "PluginsCache", inst->name, "Invalid Plugin");
			goto err_exit;
		}
		i=0;
		while (si[i]) i++;
		/* 10 bytes per entry: 4CC + ":yes " + NUL fits ("XXXX:yes " is 9 chars) */
		key = (char*)gf_malloc(sizeof(char) * 10 * i);
		key[0] = 0;
		i=0;
		while (si[i]) {
			snprintf(szKey, 32, "%s:yes ", gf_4cc_to_str(si[i]));
			strcat(key, szKey);
			if (InterfaceFamily==si[i]) found = GF_TRUE;
			i++;
		}
		gf_cfg_set_key(pm->cfg, "PluginsCache", inst->name, key);
		gf_free(key);
		if (!found) goto err_exit;
	}

	if (!inst->query_func || !inst->query_func(InterfaceFamily) ) goto err_exit;
	ifce = (GF_BaseInterface *) inst->load_func(InterfaceFamily);
	/*sanity check*/
	if (!ifce) goto err_exit;
	if (!ifce->module_name || (ifce->InterfaceType != InterfaceFamily)) {
		inst->destroy_func(ifce);
		goto err_exit;
	}
	gf_list_add(inst->interfaces, ifce);
	/*keep track of parent*/
	ifce->HPLUG = inst;
	GF_LOG(GF_LOG_DEBUG, GF_LOG_CORE, ("[Core] Load interface %s DONE.\n", inst->name));
	gf_mx_v(pm->mutex);
	return ifce;

err_exit:
	GF_LOG(GF_LOG_DEBUG, GF_LOG_CORE, ("[Core] Load interface %s exit label, freing library...\n", inst->name));
	gf_modules_unload_library(inst);
	GF_LOG(GF_LOG_DEBUG, GF_LOG_CORE, ("[Core] Load interface %s EXIT.\n", inst->name));
	gf_mx_v(pm->mutex);
	return NULL;
}
/* Selects the active FreeType face for the requested font name and styles.
 * Generic CSS/SVG family names (SERIF, SANS, TYPEWRITER, ...) are mapped to
 * the configured default faces. The in-memory cache is consulted first, then
 * the "FontEngine" config section which maps "<name>[ Bold][ Italic]" keys to
 * font files; styles are progressively relaxed (bold+italic -> bold ->
 * italic -> plain) until a match is found.
 * Returns GF_OK with ftpriv->active_face set, GF_NOT_SUPPORTED if no face
 * matches, or GF_IO_ERR if FreeType fails to open the mapped file. */
static GF_Err ft_set_font(GF_FontReader *dr, const char *OrigFontName, u32 styles)
{
	char *fname;
	char *fontName;
	const char *opt;
	FTBuilder *ftpriv = (FTBuilder *)dr->udta;

	fontName = (char *) OrigFontName;
	ftpriv->active_face = NULL;

	/* map generic family names to the configured default faces */
	if (!fontName || !strlen(fontName) || !stricmp(fontName, "SERIF")) {
		fontName = ftpriv->font_serif;
	}
	else if (!stricmp(fontName, "SANS") || !stricmp(fontName, "sans-serif")) {
		fontName = ftpriv->font_sans;
	}
	else if (!stricmp(fontName, "TYPEWRITER") || !stricmp(fontName, "monospace")) {
		fontName = ftpriv->font_fixed;
	}

	/*first look in loaded fonts*/
	ftpriv->active_face = ft_font_in_cache(ftpriv, fontName, styles);
	if (ftpriv->active_face) return GF_OK;

	/*check cfg file - gf_free(type is slow at loading fonts so we keep the
	(font name + styles)=fontfile associations in the cfg file*/
	if (!fontName || !strlen(fontName)) return GF_NOT_SUPPORTED;
	/* +50 leaves room for the " Bold"/" Italic" suffixes appended below.
	   NOTE(review): gf_malloc result is not checked here — confirm policy */
	fname = gf_malloc(sizeof(char) * (strlen(fontName) + 50));

	{
		/* style bits still being attempted; cleared progressively on retry */
		int checkStyles = (styles & GF_FONT_WEIGHT_BOLD) | (styles & GF_FONT_ITALIC);
checkFont:
		strcpy(fname, fontName);
		if (styles & GF_FONT_WEIGHT_BOLD & checkStyles) strcat(fname, " Bold");
		if (styles & GF_FONT_ITALIC & checkStyles) strcat(fname, " Italic");

		opt = gf_modules_get_option((GF_BaseInterface *)dr, "FontEngine", fname);
		if (opt) {
			/* mapping found: open the face and cache it */
			FT_Face face;
			gf_free(fname);
			if (FT_New_Face(ftpriv->library, opt, 0, & face )) return GF_IO_ERR;
			if (!face) return GF_IO_ERR;
			gf_list_add(ftpriv->loaded_fonts, face);
			ftpriv->active_face = face;
			return GF_OK;
		}
		if (checkStyles) {
			/* If we tried font + bold + italic -> we will try font + [bold | italic]
			If we tried font + [bold | italic] -> we try font
			*/
			if (checkStyles == (GF_FONT_WEIGHT_BOLD | GF_FONT_ITALIC))
				checkStyles = GF_FONT_WEIGHT_BOLD;
			else if (checkStyles == GF_FONT_WEIGHT_BOLD && (styles & GF_FONT_ITALIC))
				checkStyles = GF_FONT_ITALIC;
			else if (checkStyles == GF_FONT_WEIGHT_BOLD || checkStyles == GF_FONT_ITALIC)
				checkStyles = 0;
			goto checkFont;
		}
	}
	GF_LOG(GF_LOG_WARNING, GF_LOG_PARSER, ("[FreeType] Font '%s' (%s) not found\n", fontName, fname));
	gf_free(fname);
	return GF_NOT_SUPPORTED;
}
/* Creates the MPEG-TS muxer: allocates an AVFormatContext for avr->destination
 * (falling back to the mpegts format), creates and configures the audio/video
 * streams, opens both codecs and starts the interleaving thread.
 * Returns the muxer, or NULL when a codec cannot be opened.
 * NOTE(review): on those error paths ts, its format context and mutexes are
 * not freed — leaks; confirm whether callers treat NULL as fatal anyway. */
GF_AbstractTSMuxer * ts_amux_new(GF_AVRedirect * avr, u32 videoBitrateInBitsPerSec, u32 width, u32 height, u32 audioBitRateInBitsPerSec)
{
	GF_AbstractTSMuxer * ts = (GF_AbstractTSMuxer*)gf_malloc( sizeof(GF_AbstractTSMuxer));
	memset( ts, 0, sizeof( GF_AbstractTSMuxer));
	ts->oc = avformat_alloc_context();
	ts->destination = avr->destination;
	av_register_all();
	/* guess the container from the destination, default to mpegts */
	ts->oc->oformat = GUESS_FORMAT(NULL, avr->destination, NULL);
	if (!ts->oc->oformat) ts->oc->oformat = GUESS_FORMAT("mpegts", NULL, NULL);
	assert( ts->oc->oformat);
#if REDIRECT_AV_AUDIO_ENABLED
#ifdef FF_API_AVFRAME_LAVC
	ts->audio_st = avformat_new_stream(ts->oc, avr->audioCodec);
#else
	ts->audio_st = av_new_stream(ts->oc, avr->audioCodec->id);
#endif
	{
		AVCodecContext * c = ts->audio_st->codec;
		c->codec_id = avr->audioCodec->id;
		c->codec_type = AVMEDIA_TYPE_AUDIO;
		/* put sample parameters */
		c->sample_fmt = SAMPLE_FMT_S16;
		c->bit_rate = audioBitRateInBitsPerSec;
		c->sample_rate = avr->audioSampleRate;
		c->channels = 2;
		c->time_base.num = 1;
		c->time_base.den = 1000;
		// some formats want stream headers to be separate
		if (ts->oc->oformat->flags & AVFMT_GLOBALHEADER)
			c->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
	}
#endif
#ifdef FF_API_AVFRAME_LAVC
	ts->video_st = avformat_new_stream(ts->oc, avr->videoCodec);
#else
	ts->video_st = av_new_stream(ts->oc, avr->videoCodec->id);
#endif
	{
		AVCodecContext * c = ts->video_st->codec;
		c->codec_id = avr->videoCodec->id;
		c->codec_type = AVMEDIA_TYPE_VIDEO;
		/* put sample parameters */
		c->bit_rate = videoBitrateInBitsPerSec;
		/* resolution must be a multiple of two */
		c->width = width;
		c->height = height;
		/* time base: this is the fundamental unit of time (in seconds) in terms
		of which frame timestamps are represented. for fixed-fps content,
		timebase should be 1/framerate and timestamp increments should be
		identically 1. */
		c->time_base.den = STREAM_FRAME_RATE;
		c->time_base.num = 1;
		c->gop_size = 12; /* emit one intra frame every twelve frames at most */
		c->pix_fmt = STREAM_PIX_FMT;
		if (c->codec_id == CODEC_ID_MPEG2VIDEO) {
			/* just for testing, we also add B frames */
			c->max_b_frames = 2;
		}
		if (c->codec_id == CODEC_ID_MPEG1VIDEO) {
			/* Needed to avoid using macroblocks in which some coeffs overflow.
			This does not happen with normal video, it just happens here as the
			motion of the chroma plane does not match the luma plane. */
			c->mb_decision=2;
		}
		// some formats want stream headers to be separate
		if (ts->oc->oformat->flags & AVFMT_GLOBALHEADER)
			c->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
	}
	//av_set_pts_info(ts->audio_st, 33, 1, audioBitRateInBitsPerSec);
#ifndef AVIO_FLAG_WRITE
	/* set the output parameters (must be done even if no parameters). */
	if (av_set_parameters(ts->oc, NULL) < 0) {
		fprintf(stderr, "Invalid output format parameters\n");
		return NULL;
	}
#endif
	dump_format(ts->oc, 0, avr->destination, 1);
	GF_LOG(GF_LOG_INFO, GF_LOG_MODULE, ("[AVRedirect] DUMPING to %s...\n", ts->destination));
#if (LIBAVCODEC_VERSION_MAJOR<55)
	if (avcodec_open(ts->video_st->codec, avr->videoCodec) < 0) {
#else
	if (avcodec_open2(ts->video_st->codec, avr->videoCodec, NULL) < 0) {
#endif
		GF_LOG(GF_LOG_ERROR, GF_LOG_MODULE, ("[AVRedirect] failed to open video codec\n"));
		return NULL;
	}
#if REDIRECT_AV_AUDIO_ENABLED
#if (LIBAVCODEC_VERSION_MAJOR<55)
	if (avcodec_open(ts->audio_st->codec, avr->audioCodec) < 0) {
#else
	if (avcodec_open2(ts->audio_st->codec, avr->audioCodec, NULL) < 0) {
#endif
		GF_LOG(GF_LOG_ERROR, GF_LOG_MODULE, ("[AVRedirect] failed to open audio codec\n"));
		return NULL;
	}
	ts->audioMx = gf_mx_new("TS_AudioMx");
#endif
	ts->videoMx = gf_mx_new("TS_VideoMx");
	/* start the thread draining audioPackets/videoPackets into the TS */
	ts->tsEncodingThread = gf_th_new("ts_interleave_thread_run");
	ts->encode = GF_TRUE;
	ts->audioPackets = NULL;
	ts->videoPackets = NULL;
	gf_th_run(ts->tsEncodingThread, ts_interleave_thread_run, ts);
	return ts;
}

/* Stops the interleaving thread, closes codecs, frees the streams and the
 * format context. Safe to call with NULL. */
void ts_amux_del(GF_AbstractTSMuxer * muxerToDelete) {
	if (!muxerToDelete)
		return;
	muxerToDelete->encode = GF_FALSE;
	/* give the interleaving thread time to notice encode==GF_FALSE */
	gf_sleep(100);
	gf_th_stop(muxerToDelete->tsEncodingThread);
	muxerToDelete->tsEncodingThread = NULL;
#if REDIRECT_AV_AUDIO_ENABLED
	gf_mx_del(muxerToDelete->audioMx);
	muxerToDelete->audioMx = NULL;
#endif
	gf_mx_del(muxerToDelete->videoMx);
	muxerToDelete->videoMx = NULL;
	if (muxerToDelete->video_st) {
		avcodec_close(muxerToDelete->video_st->codec);
		muxerToDelete->video_st = NULL;
	}
#if REDIRECT_AV_AUDIO_ENABLED
	if (muxerToDelete->audio_st) {
		avcodec_close(muxerToDelete->audio_st->codec);
		muxerToDelete->audio_st = NULL;
	}
#endif
	/* write the trailer, if any.  the trailer must be written
	 * before you close the CodecContexts open when you wrote the
	 * header; otherwise write_trailer may try to use memory that
	 * was freed on av_codec_close() */
	if (muxerToDelete->oc) {
		u32 i;
		/* free the streams */
		for (i = 0; i < muxerToDelete->oc->nb_streams; i++) {
			av_freep(&muxerToDelete->oc->streams[i]->codec);
			av_freep(&muxerToDelete->oc->streams[i]);
		}
		/* free the stream */
		av_free(muxerToDelete->oc);
		muxerToDelete->oc = NULL;
	}
}

/* Queues one encoded audio frame (data/encoded bytes) on ts->audioPackets,
 * rescaling pts from the codec time base to the stream time base (preferring
 * the codec's coded_frame pts when available).
 * Returns GF_TRUE when the muxer is shutting down (frame dropped), GF_FALSE
 * once the packet is queued. The data pointer is referenced, not copied. */
Bool ts_encode_audio_frame(GF_AbstractTSMuxer * ts, uint8_t * data, int encoded, u64 pts) {
	AVPacketList *pl;
	AVPacket * pkt;
	if (!ts->encode)
		return GF_TRUE;
	pl = (AVPacketList*)gf_malloc(sizeof(AVPacketList));
	pl->next = NULL;
	pkt = &(pl->pkt);
	av_init_packet(pkt);
	assert( ts->audio_st);
	assert( ts->audio_st->codec);
	pkt->flags = 0;
	if (ts->audio_st->codec->coded_frame) {
		if (ts->audio_st->codec->coded_frame->key_frame)
			pkt->flags = AV_PKT_FLAG_KEY;
		if (ts->audio_st->codec->coded_frame->pts != AV_NOPTS_VALUE) {
			/* codec-provided pts wins over the caller's */
			pkt->pts = av_rescale_q(ts->audio_st->codec->coded_frame->pts,
			                        ts->audio_st->codec->time_base, ts->audio_st->time_base);
		} else {
			if (pts == AV_NOPTS_VALUE)
				pkt->pts = AV_NOPTS_VALUE;
			else {
				pkt->pts = av_rescale_q(pts, ts->audio_st->codec->time_base, ts->audio_st->time_base);
			}
		}
	} else {
		if (pts == AV_NOPTS_VALUE)
			pkt->pts = AV_NOPTS_VALUE;
		else
			pkt->pts = av_rescale_q(pts, ts->audio_st->codec->time_base, ts->audio_st->time_base);
	}
	pkt->stream_index= ts->audio_st->index;
	pkt->data = data;
	pkt->size = encoded;
	//fprintf(stderr, "AUDIO PTS="LLU" was: "LLU" (%p)\n", pkt->pts, pts, pl);
	/* append to the audio queue under its mutex */
	gf_mx_p(ts->audioMx);
	if (!ts->audioPackets)
		ts->audioPackets = pl;
	else {
		AVPacketList * px = ts->audioPackets;
		while (px->next)
			px = px->next;
		px->next = pl;
	}
	gf_mx_v(ts->audioMx);
	return GF_FALSE;
}

/* Queues one encoded video frame on ts->videoPackets, deriving pts from the
 * codec's coded_frame. Returns GF_TRUE when the muxer is shutting down (frame
 * dropped), GF_FALSE once queued. The data pointer is referenced, not copied. */
Bool ts_encode_video_frame(GF_AbstractTSMuxer* ts, uint8_t* data, int encoded) {
	AVPacketList *pl;
	AVPacket * pkt;
	if (!ts->encode)
		return GF_TRUE;
	pl = (AVPacketList*)gf_malloc(sizeof(AVPacketList));
	pl->next = NULL;
	pkt = &(pl->pkt);
	av_init_packet(pkt);
	if (ts->video_st->codec->coded_frame->pts != AV_NOPTS_VALUE) {
		//pkt->pts= av_rescale_q(ts->video_st->codec->coded_frame->pts, ts->video_st->codec->time_base, ts->video_st->time_base);
		/* NOTE(review): manual rescale assumes the coded_frame pts is in
		   milliseconds — confirm against the encoder setup above */
		pkt->pts = ts->video_st->codec->coded_frame->pts * ts->video_st->time_base.den / ts->video_st->time_base.num / 1000;
		//pkt->pts = ts->video_st->codec->coded_frame->pts;
	}
	if (ts->video_st->codec->coded_frame->key_frame)
		pkt->flags |= AV_PKT_FLAG_KEY;
	pkt->stream_index= ts->video_st->index;
	pkt->data= data;
	pkt->size= encoded;
	//fprintf(stderr, "VIDEO PTS="LLU" was: "LLU" (%p)\n", pkt->pts, ts->video_st->codec->coded_frame->pts, pl);
	/* append to the video queue under its mutex */
	gf_mx_p(ts->videoMx);
	if (!ts->videoPackets)
		ts->videoPackets = pl;
	else {
		AVPacketList * px = ts->videoPackets;
		while (px->next)
			px = px->next;
		px->next = pl;
	}
	gf_mx_v(ts->videoMx);
	return GF_FALSE;
}
/* Maps one parsed "Header: Value" pair onto the matching field of the RTSP
 * response object. String headers are gf_strdup'ed (owned by rsp), numeric
 * headers are sscanf'ed in place, and the structured headers (Transport,
 * Session, Range, RTP-Info) are tokenized and parsed into sub-objects.
 * Unknown headers not starting with "x-" are silently ignored. */
void gf_rtsp_set_response_value(GF_RTSPResponse *rsp, char *Header, char *Value)
{
	char LineBuffer[400], buf[1000], param_name[100], param_val[1000];
	s32 LinePos, Pos, nPos, s_val;
	GF_RTPInfo *info;
	GF_RTSPTransport *trans;
	GF_X_Attribute *x_Att;

	if (!stricmp(Header, "Accept")) rsp->Accept = gf_strdup(Value);
	else if (!stricmp(Header, "Accept-Encoding")) rsp->Accept_Encoding = gf_strdup(Value);
	else if (!stricmp(Header, "Accept-Language")) rsp->Accept_Language = gf_strdup(Value);
	else if (!stricmp(Header, "Allow")) rsp->Allow = gf_strdup(Value);
	else if (!stricmp(Header, "Authorization")) rsp->Authorization = gf_strdup(Value);
	else if (!stricmp(Header, "Bandwidth")) sscanf(Value, "%u", &rsp->Bandwidth);
	else if (!stricmp(Header, "Blocksize")) sscanf(Value, "%u", &rsp->Blocksize);
	else if (!stricmp(Header, "Cache-Control")) rsp->Cache_Control = gf_strdup(Value);
	else if (!stricmp(Header, "com.ses.streamID")) sscanf(Value, "%u", &rsp->StreamID);
	else if (!stricmp(Header, "Conference")) rsp->Conference = gf_strdup(Value);
	else if (!stricmp(Header, "Connection")) rsp->Connection = gf_strdup(Value);
	else if (!stricmp(Header, "Content-Base")) rsp->Content_Base = gf_strdup(Value);
	else if (!stricmp(Header, "Content-Encoding")) rsp->Content_Encoding = gf_strdup(Value);
	else if (!stricmp(Header, "Content-Length")) sscanf(Value, "%u", &rsp->Content_Length);
	else if (!stricmp(Header, "Content-Language")) rsp->Content_Language = gf_strdup(Value);
	else if (!stricmp(Header, "Content-Location")) rsp->Content_Location = gf_strdup(Value);
	else if (!stricmp(Header, "Content-Type")) rsp->Content_Type = gf_strdup(Value);
	else if (!stricmp(Header, "CSeq")) sscanf(Value, "%u", &rsp->CSeq);
	else if (!stricmp(Header, "Date")) rsp->Date = gf_strdup(Value);
	else if (!stricmp(Header, "Expires")) rsp->Expires = gf_strdup(Value);
	else if (!stricmp(Header, "From")) rsp->From = gf_strdup(Value);
	else if (!stricmp(Header, "Host")) rsp->Host = gf_strdup(Value);
	else if (!stricmp(Header, "If-Match")) rsp->If_Match = gf_strdup(Value);
	else if (!stricmp(Header, "If-Modified-Since")) rsp->If_Modified_Since = gf_strdup(Value);
	else if (!stricmp(Header, "Last-Modified")) rsp->Last_Modified = gf_strdup(Value);
	else if (!stricmp(Header, "Location")) rsp->Location = gf_strdup(Value);
	else if (!stricmp(Header, "Proxy-Authenticate")) rsp->Proxy_Authenticate = gf_strdup(Value);
	else if (!stricmp(Header, "Proxy-Require")) rsp->Proxy_Require = gf_strdup(Value);
	else if (!stricmp(Header, "Public")) rsp->Public = gf_strdup(Value);
	else if (!stricmp(Header, "Referer")) rsp->Referer = gf_strdup(Value);
	else if (!stricmp(Header, "Require")) rsp->Require = gf_strdup(Value);
	else if (!stricmp(Header, "Retry-After")) rsp->Retry_After = gf_strdup(Value);
	else if (!stricmp(Header, "Scale")) sscanf(Value, "%lf", &rsp->Scale);
	else if (!stricmp(Header, "Server")) rsp->Server = gf_strdup(Value);
	else if (!stricmp(Header, "Speed")) sscanf(Value, "%lf", &rsp->Speed);
	else if (!stricmp(Header, "Timestamp")) rsp->Timestamp = gf_strdup(Value);
	else if (!stricmp(Header, "Unsupported")) rsp->Unsupported = gf_strdup(Value);
	else if (!stricmp(Header, "User-Agent")) rsp->User_Agent = gf_strdup(Value);
	else if (!stricmp(Header, "Vary")) rsp->Vary = gf_strdup(Value);
	/*fix: these two previously overwrote rsp->Vary (copy-paste bug)*/
	else if (!stricmp(Header, "Via")) rsp->Via = gf_strdup(Value);
	/*fix: also accept the standard RFC 2326 spelling with a dash*/
	else if (!stricmp(Header, "WWW_Authenticate") || !stricmp(Header, "WWW-Authenticate")) rsp->WWW_Authenticate = gf_strdup(Value);
	else if (!stricmp(Header, "Transport")) {
		/*one transport spec per line - parse each into a GF_RTSPTransport*/
		LinePos = 0;
		while (1) {
			LinePos = gf_token_get(Value, LinePos, "\r\n", LineBuffer, 400);
			if (LinePos <= 0) return;
			/*fix: parse the token just extracted - parsing Value here would
			  re-parse the first transport spec on every iteration*/
			trans = gf_rtsp_transport_parse(LineBuffer);
			if (trans) gf_list_add(rsp->Transports, trans);
		}
	}
	//Session
	else if (!stricmp(Header, "Session")) {
		LinePos = gf_token_get(Value, 0, ";\r\n", LineBuffer, 400);
		rsp->Session = gf_strdup(LineBuffer);
		//get timeout if any
		if (Value[LinePos] == ';') {
			LinePos += 1;
			/*LinePos = */gf_token_get(Value, LinePos, ";\r\n", LineBuffer, 400);
			rsp->SessionTimeOut = 60; //default per RFC 2326
			sscanf(LineBuffer, "timeout=%u", &rsp->SessionTimeOut);
		}
	}
	//Range
	else if (!stricmp(Header, "Range")) rsp->Range = gf_rtsp_range_parse(Value);
	//RTP-Info: comma-separated stream entries of ";"-separated name=value params
	else if (!stricmp(Header, "RTP-Info")) {
		LinePos = 0;
		while (1) {
			LinePos = gf_token_get(Value, LinePos, ",\r\n", LineBuffer, 400);
			if (LinePos <= 0) return;
			GF_SAFEALLOC(info, GF_RTPInfo);
			if (!info) return;
			Pos = 0;
			while (1) {
				Pos = gf_token_get(LineBuffer, Pos, " ;", buf, 1000);
				if (Pos <= 0) break;
				/*fix: reset param_val so a valueless parameter does not
				  reuse the previous parameter's stale value*/
				param_val[0] = 0;
				if (strstr(buf, "=")) {
					nPos = gf_token_get(buf, 0, "=", param_name, 100);
					nPos += 1;
					/*nPos = */gf_token_get(buf, nPos, "", param_val, 1000);
				} else {
					strcpy(param_name, buf);
				}
				if (!stricmp(param_name, "url")) info->url = gf_strdup(param_val);
				else if (!stricmp(param_name, "seq")) sscanf(param_val, "%u", &info->seq);
				else if (!stricmp(param_name, "rtptime")) {
					/*parsed as signed then clamped: negative values map to 0*/
					sscanf(param_val, "%i", &s_val);
					info->rtp_time = (s_val>0) ? s_val : 0;
				}
				else if (!stricmp(param_name, "ssrc")) {
					sscanf(param_val, "%i", &s_val);
					info->ssrc = (s_val>0) ? s_val : 0;
				}
			}
			gf_list_add(rsp->RTP_Infos, info);
		}
	}
	//check for extended attributes
	else if (!strnicmp(Header, "x-", 2)) {
		x_Att = (GF_X_Attribute*)gf_malloc(sizeof(GF_X_Attribute));
		x_Att->Name = gf_strdup(Header+2);
		x_Att->Value = NULL;
		if (Value && strlen(Value)) x_Att->Value = gf_strdup(Value);
		gf_list_add(rsp->Xtensions, x_Att);
	}
	//unknown field - skip it
}
static GF_Err AC3_ChannelGetSLP(GF_InputService *plug, LPNETCHANNEL channel, char **out_data_ptr, u32 *out_data_size, GF_SLHeader *out_sl_hdr, Bool *sl_compressed, GF_Err *out_reception_status, Bool *is_new_data) { u64 pos, start_from; Bool sync; GF_BitStream *bs; GF_AC3Header hdr; AC3Reader *read = plug->priv; *out_reception_status = GF_OK; *sl_compressed = 0; *is_new_data = 0; memset(&read->sl_hdr, 0, sizeof(GF_SLHeader)); read->sl_hdr.randomAccessPointFlag = 1; read->sl_hdr.compositionTimeStampFlag = 1; if (read->ch != channel) return GF_STREAM_NOT_FOUND; /*fetching es data*/ if (read->done) { *out_reception_status = GF_EOS; return GF_OK; } if (!read->data) { if (!read->stream) { *out_data_ptr = NULL; *out_data_size = 0; return GF_OK; } bs = gf_bs_from_file(read->stream, GF_BITSTREAM_READ); *is_new_data = 1; fetch_next: pos = gf_f64_tell(read->stream); sync = gf_ac3_parser_bs(bs, &hdr, 0); if (!sync) { gf_bs_del(bs); if (!read->dnload) { *out_reception_status = GF_EOS; read->done = 1; } else { gf_f64_seek(read->stream, pos, SEEK_SET); *out_reception_status = GF_OK; } return GF_OK; } if (!hdr.framesize) { gf_bs_del(bs); *out_reception_status = GF_EOS; read->done = 1; return GF_OK; } read->data_size = hdr.framesize; read->nb_samp = 1536; /*we're seeking*/ if (read->start_range && read->duration) { start_from = (u32) (read->start_range * read->sample_rate); if (read->current_time + read->nb_samp < start_from) { read->current_time += read->nb_samp; goto fetch_next; } else { read->start_range = 0; } } read->sl_hdr.compositionTimeStamp = read->current_time; read->data = gf_malloc(sizeof(char) * (read->data_size+read->pad_bytes)); gf_bs_read_data(bs, read->data, read->data_size); if (read->pad_bytes) memset(read->data + read->data_size, 0, sizeof(char) * read->pad_bytes); gf_bs_del(bs); } *out_sl_hdr = read->sl_hdr; *out_data_ptr = read->data; *out_data_size = read->data_size; return GF_OK; }
/* Serializes an RTSP response object into a freshly allocated text buffer
 * (status line, all set headers, RTP-Info / Transport lists, then the body).
 * On success *out_buffer owns the allocated string and *out_size is its
 * length; the caller frees it. The RTSP_WRITE_* macros grow the buffer by
 * RTSP_WRITE_STEPALLOC as needed, updating 'buffer', 'size' and 'cur_pos'. */
GF_Err RTSP_WriteResponse(GF_RTSPSession *sess, GF_RTSPResponse *rsp, unsigned char **out_buffer, u32 *out_size)
{
	u32 i, cur_pos, size, count;
	char *buffer, temp[50];
	GF_RTSPTransport *trans;
	GF_X_Attribute *att;
	GF_RTPInfo *info;

	*out_buffer = NULL;

	size = RTSP_WRITE_STEPALLOC;
	buffer = (char *) gf_malloc(size);
	cur_pos = 0;

	//RTSP status line: "RTSP/1.0 <code> <reason>"
	RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, GF_RTSP_VERSION);
	RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, " ");
	RTSP_WRITE_INT(buffer, size, cur_pos, rsp->ResponseCode, 0);
	RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, " ");
	RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, gf_rtsp_nc_to_string(rsp->ResponseCode));
	RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "\r\n");

	//all headers
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Accept", rsp->Accept);
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Accept-Encoding", rsp->Accept_Encoding);
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Accept-Language", rsp->Accept_Language);
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Allow", rsp->Allow);
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Authorization", rsp->Authorization);
	if (rsp->Bandwidth) {
		RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "Bandwidth: ");
		RTSP_WRITE_INT(buffer, size, cur_pos, rsp->Bandwidth, 0);
		RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "\r\n");
	}
	if (rsp->Blocksize) {
		RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "Blocksize: ");
		RTSP_WRITE_INT(buffer, size, cur_pos, rsp->Blocksize, 0);
		RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "\r\n");
	}
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Cache-Control", rsp->Cache_Control);
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Conference", rsp->Conference);
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Connection", rsp->Connection);
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Content-Base", rsp->Content_Base);
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Content-Encoding", rsp->Content_Encoding);
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Content-Language", rsp->Content_Language);
	//if we have a body write the content length
	if (rsp->body) {
		RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "Content-Length: ");
		RTSP_WRITE_INT(buffer, size, cur_pos, (u32) strlen(rsp->body), 0);
		RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "\r\n");
	}
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Content-Location", rsp->Content_Location);
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Content-Type", rsp->Content_Type);
	//write the CSeq - use the RESPONSE CSeq
	RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "CSeq: ");
	RTSP_WRITE_INT(buffer, size, cur_pos, rsp->CSeq, 0);
	RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "\r\n");
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Date", rsp->Date);
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Expires", rsp->Expires);
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "From", rsp->From);
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Host", rsp->Host);
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "If-Match", rsp->If_Match);
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "If-Modified-Since", rsp->If_Modified_Since);
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Last-Modified", rsp->Last_Modified);
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Location", rsp->Location);
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Proxy-Authenticate", rsp->Proxy_Authenticate);
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Proxy-Require", rsp->Proxy_Require);
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Public", rsp->Public);
	//Range, only NPT
	if (rsp->Range && !rsp->Range->UseSMPTE) {
		/*fix: RFC 2326 uses "npt=" not "npt:"*/
		RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "Range: npt=");
		RTSP_WRITE_FLOAT_WITHOUT_CHECK(buffer, size, cur_pos, rsp->Range->start);
		RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "-");
		if (rsp->Range->end > rsp->Range->start) {
			RTSP_WRITE_FLOAT_WITHOUT_CHECK(buffer, size, cur_pos, rsp->Range->end);
		}
		RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "\r\n");
	}
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Referer", rsp->Referer);
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Require", rsp->Require);
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Retry-After", rsp->Retry_After);
	//RTP Infos
	count = gf_list_count(rsp->RTP_Infos);
	if (count) {
		/*fix: header name per RFC 2326 is "RTP-Info" (this file's parser
		  reads "RTP-Info" too); was "RTPInfo"*/
		RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "RTP-Info: ");
		for (i=0; i<count; i++) {
			//line separator for headers
			if (i) RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "\r\n ,");
			info = (GF_RTPInfo*)gf_list_get(rsp->RTP_Infos, i);
			if (info->url) {
				RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "url=");
				RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, info->url);
				RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, ";");
			}
			RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "seq=");
			RTSP_WRITE_INT(buffer, size, cur_pos, info->seq, 0);
			RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, ";rtptime=");
			RTSP_WRITE_INT(buffer, size, cur_pos, info->rtp_time, 0);
		}
		RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "\r\n");
	}
	if (rsp->Scale != 0.0) {
		RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "Scale: ");
		RTSP_WRITE_FLOAT_WITHOUT_CHECK(buffer, size, cur_pos, rsp->Scale);
		RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "\r\n");
	}
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Server", rsp->Server);
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Session", rsp->Session);
	if (rsp->Speed != 0.0) {
		/*fix: was writing a second "Scale: " header for the Speed value*/
		RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "Speed: ");
		RTSP_WRITE_FLOAT_WITHOUT_CHECK(buffer, size, cur_pos, rsp->Speed);
		RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "\r\n");
	}
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Timestamp", rsp->Timestamp);
	//transport info
	count = gf_list_count(rsp->Transports);
	if (count) {
		RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "Transport: ");
		for (i=0; i<count; i++) {
			//line separator for headers
			if (i) RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "\r\n ,");
			trans = (GF_RTSPTransport*)gf_list_get(rsp->Transports, i);
			//then write the structure
			RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, trans->Profile);
			RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, (trans->IsUnicast ? ";unicast" : ";multicast"));
			if (trans->destination) {
				RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, ";destination=");
				RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, trans->destination);
			}
			if (trans->source) {
				RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, ";source=");
				RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, trans->source);
			}
			if (trans->IsRecord) {
				RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, ";mode=RECORD");
				if (trans->Append) RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, ";append");
			}
			if (trans->IsInterleaved) {
				RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, ";interleaved=");
				RTSP_WRITE_INT(buffer, size, cur_pos, trans->rtpID, 0);
				if (trans->rtcpID != trans->rtpID) {
					RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "-");
					RTSP_WRITE_INT(buffer, size, cur_pos, trans->rtcpID, 0);
				}
			}
			//multicast specific
			if (!trans->IsUnicast) {
				if (trans->MulticastLayers) {
					RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, ";layers=");
					RTSP_WRITE_INT(buffer, size, cur_pos, trans->MulticastLayers, 0);
				}
				if (trans->TTL) {
					RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, ";ttl=");
					RTSP_WRITE_INT(buffer, size, cur_pos, trans->TTL, 0);
				}
			}
			if (trans->port_first) {
				RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, (const char *) (trans->IsUnicast ? ";server_port=" : ";port="));
				RTSP_WRITE_INT(buffer, size, cur_pos, trans->port_first, 0);
				RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "-");
				RTSP_WRITE_INT(buffer, size, cur_pos, trans->port_last, 0);
			}
			if (trans->IsUnicast && trans->client_port_first) {
				RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, ";client_port=");
				RTSP_WRITE_INT(buffer, size, cur_pos, trans->client_port_first, 0);
				RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "-");
				RTSP_WRITE_INT(buffer, size, cur_pos, trans->client_port_last, 0);
			}
			if (trans->SSRC) {
				RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, ";ssrc=");
				RTSP_WRITE_INT(buffer, size, cur_pos, trans->SSRC, 0);
			}
		}
		//done with transport
		RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "\r\n");
	}
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Unsupported", rsp->Unsupported);
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "User-Agent", rsp->User_Agent);
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Vary", rsp->Vary);
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "Via", rsp->Via);
	RTSP_WRITE_HEADER(buffer, size, cur_pos, "WWW-Authenticate", rsp->WWW_Authenticate);
	//eXtensions
	count = gf_list_count(rsp->Xtensions);
	for (i=0; i<count; i++) {
		att = (GF_X_Attribute*)gf_list_get(rsp->Xtensions, i);
		RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "x-");
		RTSP_WRITE_HEADER(buffer, size, cur_pos, att->Name, att->Value);
	}
	//end of header
	RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, "\r\n");
	//then body (fix: only when present, so we never read through a NULL body)
	if (rsp->body) RTSP_WRITE_ALLOC_STR(buffer, size, cur_pos, rsp->body);

	*out_buffer = (unsigned char *) buffer;
	*out_size = (u32) strlen(buffer);
	return GF_OK;
}
void oggpack_writeinit(oggpack_buffer *b) { memset(b,0,sizeof(*b)); b->ptr = b->buffer = (unsigned char *)gf_malloc(BUFFER_INCREMENT); b->buffer[0]='\0'; b->storage=BUFFER_INCREMENT; }
GF_Err SFScript_Parse(GF_BifsDecoder *codec, SFScript *script_field, GF_BitStream *bs, GF_Node *n) { GF_Err e; u32 i, count, nbBits; char *ptr; ScriptParser parser; e = GF_OK; if (gf_node_get_tag(n) != TAG_MPEG4_Script) return GF_NON_COMPLIANT_BITSTREAM; parser.codec = codec; parser.script = n; parser.bs = bs; parser.length = 500; parser.string = (char *) gf_malloc(sizeof(char)* parser.length); parser.string[0] = 0; parser.identifiers = gf_list_new(); parser.new_line = (char *) (codec->dec_memory_mode ? "\n" : NULL); parser.indent = 0; //first parse fields if (gf_bs_read_int(bs, 1)) { //endFlag while (!gf_bs_read_int(bs, 1)){ e = ParseScriptField(&parser); if (e) goto exit; } } else { nbBits = gf_bs_read_int(bs, 4); count = gf_bs_read_int(bs, nbBits); for (i=0; i<count; i++) { e = ParseScriptField(&parser); if (e) goto exit; } } //reserevd gf_bs_read_int(bs, 1); //then parse SFS_AddString(&parser, "javascript:"); SFS_AddString(&parser, parser.new_line); //hasFunction while (gf_bs_read_int(bs, 1)) { SFS_AddString(&parser, "function "); SFS_Identifier(&parser); SFS_Arguments(&parser, 0); SFS_Space(&parser); SFS_StatementBlock(&parser, 1); SFS_Line(&parser); } SFS_Line(&parser); if (script_field->script_text) gf_free(script_field->script_text); script_field->script_text = (unsigned char *) gf_strdup(parser.string); exit: //clean up while (gf_list_count(parser.identifiers)) { ptr = (char *)gf_list_get(parser.identifiers, 0); gf_free(ptr); gf_list_rem(parser.identifiers, 0); } gf_list_del(parser.identifiers); if (parser.string) gf_free(parser.string); return e; }
/* Retrieves (or synthesizes) the ESD for the given 1-based sample description
 * index of a media track. For MPEG-4 entries the ESD stored in the sample
 * entry (or its emulated twin for AVC/HEVC & co) is used; for text/3GPP/LASeR
 * entries an ESD is built on the fly with the relevant decoder config.
 * Ownership: when true_desc_only is set, *out_esd points at the internal
 * descriptor (do NOT free); otherwise a copy (or a freshly built ESD) is
 * returned and owned by the caller. */
GF_Err Media_GetESD(GF_MediaBox *mdia, u32 sampleDescIndex, GF_ESD **out_esd, Bool true_desc_only)
{
	GF_ESD *esd;
	GF_MPEGSampleEntryBox *entry = NULL;
	GF_ESDBox *ESDa;
	GF_SampleDescriptionBox *stsd = mdia->information->sampleTable->SampleDescription;

	*out_esd = NULL;
	if (!stsd || !stsd->other_boxes || !sampleDescIndex || (sampleDescIndex > gf_list_count(stsd->other_boxes)) ) return GF_BAD_PARAM;

	esd = NULL;
	/* sampleDescIndex is 1-based per the ISOBMFF spec */
	entry = (GF_MPEGSampleEntryBox*)gf_list_get(stsd->other_boxes, sampleDescIndex - 1);
	if (! entry) return GF_ISOM_INVALID_MEDIA;
	*out_esd = NULL;
	ESDa = NULL;
	switch (entry->type) {
	case GF_ISOM_BOX_TYPE_MP4V:
	case GF_ISOM_BOX_TYPE_ENCV:
		ESDa = ((GF_MPEGVisualSampleEntryBox*)entry)->esd;
		if (ESDa) esd = (GF_ESD *) ESDa->desc;
		/*avc1 encrypted*/
		else esd = ((GF_MPEGVisualSampleEntryBox*) entry)->emul_esd;
		break;
	case GF_ISOM_BOX_TYPE_AVC1:
	case GF_ISOM_BOX_TYPE_AVC2:
	case GF_ISOM_BOX_TYPE_AVC3:
	case GF_ISOM_BOX_TYPE_AVC4:
	case GF_ISOM_BOX_TYPE_SVC1:
	case GF_ISOM_BOX_TYPE_HVC1:
	case GF_ISOM_BOX_TYPE_HEV1:
	case GF_ISOM_BOX_TYPE_HVC2:
	case GF_ISOM_BOX_TYPE_HEV2:
	case GF_ISOM_BOX_TYPE_SHC1:
	case GF_ISOM_BOX_TYPE_SHV1:
		/* AVC/HEVC entries carry no ESD box: use the emulated ESD */
		esd = ((GF_MPEGVisualSampleEntryBox*) entry)->emul_esd;
		break;
	case GF_ISOM_BOX_TYPE_MP4A:
	case GF_ISOM_BOX_TYPE_ENCA:
		ESDa = ((GF_MPEGAudioSampleEntryBox*)entry)->esd;
		if (ESDa) esd = (GF_ESD *) ESDa->desc;
		break;
	case GF_ISOM_BOX_TYPE_MP4S:
	case GF_ISOM_BOX_TYPE_ENCS:
		ESDa = entry->esd;
		if (ESDa) esd = (GF_ESD *) ESDa->desc;
		break;
	case GF_ISOM_BOX_TYPE_TX3G:
	case GF_ISOM_BOX_TYPE_TEXT:
		/* timed text: only available when streaming-text conversion is enabled */
		if (!true_desc_only && mdia->mediaTrack->moov->mov->convert_streaming_text) {
			GF_Err e = gf_isom_get_ttxt_esd(mdia, out_esd);
			if (e) return e;
			break;
		}
		else return GF_ISOM_INVALID_MEDIA;
#ifndef GPAC_DISABLE_TTXT
	case GF_ISOM_BOX_TYPE_WVTT:
	{
		/* WebVTT: build an ESD whose DSI is the serialized sample entry config */
		GF_BitStream *bs;
		esd = gf_odf_desc_esd_new(2);
		*out_esd = esd;
		esd->decoderConfig->streamType = GF_STREAM_TEXT;
		esd->decoderConfig->objectTypeIndication = GPAC_OTI_SCENE_VTT_MP4;
		bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);
		gf_bs_write_u32(bs, entry->type);
		boxstring_Write((GF_Box *)((GF_WebVTTSampleEntryBox*)entry)->config, bs);
		gf_bs_get_content(bs, & esd->decoderConfig->decoderSpecificInfo->data, & esd->decoderConfig->decoderSpecificInfo->dataLength);
		gf_bs_del(bs);
	}
	break;
	case GF_ISOM_BOX_TYPE_STSE:
	{
		/* simple text: same DSI construction as WebVTT */
		GF_BitStream *bs;
		esd = gf_odf_desc_esd_new(2);
		*out_esd = esd;
		esd->decoderConfig->streamType = GF_STREAM_TEXT;
		esd->decoderConfig->objectTypeIndication = GPAC_OTI_SCENE_SIMPLE_TEXT_MP4;
		bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);
		gf_bs_write_u32(bs, entry->type);
		boxstring_Write((GF_Box *)((GF_SimpleTextSampleEntryBox*)entry)->config, bs);
		gf_bs_get_content(bs, & esd->decoderConfig->decoderSpecificInfo->data, & esd->decoderConfig->decoderSpecificInfo->dataLength);
		gf_bs_del(bs);
	}
	break;
#endif
	case GF_ISOM_SUBTYPE_3GP_AMR:
	case GF_ISOM_SUBTYPE_3GP_AMR_WB:
	case GF_ISOM_SUBTYPE_3GP_EVRC:
	case GF_ISOM_SUBTYPE_3GP_QCELP:
	case GF_ISOM_SUBTYPE_3GP_SMV:
		/* 3GPP audio: synthesize the ESD from the sample table */
		if (!true_desc_only) {
			GF_Err e = gf_isom_get_3gpp_audio_esd(mdia->information->sampleTable, (GF_GenericAudioSampleEntryBox*)entry, out_esd);
			if (e) return e;
			break;
		} else return GF_ISOM_INVALID_MEDIA;
	case GF_ISOM_SUBTYPE_3GP_H263:
		if (true_desc_only) {
			return GF_ISOM_INVALID_MEDIA;
		} else {
			/* H.263: generic visual ESD, DSI = fourcc + width + height */
			GF_BitStream *bs;
			esd = gf_odf_desc_esd_new(2);
			*out_esd = esd;
			esd->decoderConfig->streamType = GF_STREAM_VISUAL;
			esd->decoderConfig->objectTypeIndication = GPAC_OTI_MEDIA_GENERIC;
			bs = gf_bs_new(NULL, 0, GF_BITSTREAM_WRITE);
			gf_bs_write_u32(bs, entry->type);
			gf_bs_write_u16(bs, ((GF_MPEGVisualSampleEntryBox*)entry)->Width);
			gf_bs_write_u16(bs, ((GF_MPEGVisualSampleEntryBox*)entry)->Height);
			gf_bs_get_content(bs, & esd->decoderConfig->decoderSpecificInfo->data, & esd->decoderConfig->decoderSpecificInfo->dataLength);
			gf_bs_del(bs);
			break;
		}
	case GF_ISOM_SUBTYPE_LSR1:
		if (true_desc_only) {
			return GF_ISOM_INVALID_MEDIA;
		} else {
			/* LASeR: DSI is a copy of the stored config header */
			GF_LASeRSampleEntryBox*ptr = (GF_LASeRSampleEntryBox*)entry;
			esd = gf_odf_desc_esd_new(2);
			*out_esd = esd;
			esd->decoderConfig->streamType = GF_STREAM_SCENE;
			esd->decoderConfig->objectTypeIndication = GPAC_OTI_SCENE_LASER;
			esd->decoderConfig->decoderSpecificInfo->dataLength = ptr->lsr_config->hdr_size;
			esd->decoderConfig->decoderSpecificInfo->data = gf_malloc(sizeof(char)*ptr->lsr_config->hdr_size);
			memcpy(esd->decoderConfig->decoderSpecificInfo->data, ptr->lsr_config->hdr, sizeof(char)*ptr->lsr_config->hdr_size);
			break;
		}
	default:
		return GF_ISOM_INVALID_MEDIA;
	}

	if (true_desc_only) {
		/* return the internal descriptor directly - caller must not free it */
		if (!esd) return GF_ISOM_INVALID_MEDIA;
		*out_esd = esd;
		return GF_OK;
	} else {
		/* return an independent copy unless a case above already filled *out_esd */
		if (!esd && !*out_esd) return GF_ISOM_INVALID_MEDIA;
		if (*out_esd == NULL) gf_odf_desc_copy((GF_Descriptor *)esd, (GF_Descriptor **)out_esd);
	}
	return GF_OK;
}
//------------------------------- // dir should end with / int CNativeWrapper::init(JNIEnv * env, void * bitmap, jobject * callback, int width, int height, const char * cfg_dir, const char * modules_dir, const char * cache_dir, const char * font_dir, const char * urlToLoad) { LOGI("Initializing GPAC with URL=%s...", urlToLoad); strcpy(m_cfg_dir, cfg_dir); strcpy(m_modules_dir, modules_dir); strcpy(m_cache_dir, cache_dir); strcpy(m_font_dir, font_dir); char m_cfg_filename[GF_MAX_PATH]; strcpy(m_cfg_filename, m_cfg_dir); strcat(m_cfg_filename, "GPAC.cfg"); int m_Width = width; int m_Height = height; int first_launch = 0; const char *opt; m_window = env; m_session = bitmap; if (!mainJavaEnv) mainJavaEnv = (JavaEnvTh *) gf_malloc(sizeof(JavaEnvTh)); memset(mainJavaEnv, 0, sizeof(JavaEnvTh)); setJavaEnv(mainJavaEnv, env, env->NewGlobalRef(*callback)); if (pthread_setspecific( jni_thread_env_key, mainJavaEnv)) { LOGE("Failed to set specific thread data to jni_thread_env_key=%p for main thread !", jni_thread_env_key); } m_mx = gf_mx_new("Osmo4"); //load config file LOGI("Loading User Config %s...", "GPAC.cfg"); m_user.config = gf_cfg_force_new(cfg_dir, "GPAC.cfg"); gf_set_progress_callback(this, Osmo4_progress_cbk); opt = gf_cfg_get_key(m_user.config, "General", "ModulesDirectory"); if (!opt) { FILE * fstart; char msg[256]; LOGI("First launch, initializing new Config %s...", "GPAC.cfg"); /*hardcode module directory*/ gf_cfg_set_key(m_user.config, "Downloader", "CleanCache", "yes"); /*startup file*/ snprintf(msg, 256, "%sgui/gui.bt", cfg_dir); fstart = fopen(msg, "r"); if (fstart) { fclose(fstart); gf_cfg_set_key(m_user.config, "General", "StartupFile", msg); } else { gf_cfg_set_key(m_user.config, "General", "#StartupFile", msg); } gf_cfg_set_key(m_user.config, "GUI", "UnhideControlPlayer", "1"); /*setup UDP traffic autodetect*/ gf_cfg_set_key(m_user.config, "Network", "AutoReconfigUDP", "yes"); gf_cfg_set_key(m_user.config, "Network", "UDPTimeout", "10000"); 
gf_cfg_set_key(m_user.config, "Network", "BufferLength", "3000"); gf_cfg_set_key(m_user.config, "Compositor", "TextureTextMode", "Default"); //gf_cfg_set_key(m_user.config, "Compositor", "FrameRate", "30"); gf_cfg_set_key(m_user.config, "Audio", "ForceConfig", "no"); gf_cfg_set_key(m_user.config, "Audio", "NumBuffers", "1"); gf_cfg_set_key(m_user.config, "FontEngine", "FontReader", "ft_font"); } /* All of this has to be done for every instance */ gf_cfg_set_key(m_user.config, "General", "ModulesDirectory", modules_dir ? modules_dir : GPAC_MODULES_DIR); gf_cfg_set_key(m_user.config, "General", "CacheDirectory", cache_dir ? cache_dir : GPAC_CACHE_DIR); gf_cfg_set_key(m_user.config, "General", "LastWorkingDir", cfg_dir); gf_cfg_set_key(m_user.config, "FontEngine", "FontDirectory", GPAC_FONT_DIR); gf_cfg_set_key(m_user.config, "Video", "DriverName", "Android Video Output"); gf_cfg_set_key(m_user.config, "Audio", "DriverName", "Android Audio Output"); opt = gf_cfg_get_key(m_user.config, "General", "ModulesDirectory"); LOGI("loading modules in directory %s...", opt); m_user.modules = gf_modules_new(opt, m_user.config); if (!m_user.modules || !gf_modules_get_count(m_user.modules)) { LOGE("No modules found in directory %s !", opt); if (m_user.modules) gf_modules_del(m_user.modules); gf_cfg_del(m_user.config); m_user.config = NULL; return Quit(KErrGeneral); } /*we don't thread the visual compositor to be able to minimize the app and still have audio running*/ m_user.init_flags = GF_TERM_NO_COMPOSITOR_THREAD; m_user.opaque = this; m_user.os_window_handler = m_window; m_user.os_display = m_session; m_user.EventProc = GPAC_EventProc; if (!javaVM) { LOGE("NO JAVA VM FOUND, m_user=%p !!!!\n", &m_user); return Quit(KErrGeneral); } LOGD("Loading GPAC terminal, m_user=%p...", &m_user); gf_sys_init(GF_FALSE); gf_fm_request_set_callback(this, on_fm_request); SetupLogs(); m_term = gf_term_new(&m_user); if (!m_term) { LOGE("Cannot load GPAC Terminal with m_user=%p", m_user); 
MessageBox("Cannot load GPAC terminal", "Fatal Error", GF_SERVICE_ERROR); gf_modules_del(m_user.modules); m_user.modules = NULL; gf_cfg_del(m_user.config); m_user.config = NULL; return Quit(KErrGeneral); } //setAudioEnvironment(javaVM); LOGD("Setting term size m_user=%p...", &m_user); gf_term_set_size(m_term, m_Width, m_Height); opt = gf_cfg_get_key(m_user.config, "General", "StartupFile"); LOGD("File loaded at startup=%s.", opt); if (!urlToLoad) urlToLoad = opt; if (urlToLoad) { LOGI("Connecting to %s...", urlToLoad); gf_term_connect(m_term, urlToLoad); } debug_log("init end"); LOGD("Saving config file %s...\n", m_cfg_filename); gf_cfg_save(m_user.config); LOGI("Initialization complete, config file saved as %s.\n", m_cfg_filename); return 0; }
PNC_CallbackData *PNC_Init_SceneGenerator(GF_RTPChannel *p_chan, GF_RTPHeader *p_hdr, char *default_scene, u32 socketType, u16 socketPort, int debug) { GF_Err e; PNC_CallbackData *data = gf_malloc(sizeof(PNC_CallbackData)); int *i; data->chan = p_chan; data->hdr = p_hdr; data->debug = debug; memset( (void*) (data->buffer), '\0', RECV_BUFFER_SIZE_FOR_COMMANDS); data->bufferPosition = 0; /* Loading the initial scene as the encoding context */ data->codec = gf_seng_init((void*)data, default_scene); if (!data->codec) { fprintf(stderr, "Cannot create BIFS Engine from %s\n", default_scene); gf_free(data); return NULL; } data->server_socket = NULL; data->socket = NULL; if (socketType == GF_SOCK_TYPE_TCP) { data->server_socket = gf_sk_new(socketType); e = gf_sk_bind(data->server_socket, NULL, (u16) socketPort, NULL, 0, 0); if (e) fprintf(stderr, "Failed to bind : %s\n", gf_error_to_string(e)); e |= gf_sk_listen(data->server_socket, 1); if (e) fprintf(stderr, "Failed to listen : %s\n", gf_error_to_string(e)); e |= gf_sk_set_block_mode(data->server_socket, 0); if (e) fprintf(stderr, "Failed to set block mode : %s\n", gf_error_to_string(e)); e |= gf_sk_server_mode(data->server_socket, 0); if (e) fprintf(stderr, "Failed to set server mode : %s\n", gf_error_to_string(e)); } else { data->socket = gf_sk_new(socketType); e = gf_sk_bind(data->socket, NULL, (u16) socketPort, NULL, 0, 0); } /* char buffIp[1024]; u16 port = 0; u32 socket_type = 0; e |= gf_sk_get_local_ip(data->socket, buffIp); e |= gf_sk_get_local_info(data->socket, &port, &socket_type); dprintf(DEBUG_RTP_serv_generator, "RTS_serv_generator %s:%d %s\n", buffIp, port, socket_type==GF_SOCK_TYPE_UDP?"UDP":"TCP", e==GF_OK?"OK":"ERROR"); */ if (e) { fprintf(stderr, "Cannot bind socket to port %d (%s)\n", socketPort, gf_error_to_string(e)); if (data->socket) gf_sk_del(data->socket); if (data->server_socket) gf_sk_del(data->server_socket); gf_free(data); return NULL; } data->extension = gf_malloc(sizeof(PNC_CallbackExt)); 
((PNC_CallbackExt * )data->extension)->i = 0; ((PNC_CallbackExt * )data->extension)->lastTS = 0; i = &((PNC_CallbackExt*)data->extension)->i; return data; }