// Checks the x-coordinate of every node of every element of a serial
// generated 2x2x2 mesh against the gold_x_coordinates table, walking the
// elements bucket by bucket through BulkDataHelper.
TEST_F(LocalIds, using_entities)
{
    if (get_parallel_size() != 1)
        return;

    setup_mesh("generated:2x2x2", stk::mesh::BulkData::AUTO_AURA);
    BulkDataHelper bulkDataHelper(get_bulk());

    typedef stk::mesh::Field<double, stk::mesh::Cartesian3d> CoordFieldType;
    CoordFieldType* coords = get_meta().get_field<CoordFieldType>(stk::topology::NODE_RANK, "coordinates");

    unsigned elemIndex = 0;
    const stk::mesh::BucketVector& elemBuckets = get_bulk().buckets(stk::topology::ELEM_RANK);
    for (size_t bucketIdx = 0; bucketIdx < elemBuckets.size(); ++bucketIdx)
    {
        const stk::mesh::Bucket& bucket = *elemBuckets[bucketIdx];
        for (size_t elemInBucket = 0; elemInBucket < bucket.size(); ++elemInBucket)
        {
            Entities nodes = bulkDataHelper.get_nodes(bucket[elemInBucket]);
            for (unsigned nodeOrdinal = 0; nodeOrdinal < nodes.size(); ++nodeOrdinal)
            {
                double* node_data = stk::mesh::field_data(*coords, nodes[nodeOrdinal]);
                EXPECT_NEAR(gold_x_coordinates[elemIndex][nodeOrdinal], node_data[0], 1.e-6);
            }
            ++elemIndex;
        }
    }
}
// Lazily allocates the meta data (when not built yet), then creates the
// BulkData instance with the requested automatic-aura option.
virtual void allocate_bulk(stk::mesh::BulkData::AutomaticAuraOption auraOption)
{
    if (metaData == nullptr)
    {
        allocate_meta();
    }
    bulkData = new stk::mesh::BulkData(get_meta(), communicator, auraOption);
}
// Per-frame layout update for an item button: decides whether children are
// drawn, recomputes this rect's size from the item's sprite (padded and
// snapped to an 11-pixel grid), and positions the rect against its parent slot.
void item_button::perform_logic_step(augs::gui::gui_world& gr) {
	// Children show only while an opened container is not being dragged;
	// hovering is suppressed for the duration of a drag.
	enable_drawing_of_children = is_container_open && !is_being_dragged(gr);
	disable_hovering = is_being_dragged(gr);

	rect::perform_logic_step(gr);

	// The inventory root has no parent slot to lay out against.
	if (is_inventory_root())
		return;

	vec2i parent_position; // NOTE(review): never assigned or read below - appears unused.

	auto* sprite = item->find<components::sprite>();

	if (sprite) {
		with_attachments_bbox = iterate_children_attachments();
		vec2i rounded_size = with_attachments_bbox.get_size();
		// Pad the bounding box, then snap it down to a multiple of 11
		// (integer division followed by multiplication).
		rounded_size += 22;
		rounded_size += resource_manager.find(sprite->tex)->gui_sprite_def.gui_bbox_expander;

		rounded_size /= 11;
		rounded_size *= 11;
		//rounded_size.x = std::max(rounded_size.x, 33);
		//rounded_size.y = std::max(rounded_size.y, 33);
		rc.set_size(rounded_size);
	}

	auto parent_slot = item->get<components::item>().current_slot;

	// Attachment slots dictate the position; deposit slots keep the
	// user-dragged offset.
	if (parent_slot->is_attachment_slot) {
		rc.set_position(get_meta(parent_slot).rc.get_position());
	}
	else {
		rc.set_position(drag_offset_in_item_deposit);
	}
}
// Reads the XML attribute corresponding to `key` (its canonical name is
// tuple slot 0 of the property metadata) and, when the node carries it,
// forwards the assignment to the enum-aware implementation helper.
void set_symbolizer_property(Symbolizer & sym, keys key, xml_node const& node)
{
    std::string const& attribute_name = std::get<0>(get_meta(key));
    if (!node.has_attribute(attribute_name)) return;
    detail::set_symbolizer_property_impl<Symbolizer, T, std::is_enum<T>::value>::apply(sym, key, attribute_name, node);
}
bool user_event_filter_t::allows(const user_event_meta_t& evt) const { list_t::const_iterator it = get_meta(evt.kind()); if(it != m_list.end()) // this event kind is allowed { // check for "any event" type being allowed by this filter if(it->types().find(user_event_meta_t::PERMIT_ALL) != it->types().end()) { return true; } // check if event has more types than this filter if(evt.types().size() > it->types().size()) { return false; } // if all event types are present in this filter, event is allowed for(auto const& type : evt.types()) { if(it->types().find(type) == it->types().end()) { return false; } } return true; } return false; }
//批量读取边,返回所有的边,源顶点和目的顶点就没有规律了,还要在上面封装接口来读取某个源顶点和目的顶点的边,也可以由用户完成 //如果vertexes是空,则不会发送请求 uint32_t Client::read_two_edges(list<Two_vertex>& vertexes,list<Edge_u> &edges){ if(current_graph()=="") return STATUS_NOT_EXIST;//如果还没有连接图,则返回状态STATUS_NOT_EXIST //找所有边的元数据,由于有缓存,这个操作不会成为瓶颈。边在获取元数据的时候按照ip分类 unordered_map<string,list<Two_vertex>*> classify; list<Two_vertex>::iterator it=vertexes.begin(); unordered_map<string,list<Two_vertex>*>::iterator it_cl; string ip; while(it!=vertexes.end()){ ip=get_meta(graph_name,(*it).s_id); it_cl=classify.find(ip); if(it_cl==classify.end()){ //如果没有这个ip类,则创建,然后把边加入到该ip类 classify.insert(pair<string,list<Two_vertex>*>(ip,new list<Two_vertex>())); it_cl=classify.find(ip); it_cl->second->push_back(*it); }else{ it_cl->second->push_back(*it); } it++; } //元数据查完后,就开始分别把每个ip类的顶点对发送出去 it_cl=classify.begin(); while(it_cl!=classify.end()){ Requester req_slave(*find_sock(it_cl->first)); req_slave.ask(CMD_READ_TWO_EDGES,*(it_cl->second),graph_name); req_slave.parse_ans(edges); delete it_cl->second;//这个ip类的边插完了,则释放空间 it_cl++; } return STATUS_OK; }
// Routes one GUI event to this item button: begins drags, toggles the
// container on right-click, and on drag end either posts the drop intent or
// keeps the item at its grid-snapped drag offset.
void item_button::consume_gui_event(event_info info) {
	// The inventory root never reacts to events itself.
	if (is_inventory_root())
		return;

	detector.update_appearance(info);
	auto parent_slot = item->get<components::item>().current_slot;

	if (info == rect::gui_event::ldrag) {
		if (!started_drag) {
			started_drag = true;

			// If the drag starts while the cursor is over the parent
			// attachment slot's rect, mark that slot as not yet hovered-out.
			if (parent_slot->is_attachment_slot)
				if (get_meta(parent_slot).get_rect_absolute().hover(info.owner.state.mouse.pos)) {
					get_meta(parent_slot).houted_after_drag_started = false;
				}
		}
	}

	// Right-click toggles the open/closed state of a container item.
	if (info == rect::gui_event::rclick) {
		is_container_open = !is_container_open;
	}

	if (info == rect::gui_event::lfinisheddrag) {
		started_drag = false;

		auto& gui = gui_element_entity->get_owner_world().get_system<gui_system>();
		auto& drag_result = gui.prepare_drag_and_drop_result();

		if (drag_result.possible_target_hovered && drag_result.will_drop_be_successful()) {
			// Dropped onto a valid target: forward the transfer intent.
			gui.parent_world.post_message(drag_result.intent);
		}
		else if (!drag_result.possible_target_hovered) {
			// Dropped over empty space: snap the accumulated drag amount to
			// the grid and keep the item where the user left it.
			vec2i griddified = griddify(info.owner.current_drag_amount);

			if (parent_slot->is_attachment_slot) {
				get_meta(parent_slot).user_drag_offset += griddified;
				get_meta(parent_slot).houted_after_drag_started = true;
			}
			else {
				drag_offset_in_item_deposit += griddified;
			}
		}
	}

	// if(being_dragged && inf == rect::gui_event::lup)
}
// Builds a two-tet mesh sharing a face, writes it to "mike.g", skins the
// exposed boundary, and dumps every face's node ids and coordinates of the
// first element to stderr for manual inspection.
TEST_F(DGTetFixture, tet)
{
    // Two tetrahedra sharing nodes {2,3,4}.
    std::vector<stk::mesh::EntityIdVector> tet_conn = {
        {1, 2, 3, 4}, // id 1
        {2, 3, 4, 5} // id 2
    };
    std::vector< std::vector<double> > node_coords= {
        {0, 0, 0}, // 1
        {1, 0, 0}, // 2
        {0, 1, 0}, // 3
        {0.5, 0.5, 1.0}, // 6...just kidding, it's 4
        {1.0, 1.0, 1.0}
    };
    setup_mesh(tet_conn, node_coords);
    stk::unit_test_util::write_mesh_using_stk_io("mike.g", get_bulk(), get_bulk().parallel());
    //////////////////////////////////////////////////////////////////////////////////////
    stk::mesh::EntityVector elements;
    stk::mesh::get_selected_entities(get_meta().locally_owned_part(), get_bulk().buckets(stk::topology::ELEM_RANK), elements);
    std::cerr << "num elements: " << elements.size() << std::endl;
    // Create faces on the exposed (skin) boundary of the locally owned part.
    stk::mesh::create_exposed_boundary_sides(get_bulk(), get_meta().locally_owned_part(), {get_skin_part()});
    unsigned num_faces = get_bulk().num_faces(elements[0]);
    const stk::mesh::Entity* faces = get_bulk().begin_faces(elements[0]);
    std::cerr << "num faces: " << num_faces << std::endl;
    // Dump node ids and coordinates for each face of the first element.
    for(unsigned i=0;i<num_faces;i++)
    {
        stk::mesh::Entity face = faces[i];
        unsigned num_nodes = get_bulk().num_nodes(face);
        const stk::mesh::Entity* nodes = get_bulk().begin_nodes(face);
        for(unsigned j=0;j<num_nodes;++j)
        {
            std::cerr << "Node " << j+1 << " of face " << i+1 << " is " << get_bulk().identifier(nodes[j]) << std::endl;
            double *nodeCoord = static_cast<double*>(stk::mesh::field_data(*get_coord_field(), nodes[j]));
            std::cerr << "Has coordinates: " << nodeCoord[0] << " " << nodeCoord[1] << " " << nodeCoord[2] << std::endl;
        }
    }
}
// Declares one side (face) of `element` on this processor, using
// parallel_rank() + 1 as the side id, and verifies the side is valid.
void create_face_per_proc(stk::mesh::Entity element, stk::mesh::EntityVector& nodes_of_face)
{
    unsigned sideId = get_bulk().parallel_rank() + 1;
    stk::topology sideTopology = get_bulk().bucket(element).topology().side_topology();
    stk::mesh::Entity side = stk::unit_test_util::declare_element_to_sub_topology_with_nodes(
        get_bulk(), element, nodes_of_face, sideId,
        get_meta().side_rank(), get_meta().get_topology_root_part(sideTopology));
    EXPECT_TRUE(get_bulk().is_valid(side));
}
//增加一条边,顶点不存在的时候不会自动创建顶点,添加边就会失败 uint32_t Client::add_edge(Edge_u &e){ if(current_graph()=="") return STATUS_NOT_EXIST;//如果还没有连接图,则返回状态STATUS_NOT_EXIST //如果连接图了,则首先找图和源顶点的元数据,先在缓存中找,没找到再去master询问 string ip=get_meta(graph_name,e.s_id); Requester req_slave(*find_sock(ip)); proto_edge_u mes_slave(graph_name,e); req_slave.ask(CMD_ADD_EDGE,&mes_slave,sizeof(proto_edge_u)); req_slave.parse_ans(); return req_slave.get_status();//返回结果 }
//返回一个顶点的所有边 uint32_t Client::read_edges(v_type id,list<Edge_u>& edges){ if(current_graph()=="") return STATUS_NOT_EXIST;//如果还没有连接图,则返回状态STATUS_NOT_EXIST //如果连接图了,则首先找图和源顶点的元数据,先在缓存中找,没找到再去master询问 string ip=get_meta(graph_name,id); Requester req_slave(*find_sock(ip)); proto_graph_vertex mes_slave(graph_name,id); req_slave.ask(CMD_READ_EDGES,&mes_slave,sizeof(proto_graph_vertex)); req_slave.parse_ans(edges); return req_slave.get_status(); }
//增加一个顶点 uint32_t Client::add_vertex(Vertex_u &v){ if(current_graph()=="") return STATUS_NOT_EXIST;//如果还没有连接图,则返回状态STATUS_NOT_EXIST //先得到元数据信息,也就是顶点所在ip string ip=get_meta(graph_name,v.id); //向slave添加顶点 Requester req_slave(*find_sock(ip)); proto_graph_vertex_u mes_slave(graph_name,v); req_slave.ask(CMD_ADD_VERTEX,&mes_slave,sizeof(proto_graph_vertex_u)); req_slave.parse_ans(); return req_slave.get_status();//返回结果 }
//多线程版,批量增加边,如果num不为空,则存储实际添加的边的数目,因为有些顶点可能不存在,添加就会失败 uint32_t Client::add_edges_pthread(list<Edge_u> &edges,uint32_t *num){ if(current_graph()=="") return STATUS_NOT_EXIST;//如果还没有连接图,则返回状态STATUS_NOT_EXIST //找所有边的元数据,由于有缓存,这个操作不会成为瓶颈。边在获取元数据的时候按照ip分类 unordered_map<string,list<Edge_u>*> classify; list<Edge_u>::iterator it=edges.begin(); unordered_map<string,list<Edge_u>*>::iterator it_cl; string ip; while(it!=edges.end()){ ip=get_meta(graph_name,(*it).s_id); it_cl=classify.find(ip); if(it_cl==classify.end()){ //如果没有这个ip类,则创建,然后把边加入到该ip类 classify.insert(pair<string,list<Edge_u>*>(ip,new list<Edge_u>())); it_cl=classify.find(ip); it_cl->second->push_back(*it); }else{ it_cl->second->push_back(*it); } it++; } //元数据查完后,就开始分别把每个ip类的边发送出去 uint32_t size=classify.size(); Ip_Edges* datas=new Ip_Edges[size]; uint32_t index=0; pthread_t *threads=new pthread_t[size]; it_cl=classify.begin(); while(it_cl!=classify.end()){ datas[index].graph_name=graph_name; datas[index].sock=find_sock(it_cl->first); datas[index].edges=it_cl->second; datas[index].num=0; pthread_create(&threads[index],NULL,thread_add_edges,&datas[index]); index++; it_cl++; } //等待线程的运行完成 for(index=0;index<size;index++){ pthread_join(threads[index],NULL); } //计算成功添加的边数 if(num!=NULL){ *num=0; for(index=0;index<size;index++){ *num+=datas[index].num; } } //清理内存 for(index=0;index<size;index++){ delete datas[index].edges; } delete[] datas; delete[] threads; return STATUS_OK; }
// Registers an event description by moving it into the filter.  A
// handle-everything entry short-circuits; an entry of the same kind that is
// already present is left untouched.
void user_event_filter_t::add(user_event_meta_t&& evt)
{
    if (handle_all(user_event_meta_t(evt)))
        return;
    const bool already_known = get_meta(evt.kind()) != m_list.end();
    if (!already_known)
        m_list.emplace(std::move(evt));
}
// Registers an event description by copying it into the filter.  A
// handle-everything entry short-circuits; an entry of the same kind that is
// already present is left untouched.
void user_event_filter_t::add(const user_event_meta_t& evt)
{
    if (handle_all(user_event_meta_t(evt)))
        return;
    const bool already_known = get_meta(evt.kind()) != m_list.end();
    if (!already_known)
        m_list.insert(evt);
}
/* Restores extra metadata for the entry described by sb.  On ACTION_RESTORE
 * the metadata blob is read from asfd and applied to fname via
 * set_extrameta(); for any other action the entry is only counted.
 * `encpassword` and `vss_restore` are not used in this body - kept for
 * interface parity with the callers.
 * Returns 0 on success (a failed set_extrameta is tolerated), -1 on error. */
static int restore_metadata(
	struct asfd *asfd,
	struct sbuf *sb,
	const char *fname,
	enum action act,
	const char *encpassword,
	int vss_restore,
	struct cntr *cntr)
{
	// If it is directory metadata, try to make sure the directory
	// exists. Pass in NULL as the cntr, so no counting is done.
	// The actual directory entry will be coming after the metadata,
	// annoyingly. This is because of the way that the server is queuing
	// up directories to send after file data, so that the stat info on
	// them gets set correctly.
	if(act==ACTION_RESTORE)
	{
		size_t metalen=0;
		char *metadata=NULL;
		if(S_ISDIR(sb->statp.st_mode)
		  && restore_dir(asfd, sb, fname, act, cntr, PROTO_2))
			return -1;

		// Read in the metadata...
		if(get_meta(asfd, cntr, &metadata, &metalen))
			return -1;
		if(metadata)
		{
			if(set_extrameta(asfd, NULL, fname,
				metadata, metalen, cntr))
			{
				free_w(&metadata);
				// carry on if we could not do it
				return 0;
			}
			free_w(&metadata);
#ifndef HAVE_WIN32
			// set attributes again, since we just diddled with
			// the file
			attribs_set(asfd, fname, &(sb->statp), sb->winattr, cntr);
#endif
			cntr_add(cntr, sb->path.cmd, 1);
		}
	}
	else
		cntr_add(cntr, sb->path.cmd, 1);
	return 0;
}
//多线程批量读取边 uint32_t Client::read_two_edges_pthread(list<Two_vertex>& vertexes,list<Edge_u> **edges,uint32_t *size){ if(current_graph()=="") return STATUS_NOT_EXIST;//如果还没有连接图,则返回状态STATUS_NOT_EXIST //找所有边的元数据,由于有缓存,这个操作不会成为瓶颈。边在获取元数据的时候按照ip分类 unordered_map<string,list<Two_vertex>*> classify; list<Two_vertex>::iterator it=vertexes.begin(); unordered_map<string,list<Two_vertex>*>::iterator it_cl; string ip; while(it!=vertexes.end()){ ip=get_meta(graph_name,(*it).s_id); it_cl=classify.find(ip); if(it_cl==classify.end()){ //如果没有这个ip类,则创建,然后把边加入到该ip类 classify.insert(pair<string,list<Two_vertex>*>(ip,new list<Two_vertex>())); it_cl=classify.find(ip); it_cl->second->push_back(*it); }else{ it_cl->second->push_back(*it); } it++; } //元数据查完后,就开始分别把每个ip类的顶点对发送出去,多线程的发送,所以要给每个线程数据 *size=classify.size(); *edges=new list<Edge_u>[*size]; Ip_Two_Vertex* datas=new Ip_Two_Vertex[*size]; uint32_t index=0; pthread_t *threads=new pthread_t[*size]; it_cl=classify.begin(); while(it_cl!=classify.end()){ datas[index].graph_name=graph_name; datas[index].sock=find_sock(it_cl->first); datas[index].vertexes=it_cl->second; datas[index].edges=&(*edges)[index]; pthread_create(&threads[index],NULL,thread_read_two_edges,&datas[index]); index++; it_cl++; } //等待线程的运行完成 for(index=0;index<*size;index++){ pthread_join(threads[index],NULL); } //清理内存 for(index=0;index<*size;index++){ delete datas[index].vertexes; } delete[] datas; delete[] threads; return STATUS_OK; }
//查询顶点的信息,如果顶点存在则返回ok状态,不存在返回 uint32_t Client::read_vertex(v_type id,Vertex_u& v,uint32_t *num){ if(current_graph()=="") return STATUS_NOT_EXIST;//如果还没有连接图,则返回状态STATUS_NOT_EXIST //如果连接图了,则首先找图和顶点的元数据,先在缓存中找,没找到再去master询问 string ip=get_meta(graph_name,id); Requester req_slave(*find_sock(ip)); proto_graph_vertex mes_slave(graph_name,id); req_slave.ask(CMD_READ_VERTEX,&mes_slave,sizeof(proto_graph_vertex)); req_slave.parse_ans(); uint32_t res=req_slave.get_status(); if(res==STATUS_OK){ proto_vertex_num *mes=(proto_vertex_num*)req_slave.get_data(); v=mes->vertex; *num=mes->num; } return res; }
/* Builds one GwyDataField per z-layer of the X3P file, turns invalid samples
 * into NaNs, attaches the field, a NaN mask (when any point is invalid),
 * titles and per-channel metadata to the container. */
static void create_images(const X3PFile *x3pfile, GwyContainer *container)
{
    gint id;

    for (id = 0; id < x3pfile->zres; id++) {
        GwyContainer *meta;
        guint n = x3pfile->xres*x3pfile->yres, k;
        GwyDataField *dfield, *mask;
        const gboolean *valid = x3pfile->valid + id*n;
        GQuark quark;
        gchar buf[40];

        /* Physical size is pixel count times step size. */
        dfield = gwy_data_field_new(x3pfile->xres, x3pfile->yres,
                                    x3pfile->xres*x3pfile->dx,
                                    x3pfile->yres*x3pfile->dy,
                                    FALSE);
        memcpy(dfield->data, x3pfile->values + id*n, n*sizeof(gdouble));
        /* Invalid samples become NaN so they can be masked below. */
        for (k = 0; k < n; k++) {
            if (!valid[k])
                dfield->data[k] = NAN;
        }
        quark = gwy_app_get_data_key_for_id(id);
        gwy_container_set_object(container, quark, dfield);
        /* Both lateral and height units are metres. */
        gwy_si_unit_set_from_string(gwy_data_field_get_si_unit_xy(dfield), "m");
        gwy_si_unit_set_from_string(gwy_data_field_get_si_unit_z(dfield), "m");
        gwy_app_channel_title_fall_back(container, id);
        gwy_app_channel_check_nonsquare(container, id);
        /* Mask channel flags the NaN (invalid) points, if any exist. */
        if ((mask = gwy_app_channel_mask_of_nans(dfield, TRUE))) {
            quark = gwy_app_get_mask_key_for_id(id);
            gwy_container_set_object(container, quark, mask);
            g_object_unref(mask);
        }
        g_object_unref(dfield);
        /* A fresh metadata container is attached to every channel. */
        if ((meta = get_meta(x3pfile))) {
            g_snprintf(buf, sizeof(buf), "/%u/meta", id);
            gwy_container_set_object_by_name(container, buf, meta);
            g_object_unref(meta);
        }
    }
}
// Assigns `val` to the symbolizer property `key`, dispatching on the target
// type recorded in slot 3 of the property metadata tuple.  Target types
// other than bool/integer/double/color are silently ignored.
inline void set_property(Symbolizer & sym, mapnik::keys key, T const& val)
{
	switch (std::get<3>(get_meta(key)))
	{
	case property_types::target_bool:
		set_property_impl<Symbolizer, std::integral_constant<property_types, property_types::target_bool> >::apply(sym,key,val);
		break;
	case property_types::target_integer:
		set_property_impl<Symbolizer, std::integral_constant<property_types, property_types::target_integer> >::apply(sym,key,val);
		break;
	case property_types::target_double:
		set_property_impl<Symbolizer, std::integral_constant<property_types, property_types::target_double> >::apply(sym,key,val);
		break;
	case property_types::target_color:
		set_property_impl<Symbolizer, std::integral_constant<property_types, property_types::target_color> >::apply(sym,key,val);
		break;
	default:
		break;
	}
}
// Converts a generic value `val` to the target type declared for `key`
// (slot 3 of the property metadata tuple) and stores it on the symbolizer.
// Target types other than bool/integer/double/color are silently ignored.
inline void set_property_from_value(Symbolizer & sym, mapnik::keys key, T const& val)
{
	switch (std::get<3>(get_meta(key)))
	{
	case property_types::target_bool:
		put(sym, key, val.to_bool());
		break;
	case property_types::target_integer:
		put(sym, key, val.to_int());
		break;
	case property_types::target_double:
		put(sym, key, val.to_double());
		break;
	case property_types::target_color:
		// Colors arrive as strings and must be parsed.
		put(sym, key, mapnik::parse_color(val.to_string()));
		break;
	default:
		break;
	}
}
//批量增加边,如果num不为空,则存储实际添加的边的数目,因为有些顶点可能不存在,添加就会失败 uint32_t Client::add_edges(list<Edge_u> &edges,uint32_t *num){ if(current_graph()=="") return STATUS_NOT_EXIST;//如果还没有连接图,则返回状态STATUS_NOT_EXIST //找所有边的元数据,由于有缓存,这个操作不会成为瓶颈。边在获取元数据的时候按照ip分类 unordered_map<string,list<Edge_u>*> classify; list<Edge_u>::iterator it=edges.begin(); unordered_map<string,list<Edge_u>*>::iterator it_cl; string ip; while(it!=edges.end()){ ip=get_meta(graph_name,(*it).s_id); it_cl=classify.find(ip); if(it_cl==classify.end()){ //如果没有这个ip类,则创建,然后把边加入到该ip类 classify.insert(pair<string,list<Edge_u>*>(ip,new list<Edge_u>())); it_cl=classify.find(ip); it_cl->second->push_back(*it); }else{ it_cl->second->push_back(*it); } it++; } //元数据查完后,就开始分别把每个ip类的边发送出去 if(num!=NULL) *num=0; it_cl=classify.begin(); while(it_cl!=classify.end()){ Requester req_slave(*find_sock(it_cl->first)); req_slave.ask(CMD_ADD_EDGES,*(it_cl->second),graph_name); req_slave.parse_ans(); if(num!=NULL){ //统计插入的边的数目 *num+=atoi((char*)req_slave.get_data()); } delete it_cl->second;//这个ip类的边插完了,则释放空间 it_cl++; } return STATUS_OK; }
// Serializes an enumeration property: the metadata's to-string functor
// (tuple slot 2) renders the wrapped value, which is then stored as an XML
// attribute named after the property.
void operator() (enumeration_wrapper const& val) const
{
    std::string const rendered = std::get<2>(get_meta(get_key(name_)))(val);
    node_.put("<xmlattr>." + name_, rendered);
}
// Master draw routine for an item button.  Flags select which layers are
// rendered: the tinted interior (with charge/free-space caption), the border
// stroke, the connector line to the parent container's button, and the
// open/closed container mark.
void item_button::draw_proc(draw_info in, bool draw_inside, bool draw_border, bool draw_connector, bool decrease_alpha, bool decrease_border_alpha, bool draw_container_opened_mark) {
	// The inventory root draws nothing itself.
	if (is_inventory_root())
		return;

	auto parent_slot = item->get<components::item>().current_slot;

	// Base palette: cyan, switched to pink/violet for categorized-only slots;
	// alpha is then modulated by hover/push state and the decrease flags.
	rgba inside_col = cyan;
	rgba border_col = cyan;

	if (parent_slot->for_categorized_items_only) {
		border_col = pink;
		inside_col = violet;
	}

	inside_col.a = 20;
	border_col.a = 190;

	if (detector.is_hovered) {
		inside_col.a = 30;
		border_col.a = 220;
	}

	if (detector.current_appearance == decltype(detector)::appearance::pushed) {
		inside_col.a = 60;
		border_col.a = 255;
	}

	if (decrease_alpha) {
		inside_col.a = 15;
	}

	if (decrease_border_alpha) {
		border_col = slightly_visible_white;
	}

	if (draw_inside) {
		draw_stretched_texture(in, augs::gui::material(assets::texture_id::BLANK, inside_col));

		iterate_children_attachments(true, &in.v, border_col);

		auto& item_data = item->get<components::item>();

		// Caption in the bottom-right corner: either "xN" for stacked
		// charges, or the free space left in the item's own deposit.
		float bottom_number_val = -1.f;
		auto* container = item->find<components::container>(); // NOTE(review): not read below - appears unused.
		bool append_x = false;

		auto label_color = border_col;

		if (item_data.charges > 1) {
			bottom_number_val = item_data.charges;
			append_x = true;
		}
		else if (DRAW_FREE_SPACE_INSIDE_CONTAINER_ICONS && item[slot_function::ITEM_DEPOSIT].alive()) {
			bottom_number_val = item[slot_function::ITEM_DEPOSIT].calculate_free_space_with_parent_containers();

			if (item[slot_function::ITEM_DEPOSIT]->for_categorized_items_only)
				label_color.rgb() = pink.rgb();
			else
				label_color.rgb() = cyan.rgb();
		}

		// Negative sentinel means "no caption".
		if (bottom_number_val > -1.f) {
			std::wstring label_wstr;

			if (append_x) {
				label_wstr = L'x';
				label_wstr += augs::to_wstring(bottom_number_val);
			}
			else
				label_wstr = augs::to_wstring(bottom_number_val, 2);

			// else label_wstr = L'{' + label_wstr + L'}';

			auto bottom_number = augs::gui::text::format(label_wstr, augs::gui::text::style(assets::font_id::GUI_FONT, label_color));

			charges_caption.set_text(bottom_number);
			charges_caption.bottom_right(get_rect_absolute());
			charges_caption.draw(in);
		}
	}

	if (draw_border) {
		augs::gui::solid_stroke stroke;
		stroke.set_material(augs::gui::material(assets::texture_id::BLANK, border_col));
		stroke.draw(in.v, *this);
	}

	// Connector line to the parent container's button, drawn only when the
	// parent slot's GUI element belongs to a different entity.
	if (draw_connector && get_meta(parent_slot).gui_element_entity != parent_slot.container_entity) {
		draw_pixel_line_connector(get_rect_absolute(), get_meta(parent_slot.container_entity).get_rect_absolute(), in, border_col);
	}

	if (draw_container_opened_mark) {
		if (item->find<components::container>()) {
			components::sprite container_status_sprite;

			// Icon reflects the open/closed state of this container.
			if(is_container_open)
				container_status_sprite.set(assets::CONTAINER_OPEN_ICON, border_col);
			else
				container_status_sprite.set(assets::CONTAINER_CLOSED_ICON, border_col);

			shared::state_for_drawing_renderable state;
			state.screen_space_mode = true;
			state.overridden_target_buffer = &in.v;
			// Anchor the icon to the top-right corner of the button.
			state.renderable_transform.pos.set(get_rect_absolute().r - container_status_sprite.size.x + 2,
				get_rect_absolute().t + 1
				//- container_status_sprite.size.y + 2
			);
			container_status_sprite.draw(state);
		}
	}
}
// Debug visitor: prints "<property name>:<value>" to stderr.  Slot 0 of the
// property metadata tuple is the property's canonical name.
void operator() (T const& val) const
{
	std::cerr << std::get<0>(get_meta(key_)) << ":" << val << std::endl;
	//put<T>(sym_, key_, val);
}
/* Parses the ASF header objects already read into asf->header: stream
 * headers (audio/video, including the dvr-ms special case), the file header,
 * the content description and the stream-group object.  Picks default
 * audio/video stream ids and leaves the demuxer positioned at the first data
 * chunk.  Returns 1 on success, 0 on failure.
 *
 * Fix: the default-stream-id assignments read `streamh.stream_no`, but
 * `streamh` is a pointer (ASF_stream_header_t *); the accesses must be
 * `streamh->stream_no` as everywhere else in this function. */
int read_asf_header(demuxer_t *demuxer, struct asf_priv* asf)
{
    int hdr_len = asf->header.objh.size - sizeof(asf->header);
    int hdr_skip = 0;
    char *hdr = NULL;
    char guid_buffer[16];
    int pos, start = stream_tell(demuxer->stream);
    uint32_t* streams = NULL;
    int audio_streams = 0;
    int video_streams = 0;
    uint16_t stream_count = 0;
    int best_video = -1;
    int best_audio = -1;
    uint64_t data_len;
    ASF_stream_header_t *streamh;
    uint8_t *buffer;
    int audio_pos = 0;

    if (hdr_len < 0) {
        mp_msg(MSGT_HEADER, MSGL_FATAL, "Header size is too small.\n");
        return 0;
    }

    /* Cap the header buffer at 1 MB and skip the excess in the stream. */
    if (hdr_len > 1024 * 1024) {
        mp_msg(MSGT_HEADER, MSGL_ERR, MSGTR_MPDEMUX_ASFHDR_HeaderSizeOver1MB, hdr_len);
        hdr_skip = hdr_len - 1024 * 1024;
        hdr_len = 1024 * 1024;
    }
    hdr = malloc(hdr_len);
    if (!hdr) {
        mp_msg(MSGT_HEADER, MSGL_FATAL, MSGTR_MPDEMUX_ASFHDR_HeaderMallocFailed, hdr_len);
        return 0;
    }
    stream_read(demuxer->stream, hdr, hdr_len);
    if (hdr_skip)
        stream_skip(demuxer->stream, hdr_skip);
    if (stream_eof(demuxer->stream)) {
        mp_msg(MSGT_HEADER, MSGL_FATAL, MSGTR_MPDEMUX_ASFHDR_EOFWhileReadingHeader);
        goto err_out;
    }

    if (is_drm(hdr, hdr_len))
        mp_msg(MSGT_HEADER, MSGL_FATAL, MSGTR_MPDEMUX_ASFHDR_DRMProtected);

    if ((pos = find_asf_guid(hdr, asf_ext_stream_audio, 0, hdr_len)) >= 0) {
        // Special case: found GUID for dvr-ms audio.
        // Now skip back to associated stream header.
        int sh_pos = 0;

        sh_pos = find_backwards_asf_guid(hdr, asf_stream_header_guid, pos);

        if (sh_pos > 0) {
            sh_audio_t *sh_audio;

            mp_msg(MSGT_HEADER, MSGL_V, "read_asf_header found dvr-ms audio stream header pos=%d\n", sh_pos);
            // found audio stream header - following code reads header and
            // initializes audio stream.
            audio_pos = pos - 16 - 8;
            streamh = (ASF_stream_header_t *)&hdr[sh_pos];
            le2me_ASF_stream_header_t(streamh);
            audio_pos += 64; //16+16+4+4+4+16+4;
            buffer = &hdr[audio_pos];
            sh_audio = new_sh_audio(demuxer, streamh->stream_no & 0x7F, NULL);
            sh_audio->needs_parsing = 1;
            mp_msg(MSGT_DEMUX, MSGL_INFO, MSGTR_AudioID, "asfheader", streamh->stream_no & 0x7F);
            ++audio_streams;
            if (!asf_init_audio_stream(demuxer, asf, sh_audio, streamh, &audio_pos, &buffer, hdr, hdr_len))
                goto len_err_out;
            if (!get_ext_stream_properties(hdr, hdr_len, streamh->stream_no, asf, 0))
                goto len_err_out;
        }
    }
    // find stream headers
    // only reset pos if we didnt find dvr_ms audio stream
    // if we did find it then we want to avoid reading its header twice
    if (audio_pos == 0)
        pos = 0;

    while ((pos = find_asf_guid(hdr, asf_stream_header_guid, pos, hdr_len)) >= 0) {
        streamh = (ASF_stream_header_t *)&hdr[pos];
        pos += sizeof(ASF_stream_header_t);
        if (pos > hdr_len) goto len_err_out;
        le2me_ASF_stream_header_t(streamh);
        mp_msg(MSGT_HEADER, MSGL_V, "stream type: %s\n", asf_chunk_type(streamh->type));
        mp_msg(MSGT_HEADER, MSGL_V, "stream concealment: %s\n", asf_chunk_type(streamh->concealment));
        mp_msg(MSGT_HEADER, MSGL_V, "type: %d bytes, stream: %d bytes ID: %d\n", (int)streamh->type_size, (int)streamh->stream_size, (int)streamh->stream_no);
        mp_msg(MSGT_HEADER, MSGL_V, "unk1: %lX unk2: %X\n", (unsigned long)streamh->unk1, (unsigned int)streamh->unk2);
        mp_msg(MSGT_HEADER, MSGL_V, "FILEPOS=0x%X\n", pos + start);
        // type-specific data:
        buffer = &hdr[pos];
        pos += streamh->type_size;
        if (pos > hdr_len) goto len_err_out;
        switch (ASF_LOAD_GUID_PREFIX(streamh->type)) {
        case ASF_GUID_PREFIX_audio_stream: {
            sh_audio_t* sh_audio = new_sh_audio(demuxer, streamh->stream_no & 0x7F, NULL);
            mp_msg(MSGT_DEMUX, MSGL_INFO, MSGTR_AudioID, "asfheader", streamh->stream_no & 0x7F);
            ++audio_streams;
            if (!asf_init_audio_stream(demuxer, asf, sh_audio, streamh, &pos, &buffer, hdr, hdr_len))
                goto len_err_out;
            //if(demuxer->audio->id==-1)
            demuxer->audio->id = streamh->stream_no & 0x7F; /* FIX: was streamh.stream_no */
            break;
        }
        case ASF_GUID_PREFIX_video_stream: {
            unsigned int len;
            float asp_ratio;
            sh_video_t* sh_video = new_sh_video(demuxer, streamh->stream_no & 0x7F);
            mp_msg(MSGT_DEMUX, MSGL_INFO, MSGTR_VideoID, "asfheader", streamh->stream_no & 0x7F);
            len = streamh->type_size - (4 + 4 + 1 + 2);
            ++video_streams;
            // sh_video->bih=malloc(chunksize); memset(sh_video->bih,0,chunksize);
            sh_video->bih = calloc((len < sizeof(*sh_video->bih)) ? sizeof(*sh_video->bih) : len, 1);
            memcpy(sh_video->bih, &buffer[4 + 4 + 1 + 2], len);
            le2me_BITMAPINFOHEADER(sh_video->bih);
            if (sh_video->bih->biSize > len && sh_video->bih->biSize > sizeof(*sh_video->bih))
                sh_video->bih->biSize = len;
            if (sh_video->bih->biCompression == mmioFOURCC('D', 'V', 'R', ' ')) {
                //mp_msg(MSGT_DEMUXER, MSGL_WARN, MSGTR_MPDEMUX_ASFHDR_DVRWantsLibavformat);
                //sh_video->fps=(float)sh_video->video.dwRate/(float)sh_video->video.dwScale;
                //sh_video->frametime=(float)sh_video->video.dwScale/(float)sh_video->video.dwRate;
                asf->asf_frame_state = -1;
                asf->asf_frame_start_found = 0;
                asf->asf_is_dvr_ms = 1;
                asf->dvr_last_vid_pts = 0.0;
            } else
                asf->asf_is_dvr_ms = 0;
            if (!get_ext_stream_properties(hdr, hdr_len, streamh->stream_no, asf, 1))
                goto len_err_out;
            if (get_meta(hdr, hdr_len, streamh->stream_no, &asp_ratio)) {
                sh_video->aspect = asp_ratio * sh_video->bih->biWidth / sh_video->bih->biHeight;
            }
            sh_video->i_bps = asf->bps;
            if (mp_msg_test(MSGT_DEMUX, MSGL_V))
                print_video_header(sh_video->bih, MSGL_V);
            //asf_video_id=streamh->stream_no & 0x7F;
            //if(demuxer->video->id==-1)
            demuxer->video->id = streamh->stream_no & 0x7F; /* FIX: was streamh.stream_no */
            break;
        }
        }
        // stream-specific data:
        // stream_read(demuxer->stream,(char*) buffer,streamh.stream_size);
    }

    // find file header
    pos = find_asf_guid(hdr, asf_file_header_guid, 0, hdr_len);
    if (pos >= 0) {
        ASF_file_header_t *fileh = (ASF_file_header_t *)&hdr[pos];
        pos += sizeof(ASF_file_header_t);
        if (pos > hdr_len) goto len_err_out;
        le2me_ASF_file_header_t(fileh);
        mp_msg(MSGT_HEADER, MSGL_V, "ASF: packets: %d flags: %d "
               "max_packet_size: %d min_packet_size: %d max_bitrate: %d "
               "preroll: %d\n",
               (int)fileh->num_packets, (int)fileh->flags, (int)fileh->min_packet_size, (int)fileh->max_packet_size, (int)fileh->max_bitrate, (int)fileh->preroll);
        asf->packetsize = fileh->max_packet_size;
        asf->packet = malloc(asf->packetsize); // !!!
        asf->packetrate = fileh->max_bitrate / 8.0 / (double)asf->packetsize;
        asf->movielength = (fileh->play_duration - 10000 * fileh->preroll) / 10000000.0;
    }

    // find content header
    pos = find_asf_guid(hdr, asf_content_desc_guid, 0, hdr_len);
    if (pos >= 0) {
        ASF_content_description_t *contenth = (ASF_content_description_t *)&hdr[pos];
        char *string = NULL;
        uint16_t* wstring = NULL;
        uint16_t len;
        pos += sizeof(ASF_content_description_t);
        if (pos > hdr_len) goto len_err_out;
        le2me_ASF_content_description_t(contenth);
        mp_msg(MSGT_HEADER, MSGL_V, "\n");
        // extract the title
        if ((len = contenth->title_size) != 0) {
            wstring = (uint16_t*)&hdr[pos];
            pos += len;
            if (pos > hdr_len) goto len_err_out;
            if ((string = get_ucs2str(wstring, len))) {
                mp_msg(MSGT_HEADER, MSGL_V, " Title: %s\n", string);
                demux_info_add(demuxer, "title", string);
                free(string);
            }
        }
        // extract the author
        if ((len = contenth->author_size) != 0) {
            wstring = (uint16_t*)&hdr[pos];
            pos += len;
            if (pos > hdr_len) goto len_err_out;
            if ((string = get_ucs2str(wstring, len))) {
                mp_msg(MSGT_HEADER, MSGL_V, " Author: %s\n", string);
                demux_info_add(demuxer, "author", string);
                free(string);
            }
        }
        // extract the copyright
        if ((len = contenth->copyright_size) != 0) {
            wstring = (uint16_t*)&hdr[pos];
            pos += len;
            if (pos > hdr_len) goto len_err_out;
            if ((string = get_ucs2str(wstring, len))) {
                mp_msg(MSGT_HEADER, MSGL_V, " Copyright: %s\n", string);
                demux_info_add(demuxer, "copyright", string);
                free(string);
            }
        }
        // extract the comment
        if ((len = contenth->comment_size) != 0) {
            wstring = (uint16_t*)&hdr[pos];
            pos += len;
            if (pos > hdr_len) goto len_err_out;
            if ((string = get_ucs2str(wstring, len))) {
                mp_msg(MSGT_HEADER, MSGL_V, " Comment: %s\n", string);
                demux_info_add(demuxer, "comments", string);
                free(string);
            }
        }
        // extract the rating
        if ((len = contenth->rating_size) != 0) {
            wstring = (uint16_t*)&hdr[pos];
            pos += len;
            if (pos > hdr_len) goto len_err_out;
            if ((string = get_ucs2str(wstring, len))) {
                mp_msg(MSGT_HEADER, MSGL_V, " Rating: %s\n", string);
                free(string);
            }
        }
        mp_msg(MSGT_HEADER, MSGL_V, "\n");
    }

    // find content header
    pos = find_asf_guid(hdr, asf_stream_group_guid, 0, hdr_len);
    if (pos >= 0) {
        int max_streams = (hdr_len - pos - 2) / 6;
        uint16_t stream_id, i;
        uint32_t max_bitrate;
        char *ptr = &hdr[pos];
        mp_msg(MSGT_HEADER, MSGL_V, "============ ASF Stream group == START ===\n");
        if (max_streams <= 0) goto len_err_out;
        stream_count = AV_RL16(ptr);
        ptr += sizeof(uint16_t);
        if (stream_count > max_streams) stream_count = max_streams;
        if (stream_count > 0)
            streams = malloc(2 * stream_count * sizeof(uint32_t));
        mp_msg(MSGT_HEADER, MSGL_V, " stream count=[0x%x][%u]\n", stream_count, stream_count);
        for (i = 0; i < stream_count; i++) {
            stream_id = AV_RL16(ptr);
            ptr += sizeof(uint16_t);
            max_bitrate = AV_RL32(ptr);
            ptr += sizeof(uint32_t);
            mp_msg(MSGT_HEADER, MSGL_V, " stream id=[0x%x][%u]\n", stream_id, stream_id);
            mp_msg(MSGT_HEADER, MSGL_V, " max bitrate=[0x%x][%u]\n", max_bitrate, max_bitrate);
            streams[2*i] = stream_id;
            streams[2*i+1] = max_bitrate;
        }
        mp_msg(MSGT_HEADER, MSGL_V, "============ ASF Stream group == END ===\n");
    }
    free(hdr);
    hdr = NULL;
    start = stream_tell(demuxer->stream); // start of first data chunk
    stream_read(demuxer->stream, guid_buffer, 16);
    if (memcmp(guid_buffer, asf_data_chunk_guid, 16) != 0) {
        mp_msg(MSGT_HEADER, MSGL_FATAL, MSGTR_MPDEMUX_ASFHDR_NoDataChunkAfterHeader);
        free(streams);
        streams = NULL;
        return 0;
    }
    // read length of chunk
    data_len = stream_read_qword_le(demuxer->stream);
    demuxer->movi_start = stream_tell(demuxer->stream) + 26;
    demuxer->movi_end = start + data_len;
    mp_msg(MSGT_HEADER, MSGL_V, "Found movie at 0x%X - 0x%X\n", (int)demuxer->movi_start, (int)demuxer->movi_end);

    if (streams) {
        // stream selection is done in the network code, it shouldn't be done here
        // as the servers often do not care about what we requested.
#if 0
        uint32_t vr = 0, ar = 0, i;
#ifdef CONFIG_NETWORKING
        if (demuxer->stream->streaming_ctrl != NULL) {
            if (demuxer->stream->streaming_ctrl->bandwidth != 0 && demuxer->stream->streaming_ctrl->data != NULL) {
                best_audio = ((asf_http_streaming_ctrl_t*)demuxer->stream->streaming_ctrl->data)->audio_id;
                best_video = ((asf_http_streaming_ctrl_t*)demuxer->stream->streaming_ctrl->data)->video_id;
            }
        } else
#endif
        for (i = 0; i < stream_count; i++) {
            uint32_t id = streams[2*i];
            uint32_t rate = streams[2*i+1];
            if (demuxer->v_streams[id] && rate > vr) {
                vr = rate;
                best_video = id;
            } else if (demuxer->a_streams[id] && rate > ar) {
                ar = rate;
                best_audio = id;
            }
        }
#endif
        free(streams);
        streams = NULL;
    }

    mp_msg(MSGT_HEADER, MSGL_V, "ASF: %d audio and %d video streams found\n", audio_streams, video_streams);
    if (!audio_streams) demuxer->audio->id = -2; // nosound
    else if (best_audio > 0 && demuxer->audio->id == -1) demuxer->audio->id = best_audio;
    if (!video_streams) {
        if (!audio_streams) {
            mp_msg(MSGT_HEADER, MSGL_ERR, MSGTR_MPDEMUX_ASFHDR_AudioVideoHeaderNotFound);
            return 0;
        }
        demuxer->video->id = -2; // audio-only
    } else if (best_video > 0 && demuxer->video->id == -1)
        demuxer->video->id = best_video;

#if 0
    if (mp_msg_test(MSGT_HEADER, MSGL_V)) {
        printf("ASF duration: %d\n", (int)fileh.duration);
        printf("ASF start pts: %d\n", (int)fileh.start_timestamp);
        printf("ASF end pts: %d\n", (int)fileh.end_timestamp);
    }
#endif
    return 1;

len_err_out:
    mp_msg(MSGT_HEADER, MSGL_FATAL, MSGTR_MPDEMUX_ASFHDR_InvalidLengthInASFHeader);
err_out:
    if (hdr) free(hdr);
    if (streams) free(streams);
    return 0;
}
/* aiff input */
/* Parses the FORM/AIFF(-C) container chunk by chunk: COMM (format), ID3,
 * FVER, textual metadata chunks, SSND (sound data location), codec-specific
 * 'wave' extradata and CHAN.  Leaves the stream positioned at the first
 * sound block.  Returns 0 on success, a negative AVERROR on failure. */
static int aiff_read_header(AVFormatContext *s)
{
    int ret, size, filesize;
    int64_t offset = 0, position;
    uint32_t tag;
    unsigned version = AIFF_C_VERSION1;
    AVIOContext *pb = s->pb;
    AVStream * st;
    AIFFInputContext *aiff = s->priv_data;
    ID3v2ExtraMeta *id3v2_extra_meta = NULL;

    /* check FORM header */
    filesize = get_tag(pb, &tag);
    if (filesize < 0 || tag != MKTAG('F', 'O', 'R', 'M'))
        return AVERROR_INVALIDDATA;

    /* AIFF data type */
    tag = avio_rl32(pb);
    if (tag == MKTAG('A', 'I', 'F', 'F')) /* Got an AIFF file */
        version = AIFF;
    else if (tag != MKTAG('A', 'I', 'F', 'C')) /* An AIFF-C file then */
        return AVERROR_INVALIDDATA;

    filesize -= 4;

    st = avformat_new_stream(s, NULL);
    if (!st)
        return AVERROR(ENOMEM);

    while (filesize > 0) {
        /* parse different chunks */
        size = get_tag(pb, &tag);
        if (size < 0)
            return size;

        filesize -= size + 8; /* chunk payload plus 8-byte tag/size header */

        switch (tag) {
        case MKTAG('C', 'O', 'M', 'M'): /* Common chunk */
            /* Then for the complete header info */
            st->nb_frames = get_aiff_header(s, size, version);
            if (st->nb_frames < 0)
                return st->nb_frames;
            if (offset > 0) // COMM is after SSND
                goto got_sound;
            break;
        case MKTAG('I', 'D', '3', ' '):
            position = avio_tell(pb);
            ff_id3v2_read(s, ID3v2_DEFAULT_MAGIC, &id3v2_extra_meta);
            if (id3v2_extra_meta)
                if ((ret = ff_id3v2_parse_apic(s, &id3v2_extra_meta)) < 0) {
                    ff_id3v2_free_extra_meta(&id3v2_extra_meta);
                    return ret;
                }
            ff_id3v2_free_extra_meta(&id3v2_extra_meta);
            /* Skip whatever the ID3 reader did not consume. */
            if (position + size > avio_tell(pb))
                avio_skip(pb, position + size - avio_tell(pb));
            break;
        case MKTAG('F', 'V', 'E', 'R'): /* Version chunk */
            version = avio_rb32(pb);
            break;
        case MKTAG('N', 'A', 'M', 'E'): /* Sample name chunk */
            get_meta(s, "title" , size);
            break;
        case MKTAG('A', 'U', 'T', 'H'): /* Author chunk */
            get_meta(s, "author" , size);
            break;
        case MKTAG('(', 'c', ')', ' '): /* Copyright chunk */
            get_meta(s, "copyright", size);
            break;
        case MKTAG('A', 'N', 'N', 'O'): /* Annotation chunk */
            get_meta(s, "comment" , size);
            break;
        case MKTAG('S', 'S', 'N', 'D'): /* Sampled sound chunk */
            aiff->data_end = avio_tell(pb) + size;
            offset = avio_rb32(pb); /* Offset of sound data */
            avio_rb32(pb); /* BlockSize... don't care */
            offset += avio_tell(pb); /* Compute absolute data offset */
            if (st->codec->block_align && !pb->seekable) /* Assume COMM already parsed */
                goto got_sound;
            if (!pb->seekable) {
                av_log(s, AV_LOG_ERROR, "file is not seekable\n");
                return -1;
            }
            avio_skip(pb, size - 8);
            break;
        case MKTAG('w', 'a', 'v', 'e'):
            if ((uint64_t)size > (1<<30))
                return -1;
            st->codec->extradata = av_mallocz(size + FF_INPUT_BUFFER_PADDING_SIZE);
            if (!st->codec->extradata)
                return AVERROR(ENOMEM);
            st->codec->extradata_size = size;
            avio_read(pb, st->codec->extradata, size);
            /* Some codecs stash their block parameters in the extradata. */
            if (st->codec->codec_id == AV_CODEC_ID_QDM2 && size>=12*4 && !st->codec->block_align) {
                st->codec->block_align = AV_RB32(st->codec->extradata+11*4);
                aiff->block_duration = AV_RB32(st->codec->extradata+9*4);
            } else if (st->codec->codec_id == AV_CODEC_ID_QCELP) {
                char rate = 0;
                if (size >= 25)
                    rate = st->codec->extradata[24];
                switch (rate) {
                case 'H': // RATE_HALF
                    st->codec->block_align = 17;
                    break;
                case 'F': // RATE_FULL
                default:
                    st->codec->block_align = 35;
                }
                aiff->block_duration = 160;
                st->codec->bit_rate = st->codec->sample_rate * (st->codec->block_align << 3) / aiff->block_duration;
            }
            break;
        case MKTAG('C','H','A','N'):
            if(ff_mov_read_chan(s, pb, st, size) < 0)
                return AVERROR_INVALIDDATA;
            break;
        default: /* Jump */
            if (size & 1) /* Always even aligned */
                size++;
            avio_skip(pb, size);
        }
    }

got_sound:
    if (!st->codec->block_align) {
        av_log(s, AV_LOG_ERROR, "could not find COMM tag or invalid block_align value\n");
        return -1;
    }

    /* Now positioned, get the sound data start and end */
    avpriv_set_pts_info(st, 64, 1, st->codec->sample_rate);
    st->start_time = 0;
    st->duration = st->nb_frames * aiff->block_duration;

    /* Position the stream at the first block */
    avio_seek(pb, offset, SEEK_SET);

    return 0;
}
/*
 * Load a Sensolytics text file into a new GwyContainer.
 *
 * The file is a '#'-prefixed header (key: value lines) followed by
 * comma-separated data rows.  Header keys/values are collected into a
 * hash table (pointers into @buffer — valid only while @buffer lives),
 * the "X [" header line triggers channel allocation, and subsequent
 * rows are parsed into the channel data fields in reverse row order
 * (expecting_data counts down).
 *
 * Returns the container on success, NULL on failure with @error set.
 */
static GwyContainer* sly_load(const gchar *filename, G_GNUC_UNUSED GwyRunType mode, GError **error)
{
    GwyContainer *container = NULL, *meta = NULL;
    gchar *buffer = NULL;
    GError *err = NULL;
    GHashTable *hash = NULL;
    gchar *p, *line, *value;
    guint expecting_data = 0;       /* number of data rows still expected */
    SensolyticsChannel *channels = NULL;
    Dimensions dimensions;
    gint ndata = 0, i;

    if (!g_file_get_contents(filename, &buffer, NULL, &err)) {
        err_GET_FILE_CONTENTS(error, &err);
        return NULL;
    }

    /* First line must be the magic marker for this format. */
    p = buffer;
    line = gwy_str_next_line(&p);
    g_strstrip(line);
    if (!gwy_strequal(line, MAGIC)) {
        err_FILE_TYPE(error, "Sensolytics");
        goto fail;
    }

    /* Keys and values inserted below point into @buffer; no destroy
     * functions are registered, so destroying the table frees nothing. */
    hash = g_hash_table_new(g_str_hash, g_str_equal);
    for (line = gwy_str_next_line(&p); line; line = gwy_str_next_line(&p)) {
        if (!line[0])
            continue;
        if (expecting_data) {
            /* Data rows are stored back-to-front: the countdown index
             * doubles as the destination row index. */
            expecting_data--;
            /* The columns are comma-separated and numbers use decimal points.
             * Do not tempt the number parsing functions more than necessary
             * and fix commas to tab characters. */
            g_strdelimit(line, ",", '\t');
            /* Ignore X, Y and Z, each is two values */
            for (i = 0; i < 6; i++)
                g_ascii_strtod(line, &line);
            for (i = 0; i < ndata; i++)
                channels[i].data[expecting_data] = channels[i].q * g_ascii_strtod(line, &line);
        }
        else {
            g_strstrip(line);
            if (line[0] != '#') {
                g_warning("Comment line does not start with #.");
                continue;
            }
            /* Skip the '#' and any whitespace after it. */
            do {
                line++;
            } while (g_ascii_isspace(*line));

            /* The column-header line starting "X [" separates the
             * metadata header from the data block. */
            if (g_str_has_prefix(line, "X [")) {
                if (channels) {
                    g_warning("Multiple data headers!?");
                    continue;
                }
                if (!read_dimensions(hash, &ndata, &dimensions, error) || !(channels = create_fields(hash, line, ndata, &dimensions)))
                    goto fail;
                expecting_data = dimensions.xres * dimensions.yres;
                continue;
            }

            value = strchr(line, ':');
            if (!value) {
                if (!gwy_strequal(line, "ArrayScan"))
                    g_warning("Non-parameter-like line %s", line);
                continue;
            }
            /* Split "key: value" in place, trimming both halves. */
            *value = '\0';
            g_strchomp(line);
            do {
                value++;
            } while (g_ascii_isspace(*value));

            if (gwy_strequal(line, "Warning"))
                continue;

            gwy_debug("<%s>=<%s>", line, value);
            g_hash_table_insert(hash, line, value);
        }
    }

    if (!channels) {
        err_NO_DATA(error);
        goto fail;
    }

    /* Move each parsed channel into the output container. */
    container = gwy_container_new();
    for (i = 0; i < ndata; i++) {
        GQuark key = gwy_app_get_data_key_for_id(i);

        /* Flip vertically — presumably the file stores rows top-down
         * while Gwyddion expects bottom-up; confirm against the format. */
        gwy_data_field_invert(channels[i].dfield, FALSE, TRUE, FALSE);
        gwy_container_set_object(container, key, channels[i].dfield);
        gwy_app_channel_check_nonsquare(container, i);
        if (channels[i].name) {
            gchar *s = g_strconcat(g_quark_to_string(key), "/title", NULL);
            gwy_container_set_string_by_name(container, s, g_strdup(channels[i].name));
            g_free(s);
        }
        else
            gwy_app_channel_title_fall_back(container, i);
        gwy_file_channel_import_log_add(container, i, NULL, filename);
    }

    /* Attach the header key/value pairs as per-channel metadata. */
    meta = get_meta(hash);
    clone_meta(container, meta, ndata);
    g_object_unref(meta);

fail:
    /* Shared cleanup for both success and failure paths; @container is
     * NULL on failure so the caller can distinguish. */
    g_free(buffer);
    if (hash)
        g_hash_table_destroy(hash);
    if (channels) {
        for (i = 0; i < ndata; i++)
            g_object_unref(channels[i].dfield);
        g_free(channels);
    }

    return container;
}
/* aiff input */
/*
 * Parse the header of an AIFF / AIFF-C file: validate the FORM container,
 * walk the chunk list (COMM, FVER, metadata, SSND, 'wave'), and leave the
 * stream positioned at the first sound block.
 *
 * Returns 0 on success, a negative AVERROR code on failure.
 *
 * Fix: the `got_sound:` label used to sit AFTER the block_align validity
 * check, so both `goto got_sound` paths (COMM seen after SSND, or SSND
 * with block_align already known) bypassed the check and a stream with
 * block_align == 0 could reach packet reading.  The label now precedes
 * the check so every exit path validates block_align.
 */
static int aiff_read_header(AVFormatContext *s, AVFormatParameters *ap)
{
    int size, filesize;
    int64_t offset = 0;             /* absolute start of sound data (0 until SSND seen) */
    uint32_t tag;
    unsigned version = AIFF_C_VERSION1;
    AVIOContext *pb = s->pb;
    AVStream * st;
    AIFFInputContext *aiff = s->priv_data;

    /* check FORM header */
    filesize = get_tag(pb, &tag);
    if (filesize < 0 || tag != MKTAG('F', 'O', 'R', 'M'))
        return AVERROR_INVALIDDATA;

    /* AIFF data type */
    tag = avio_rl32(pb);
    if (tag == MKTAG('A', 'I', 'F', 'F'))       /* Got an AIFF file */
        version = AIFF;
    else if (tag != MKTAG('A', 'I', 'F', 'C'))  /* An AIFF-C file then */
        return AVERROR_INVALIDDATA;

    /* The form-type fourcc just consumed counts against the FORM size. */
    filesize -= 4;

    st = av_new_stream(s, 0);
    if (!st)
        return AVERROR(ENOMEM);

    while (filesize > 0) {
        /* parse different chunks */
        size = get_tag(pb, &tag);
        if (size < 0)
            return size;

        /* 8 = chunk fourcc + chunk size field. */
        filesize -= size + 8;

        switch (tag) {
        case MKTAG('C', 'O', 'M', 'M'):     /* Common chunk */
            /* Then for the complete header info */
            st->nb_frames = get_aiff_header(pb, st->codec, size, version);
            if (st->nb_frames < 0)
                return st->nb_frames;
            if (offset > 0) // COMM is after SSND
                goto got_sound;
            break;
        case MKTAG('F', 'V', 'E', 'R'):     /* Version chunk */
            version = avio_rb32(pb);
            break;
        case MKTAG('N', 'A', 'M', 'E'):     /* Sample name chunk */
            get_meta(s, "title" , size);
            break;
        case MKTAG('A', 'U', 'T', 'H'):     /* Author chunk */
            get_meta(s, "author" , size);
            break;
        case MKTAG('(', 'c', ')', ' '):     /* Copyright chunk */
            get_meta(s, "copyright", size);
            break;
        case MKTAG('A', 'N', 'N', 'O'):     /* Annotation chunk */
            get_meta(s, "comment" , size);
            break;
        case MKTAG('S', 'S', 'N', 'D'):     /* Sampled sound chunk */
            aiff->data_end = avio_tell(pb) + size;
            offset = avio_rb32(pb);         /* Offset of sound data */
            avio_rb32(pb);                  /* BlockSize... don't care */
            offset += avio_tell(pb);        /* Compute absolute data offset */
            if (st->codec->block_align)     /* Assume COMM already parsed */
                goto got_sound;
            if (!pb->seekable) {
                /* COMM not seen yet and we cannot come back for it later. */
                av_log(s, AV_LOG_ERROR, "file is not seekable\n");
                return -1;
            }
            /* Seekable: skip the sound data (size minus the 8 bytes of
             * offset/blocksize already read) and keep scanning chunks. */
            avio_skip(pb, size - 8);
            break;
        case MKTAG('w', 'a', 'v', 'e'):
            /* Codec-specific configuration blob -> extradata. */
            if ((uint64_t)size > (1<<30))
                return -1;
            st->codec->extradata = av_mallocz(size + FF_INPUT_BUFFER_PADDING_SIZE);
            if (!st->codec->extradata)
                return AVERROR(ENOMEM);
            st->codec->extradata_size = size;
            avio_read(pb, st->codec->extradata, size);
            break;
        default: /* Jump */
            if (size & 1)   /* Always even aligned */
                size++;
            avio_skip(pb, size);
        }
    }

got_sound:
    /* Every path (loop exit and both gotos) must land here so that a
     * missing COMM chunk / zero block_align is always rejected. */
    if (!st->codec->block_align) {
        av_log(s, AV_LOG_ERROR, "could not find COMM tag\n");
        return -1;
    }

    /* Now positioned, get the sound data start and end */
    if (st->nb_frames)
        s->file_size = st->nb_frames * st->codec->block_align;

    av_set_pts_info(st, 64, 1, st->codec->sample_rate);
    st->start_time = 0;
    st->duration = st->codec->frame_size ? st->nb_frames * st->codec->frame_size : st->nb_frames;

    /* Position the stream at the first block */
    avio_seek(pb, offset, SEEK_SET);

    return 0;
}
std::string FBVLCMediaDescAPI::get_setting() { return get_meta( libvlc_meta_Setting ); }