// Look up a cached model by its (path, name) key.
// Returns g_modelCache.end() when the model is not cached or caching is disabled.
static ModelCache::iterator ModelCache_find (const std::string& path, const std::string& name)
{
	if (g_modelCache_enabled) {
		return g_modelCache.find(ModelKey(path, name));
	}
	return g_modelCache.end();
}

// const char* overload of the lookup above.
ModelCache::iterator ModelCache_find (const char* path, const char* name)
{
	if (g_modelCache_enabled) {
		return g_modelCache.find(ModelKey(path, name));
	}
	return g_modelCache.end();
}
// Remove a model from the cache, e.g. when the underlying file has changed on disk.
void ModelCache_flush (const std::string& path, const std::string& name)
{
	ModelCache::iterator i = g_modelCache.find(ModelKey(path, name));
	if (i != g_modelCache.end()) {
		//ASSERT_MESSAGE((*i).value.getCount() == 0, "resource flushed while still in use: " << (*i).key.first.c_str() << (*i).key.second.c_str());
		g_modelCache.erase(i);
	}
}
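// Assumed context for the functions in this file (the actual definitions live
// elsewhere in the codebase; this is only a sketch of what the code relies on):
//  - ModelKey is a (path, name) pair identifying one model file.
//  - ModelCache is an associative cache keyed by ModelKey whose entries expose
//    (*i).key and (*i).value, the value being a NodeSmartReference to the model node.
//  - g_modelCache is the global cache instance; g_modelCache_enabled toggles caching.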
// Store a freshly loaded node in the existing cache entry (if any) and make it
// the active model for this resource.
void setNode (scene::Node* node)
{
	ModelCache::iterator i = ModelCache_find(m_path, m_name);
	if (i != g_modelCache.end()) {
		(*i).value = NodeSmartReference(*node);
	}
	setModel(NodeSmartReference(*node));
	connectMap();
}
// Resolve the model, preferring the cache: reuse an existing entry, otherwise
// load the model and insert it; with caching disabled, always load directly.
void loadCached ()
{
	if (g_modelCache_enabled) {
		// cache lookup
		ModelCache::iterator i = ModelCache_find(m_path, m_name);
		if (i == g_modelCache.end()) {
			i = ModelCache_insert(m_path, m_name, Model_load(m_loader, m_path, m_name, m_type));
		}
		setModel((*i).value);
	} else {
		setModel(Model_load(m_loader, m_path, m_name, m_type));
	}
}
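// ModelCache_insert() and Model_load() are used above but defined elsewhere; the
// assumed contract (a sketch of this file's expectations, not the actual code) is:
//  - Model_load(loader, path, name, type) parses the model file and returns a
//    NodeSmartReference to the newly created scene node.
//  - ModelCache_insert(path, name, node) stores that node in g_modelCache under
//    ModelKey(path, name) and returns an iterator to the new entry.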