Example 1: ModelCache_find (std::string overload)
// Look up the cache entry for (path, name); returns end() when caching is disabled or no entry exists.
static ModelCache::iterator ModelCache_find (const std::string& path, const std::string& name)
{
    if (g_modelCache_enabled) {
        return g_modelCache.find(ModelKey(path, name));
    }
    return g_modelCache.end();
}
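
For context, here is a minimal sketch of the declarations the examples assume. It is illustrative only: the real project defines its own cache container and its own NodeSmartReference handle. The only constraints taken from the snippets are the names (ModelKey, ModelCache, g_modelCache, g_modelCache_enabled) and the fact that dereferencing a ModelCache::iterator exposes .key and .value members, as examples 3 to 5 do.

#include <list>
#include <string>
#include <utility>

// Stand-in for the project's reference-counted scene-node handle (illustrative only).
struct NodeSmartReference { };

// Cache key: the (path, name) pair identifying a model resource.
typedef std::pair<std::string, std::string> ModelKey;

// Minimal cache whose iterator dereferences to an entry exposing .key and .value,
// matching the (*i).key / (*i).value accesses in the examples.
class ModelCache
{
public:
    struct Entry
    {
        ModelKey key;
        NodeSmartReference value;
    };
    typedef std::list<Entry>::iterator iterator;

    iterator end ()
    {
        return m_entries.end();
    }

    iterator find (const ModelKey& key)
    {
        for (iterator i = m_entries.begin(); i != m_entries.end(); ++i) {
            if ((*i).key == key)
                return i;
        }
        return m_entries.end();
    }

    iterator insert (const ModelKey& key, const NodeSmartReference& value)
    {
        Entry entry;
        entry.key = key;
        entry.value = value;
        return m_entries.insert(m_entries.end(), entry);
    }

    void erase (iterator i)
    {
        m_entries.erase(i);
    }

private:
    std::list<Entry> m_entries;
};

ModelCache g_modelCache;
bool g_modelCache_enabled = true;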
Example 2: ModelCache_find (const char* overload)
// Variant of the lookup taking raw C strings; the ModelKey is constructed from them directly.
ModelCache::iterator ModelCache_find(const char* path, const char* name)
{
  if(g_modelCache_enabled)
  {
    return g_modelCache.find(ModelKey(path, name));
  }
  return g_modelCache.end();
}
Example 3: ModelCache_flush
// Remove the cache entry for (path, name), if one exists.
void ModelCache_flush (const std::string& path, const std::string& name)
{
    ModelCache::iterator i = g_modelCache.find(ModelKey(path, name));
    if (i != g_modelCache.end()) {
        //ASSERT_MESSAGE((*i).value.getCount() == 0, "resource flushed while still in use: " << (*i).key.first.c_str() << (*i).key.second.c_str());
        g_modelCache.erase(i);
    }
}
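
A hypothetical call site, not taken from the source: the resource object that cached a model would evict its entry using the same (path, name) pair it was cached under, normally once nothing else references it, which is what the commented-out assertion checks.

    // Hypothetical caller sketch; m_path and m_name mirror the members used in examples 4 and 5.
    void flush ()
    {
        ModelCache_flush(m_path, m_name);
    }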
Example 4: setNode
    // Store the given node in this resource's cache entry (if one exists) and adopt it as the current model.
    void setNode (scene::Node* node)
    {
        ModelCache::iterator i = ModelCache_find(m_path, m_name);
        if (i != g_modelCache.end()) {
            (*i).value = NodeSmartReference(*node);
        }
        setModel(NodeSmartReference(*node));

        connectMap();
    }
Example 5: loadCached
    // Load the model through the cache when caching is enabled; otherwise load it directly.
    void loadCached ()
    {
        if (g_modelCache_enabled) {
            // cache lookup
            ModelCache::iterator i = ModelCache_find(m_path, m_name);
            if (i == g_modelCache.end()) {
                i = ModelCache_insert(m_path, m_name, Model_load(m_loader, m_path, m_name, m_type));
            }

            setModel((*i).value);
        } else {
            setModel(Model_load(m_loader, m_path, m_name, m_type));
        }
    }
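
loadCached() relies on a ModelCache_insert helper that is not shown in these examples. Below is a hypothetical sketch consistent with how it is called: the key is built the same way as in ModelCache_find, the freshly loaded node (assumed to be a NodeSmartReference, since setModel accepts both it and (*i).value) becomes the cached value, and the new entry is returned so setModel can use it. The real implementation may differ.

// Hypothetical sketch, not taken from the source: store the loaded node under
// its (path, name) key and return the new cache entry.
static ModelCache::iterator ModelCache_insert (const std::string& path, const std::string& name, const NodeSmartReference& node)
{
    return g_modelCache.insert(ModelKey(path, name), node);
}

Note that loadCached() consults the cache only when g_modelCache_enabled is set, mirroring the guard in ModelCache_find; with caching disabled, each resource loads its own copy of the model.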