void CGUIDialogContentSettings::SetupPage() { if (m_content == CONTENT_NONE) { m_bShowScanSettings = false; SET_CONTROL_HIDDEN(CONTROL_SCRAPER_LIST); CONTROL_DISABLE(CONTROL_SCRAPER_SETTINGS); } else { FillListControl(); SET_CONTROL_VISIBLE(CONTROL_SCRAPER_LIST); if (m_scraper && m_scraper->Enabled()) { m_bShowScanSettings = true; ScraperPtr scraper = boost::dynamic_pointer_cast<CScraper>(m_scraper); if (scraper && scraper->Supports(m_content) && scraper->HasSettings()) CONTROL_ENABLE(CONTROL_SCRAPER_SETTINGS); } else CONTROL_DISABLE(CONTROL_SCRAPER_SETTINGS); } CreateSettings(); CGUIDialogSettings::SetupPage(); SET_CONTROL_VISIBLE(CONTROL_CONTENT_TYPE); }
// Resolve a MusicBrainz album ID to a scraper URL via the preferred scraper.
// Returns true and fills musicBrainzURL when the scraper produced a URL;
// returns false when the scraper reported a user abort or produced nothing.
bool CMusicInfoScanner::ResolveMusicBrainz(const std::string &strMusicBrainzID, const ScraperPtr &preferredScraper, CScraperUrl &musicBrainzURL)
{
  // We have a MusicBrainz ID
  // Get a scraper that can resolve it to a MusicBrainz URL & force our
  // search directly to the specific album.
  bool bMusicBrainz = false;
  try
  {
    musicBrainzURL = preferredScraper->ResolveIDToUrl(strMusicBrainzID);
  }
  catch (const ADDON::CScraperError &sce)
  {
    // Only a user abort stops the resolve; any other scraper error falls
    // through and is treated the same as "no URL found".
    if (sce.FAborted())
      return false;
  }

  if (!musicBrainzURL.m_url.empty())
  {
    Sleep(2000); // MusicBrainz rate-limits queries to 1 p.s - once we hit the rate-limiter
                 // they start serving up the 'you hit the rate-limiter' page fast - meaning
                 // we will never get below the rate-limit threshold again in a specific run.
                 // This helps us to avoid the rate-limiter as far as possible.
    CLog::Log(LOGDEBUG,"-- nfo-scraper: %s",preferredScraper->Name().c_str());
    CLog::Log(LOGDEBUG,"-- nfo url: %s", musicBrainzURL.m_url[0].m_url.c_str());
    bMusicBrainz = true;
  }

  return bMusicBrainz;
}
oFile::Scrape(ScraperPtr& scraper, const CStdString& strURL /* = "" */) { if (scraper->Type() != m_type) { return 1; } if (!scraper->Load()) return 0; // init and clear cache scraper->ClearCache(); vector<CStdString> extras; CScraperUrl url; CFileCurl http; if (strURL.IsEmpty()) { if (!DoScrape(scraper)) return 2; if (m_strImDbUrl.size() > 0) return 0; else return 1; } else // we check to identify the episodeguide url { extras.push_back(strURL); vector<CStdString> result = scraper->Run("EpisodeGuideUrl",url,http,&extras); if (result.empty() || result[0].IsEmpty()) return 1; return 0; } }
void CGUIDialogContentSettings::SetupPage() { CGUIMessage msgReset(GUI_MSG_LABEL_RESET, GetID(), CONTROL_SCRAPER_LIST); OnMessage(msgReset); m_vecItems->Clear(); if (m_content == CONTENT_NONE) { m_bShowScanSettings = false; SET_CONTROL_HIDDEN(CONTROL_SCRAPER_LIST); CONTROL_DISABLE(CONTROL_SCRAPER_SETTINGS); } else { FillListControl(); SET_CONTROL_VISIBLE(CONTROL_SCRAPER_LIST); if (m_scraper && m_scraper->Enabled()) { m_bShowScanSettings = true; ScraperPtr scraper = boost::dynamic_pointer_cast<CScraper>(m_scraper); if (scraper && scraper->Supports(m_content) && scraper->HasSettings()) CONTROL_ENABLE(CONTROL_SCRAPER_SETTINGS); } else CONTROL_DISABLE(CONTROL_SCRAPER_SETTINGS); } CreateSettings(); CGUIDialogSettings::SetupPage(); SET_CONTROL_VISIBLE(CONTROL_CONTENT_TYPE); }
// return value: 0 - success; 1 - no result; skip; 2 - error int CNfoFile::Scrape(ScraperPtr& scraper) { if (scraper->IsNoop()) { m_scurl = CScraperUrl(); return 0; } if (scraper->Type() != m_type) return 1; scraper->ClearCache(); try { m_scurl = scraper->NfoUrl(m_doc); } catch (const CScraperError &sce) { CVideoInfoDownloader::ShowErrorDialog(sce); if (!sce.FAborted()) return 2; } if (!m_scurl.m_url.empty()) SetScraperInfo(scraper); return m_scurl.m_url.empty() ? 1 : 0; }
// Append to vecScrapers every addon from 'addons' that is a usable scraper
// matching the selected scraper's language (or is multi-language).
void CNfoFile::AddScrapers(VECADDONS& addons, vector<ScraperPtr>& vecScrapers)
{
  for (unsigned i=0;i<addons.size();++i)
  {
    ScraperPtr scraper = boost::dynamic_pointer_cast<CScraper>(addons[i]);
    // Guard: dynamic_pointer_cast yields NULL when the addon is not a
    // CScraper - dereferencing it below would crash.
    if (!scraper)
      continue;

    // skip if scraper requires settings and there's nothing set yet
    if (scraper->RequiresSettings() && !scraper->HasUserSettings())
      continue;

    // add same language and multi-language
    if (scraper->Language() == m_info->Language() || scraper->Language().Equals("multi"))
      vecScrapers.push_back(scraper);
  }
}
// Run the scraper's "NfoUrl" function over the loaded NFO document and parse
// each XML fragment it returns. Fills m_strImDbNr / m_strImDbUrl on success.
// Returns false only when the scraper reported an <error> element.
bool CNfoFile::DoScrape(ScraperPtr& scraper)
{
  vector<CStdString> extras;
  extras.push_back(m_doc);

  CScraperUrl url;
  CFileCurl http;
  vector<CStdString> xml;
  if (scraper->GetParser().HasFunction("NfoUrl"))
    xml = scraper->Run("NfoUrl",url,http,&extras);

  for (vector<CStdString>::iterator it = xml.begin(); it != xml.end(); ++it)
  {
    TiXmlDocument doc;
    doc.Parse(it->c_str());
    if (doc.RootElement())
    {
      // Scraper signalled a failure - surface it to the user and stop.
      if (stricmp(doc.RootElement()->Value(),"error")==0)
      {
        CIMDB::ShowErrorDialog(doc.RootElement());
        return false;
      }

      TiXmlElement* pId = doc.FirstChildElement("id");
      if (pId && pId->FirstChild())
        m_strImDbNr = pId->FirstChild()->Value();

      // Renamed from 'url' to 'pUrl': the original shadowed the CScraperUrl
      // local declared above, which is error-prone.
      TiXmlElement* pUrl = doc.FirstChildElement("url");
      if (pUrl)
      {
        // Serialise the whole <url> element back to text for later fetching.
        stringstream str;
        str << *pUrl;
        m_strImDbUrl = str.str();
        SetScraperInfo(scraper);
      }
      else if (strcmp(doc.RootElement()->Value(),"url")==0)
      {
        // The fragment itself is the <url> element - keep it verbatim.
        SetScraperInfo(scraper);
        m_strImDbUrl = *it;
      }
    }
  }
  return true;
}
// Run the given scraper against this NFO file (or check an explicit URL).
// Return value: 0 - success; 1 - no result / skip this scraper; 2 - error.
// When strURL is non-empty we only verify it is an episode-guide URL.
int CNfoFile::Scrape(ScraperPtr& scraper, const CStdString& strURL /* = "" */)
{
  // Scraper must match the content type currently being scanned.
  if (scraper->Type() != m_type)
  {
    return 1;
  }
  // NOTE(review): a failed Load() returns 0 (success) - looks like it should
  // be an error code; confirm against callers before changing.
  if (!scraper->Load())
    return 0;

  // init and clear cache
  scraper->ClearCache();

  vector<CStdString> extras;
  CScraperUrl url;
  CFileCurl http;
  if (strURL.IsEmpty())
  {
    // Full-document scrape: optionally let the scraper pre-process the NFO
    // via its "NfoScrape" function.
    extras.push_back(m_doc);
    vector<CStdString> result;
    if (scraper->GetParser().HasFunction("NfoScrape"))
      result = scraper->Run("NfoScrape",url,http,&extras);
    if (!result.empty())
    {
      // NOTE(review): this parses m_strImDbUrl (not result[0]) and uses the
      // parse only as a gate - result's contents are never read. Presumably
      // an earlier call populated m_strImDbUrl with scraped XML; confirm.
      TiXmlDocument doc;
      doc.Parse(m_strImDbUrl.c_str());
      if (doc.RootElement() && doc.RootElement()->FirstChildElement())
      {
        CVideoInfoTag details;
        if (GetDetails(details,m_strImDbUrl.c_str()))
        {
          // Replace the in-memory NFO document with the scraped XML so later
          // GetDetails() calls read it instead of the original file contents.
          Close();
          m_size = m_strImDbUrl.size();
          m_doc = new char[m_size+1];
          m_headofdoc = m_doc;
          strcpy(m_doc,m_strImDbUrl.c_str());
          return 0;
        }
      }
    }
    if (!DoScrape(scraper))
      return 2;
    if (m_strImDbUrl.size() > 0)
      return 0;
    else
      return 1;
  }
  else // we check to identify the episodeguide url
  {
    extras.push_back(strURL);
    vector<CStdString> result = scraper->Run("EpisodeGuideUrl",url,http,&extras);
    if (result.empty() || result[0].IsEmpty())
      return 1;
    return 0;
  }
}
// Load an NFO file, try to parse full details from it, and build a prioritised
// scraper list (selected scraper, language fallbacks, then the default) to
// resolve any embedded lookup URL. Returns the kind of NFO found:
// FULL_NFO / COMBINED_NFO / URL_NFO / NO_NFO / ERROR_NFO.
// 'episode' > -1 selects a specific <episodedetails> section in a TV NFO.
CNfoFile::NFOResult CNfoFile::Create(const CStdString& strPath, const ScraperPtr& info, int episode, const CStdString& strPath2)
{
  m_info = info; // assume we can use these settings
  m_type = ScraperTypeFromContent(info->Content());
  if (FAILED(Load(strPath)))
    return NO_NFO;

  CFileItemList items;
  bool bNfo=false;

  // Resolve the system default scraper for this content type; bail out if
  // there is none.
  AddonPtr addon;
  ScraperPtr defaultScraper;
  if (!CAddonMgr::Get().GetDefault(m_type, addon))
    return NO_NFO;
  else
    defaultScraper = boost::dynamic_pointer_cast<CScraper>(addon);

  if (m_type == ADDON_SCRAPER_ALBUMS)
  {
    CAlbum album;
    bNfo = GetDetails(album);
  }
  else if (m_type == ADDON_SCRAPER_ARTISTS)
  {
    CArtist artist;
    bNfo = GetDetails(artist);
  }
  else if (m_type == ADDON_SCRAPER_TVSHOWS || m_type == ADDON_SCRAPER_MOVIES || m_type == ADDON_SCRAPER_MUSICVIDEOS)
  {
    // first check if it's an XML file with the info we need
    CVideoInfoTag details;
    bNfo = GetDetails(details);
    if (episode > -1 && bNfo && m_type == ADDON_SCRAPER_TVSHOWS)
    {
      // Walk successive <episodedetails> sections until the requested
      // episode number matches or the document is exhausted.
      int infos=0;
      while (m_headofdoc && details.m_iEpisode != episode)
      {
        m_headofdoc = strstr(m_headofdoc+1,"<episodedetails>");
        bNfo = GetDetails(details);
        infos++;
      }
      if (details.m_iEpisode != episode)
      {
        bNfo = false;
        details.Reset();
        m_headofdoc = m_doc;
        if (infos == 1) // still allow differing nfo/file numbers for single ep nfo's
          bNfo = GetDetails(details);
      }
    }
  }

  vector<ScraperPtr> vecScrapers;

  // add selected scraper
  if (m_info)
    vecScrapers.push_back(m_info);

  VECADDONS addons;
  CAddonMgr::Get().GetAddons(m_type,addons);
  // first pass - add language based scrapers
  if (m_info && g_guiSettings.GetBool("scrapers.langfallback"))
    AddScrapers(addons,vecScrapers);

  // add default scraper
  // NOTE(review): defaultScraper can be NULL if the cast above failed, in
  // which case defaultScraper->ID() dereferences NULL - confirm the default
  // addon is always a CScraper, or add a guard.
  if ((m_info && m_info->ID() != defaultScraper->ID()) || !m_info)
    vecScrapers.push_back(defaultScraper);

  // search .. stop at the first scraper that succeeds (0) or hard-fails (2)
  int res = -1;
  for (unsigned int i=0;i<vecScrapers.size();++i)
    if ((res = Scrape(vecScrapers[i])) == 0 || res == 2)
      break;

  if (res == 2)
    return ERROR_NFO;
  // A parsed NFO plus a resolved URL is "combined"; URL alone is URL_NFO.
  if (bNfo)
    return (m_strImDbUrl.size() > 0) ? COMBINED_NFO:FULL_NFO;
  return (m_strImDbUrl.size() > 0) ? URL_NFO : NO_NFO;
}
// Load an NFO file, try to parse full details from it, and build a prioritised
// scraper list (selected scraper first, then all other usable scrapers, then
// the default scraper last) to resolve any embedded lookup URL.
// Returns FULL_NFO / COMBINED_NFO / URL_NFO / NO_NFO / ERROR_NFO.
// 'episode' > -1 selects a specific <episodedetails> section in a TV NFO.
CNfoFile::NFOResult CNfoFile::Create(const CStdString& strPath, const ScraperPtr& info, int episode, const CStdString& strPath2)
{
  m_info = info; // assume we can use these settings
  m_type = ScraperTypeFromContent(info->Content());
  if (FAILED(Load(strPath)))
    return NO_NFO;

  CFileItemList items;
  bool bNfo=false;

  // Resolve the system default scraper for this content type, if any.
  AddonPtr addon;
  ScraperPtr defaultScraper;
  if (CAddonMgr::Get().GetDefault(m_type, addon))
    defaultScraper = boost::dynamic_pointer_cast<CScraper>(addon);

  if (m_type == ADDON_SCRAPER_ALBUMS)
  {
    CAlbum album;
    bNfo = GetDetails(album);
  }
  else if (m_type == ADDON_SCRAPER_ARTISTS)
  {
    CArtist artist;
    bNfo = GetDetails(artist);
  }
  else if (m_type == ADDON_SCRAPER_TVSHOWS || m_type == ADDON_SCRAPER_MOVIES || m_type == ADDON_SCRAPER_MUSICVIDEOS)
  {
    // first check if it's an XML file with the info we need
    CVideoInfoTag details;
    bNfo = GetDetails(details);
    if (episode > -1 && bNfo && m_type == ADDON_SCRAPER_TVSHOWS)
    {
      // Walk successive <episodedetails> sections until the requested
      // episode number matches or the document is exhausted.
      int infos=0;
      while (m_headofdoc && details.m_iEpisode != episode)
      {
        m_headofdoc = strstr(m_headofdoc+1,"<episodedetails");
        bNfo = GetDetails(details);
        infos++;
      }
      if (details.m_iEpisode != episode)
      {
        bNfo = false;
        details.Reset();
        m_headofdoc = m_doc;
        if (infos == 1) // still allow differing nfo/file numbers for single ep nfo's
          bNfo = GetDetails(details);
      }
    }
  }

  vector<ScraperPtr> vecScrapers;

  // add selected scraper - first priority
  if (m_info)
    vecScrapers.push_back(m_info);

  // Add all scrapers except selected and default
  VECADDONS addons;
  CAddonMgr::Get().GetAddons(m_type,addons);

  for (unsigned i = 0; i < addons.size(); ++i)
  {
    ScraperPtr scraper = boost::dynamic_pointer_cast<CScraper>(addons[i]);
    // Guard: dynamic_pointer_cast yields NULL when the addon is not a
    // CScraper - dereferencing it below would crash.
    if (!scraper)
      continue;

    // skip if scraper requires settings and there's nothing set yet
    if (scraper->RequiresSettings() && !scraper->HasUserSettings())
      continue;

    if( (!m_info || m_info->ID() != scraper->ID())
     && (!defaultScraper || defaultScraper->ID() != scraper->ID()) )
      vecScrapers.push_back(scraper);
  }

  // add default scraper - not user selectable so it's last priority
  if( defaultScraper && (!m_info || m_info->ID() != defaultScraper->ID())
   && ( !defaultScraper->RequiresSettings() || defaultScraper->HasUserSettings() ) )
    vecScrapers.push_back(defaultScraper);

  // search .. stop at the first scraper that succeeds (0) or hard-fails (2)
  int res = -1;
  for (unsigned int i=0;i<vecScrapers.size();++i)
    if ((res = Scrape(vecScrapers[i])) == 0 || res == 2)
      break;

  if (res == 2)
    return ERROR_NFO;
  // A parsed NFO plus a resolved URL is "combined"; URL alone is URL_NFO.
  if (bNfo)
    return m_scurl.m_url.empty() ? FULL_NFO : COMBINED_NFO;
  return m_scurl.m_url.empty() ? NO_NFO : URL_NFO;
}