Example #1
bool CCurlFile::GetMimeType(const CURL &url, CStdString &content, CStdString useragent)
{
  CCurlFile file;
  if (!useragent.IsEmpty())
    file.SetUserAgent(useragent);

  struct __stat64 buffer;
  if( file.Stat(url, &buffer) == 0 )
  {
    if (buffer.st_mode == _S_IFDIR)
      content = "x-directory/normal";
    else
      content = file.GetMimeType();
    CLog::Log(LOGDEBUG, "CCurlFile::GetMimeType - %s -> %s", url.Get().c_str(), content.c_str());
    return true;
  }
  CLog::Log(LOGDEBUG, "CCurlFile::GetMimeType - %s -> failed", url.Get().c_str());
  content = "";
  return false;
}
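A minimal usage sketch for Example #1, assuming GetMimeType is declared static (its body constructs its own CCurlFile) and that the usual XBMC/Kodi headers and the XFILE namespace apply; the caller name, URL, and include paths below are illustrative assumptions, not taken from the listing:

#include "URL.h"                  // assumed location of CURL
#include "filesystem/CurlFile.h"  // assumed location of XFILE::CCurlFile
#include "utils/StdString.h"      // assumed location of CStdString
#include "utils/log.h"

// Illustrative caller, not part of the original listing.
void LogMimeTypeExample()
{
  CURL url("http://example.com/feed.xml");
  CStdString mimeType;
  // An empty user agent keeps CCurlFile's default one (see the IsEmpty() check above).
  if (XFILE::CCurlFile::GetMimeType(url, mimeType, ""))
    CLog::Log(LOGDEBUG, "MIME type: %s", mimeType.c_str());
  else
    CLog::Log(LOGDEBUG, "MIME type lookup failed");
}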
Example #2
void CRssReader::Process()
{
  while (GetQueueSize())
  {
    CSingleLock lock(*this);

    int iFeed = m_vecQueue.front();
    m_vecQueue.erase(m_vecQueue.begin());

    m_strFeed[iFeed] = "";
    m_strColors[iFeed] = "";

    CCurlFile http;
    http.SetUserAgent(g_settings.m_userAgent);
    http.SetTimeout(2);
    CStdString strXML;
    CStdString strUrl = m_vecUrls[iFeed];
    lock.Leave();

    int nRetries = 3;
    CURL url(strUrl);

    // we wait for the network to come up
    if ((url.GetProtocol() == "http" || url.GetProtocol() == "https") && !g_application.getNetwork().IsAvailable(true))
      strXML = "<rss><item><title>"+g_localizeStrings.Get(15301)+"</title></item></rss>";
    else
    {
      unsigned int starttime = XbmcThreads::SystemClockMillis();
      while ( (!m_bStop) && (nRetries > 0) )
      {
        unsigned int currenttimer = XbmcThreads::SystemClockMillis() - starttime;
        if (currenttimer > 15000)
        {
          CLog::Log(LOGERROR,"Timeout whilst retrieving %s", strUrl.c_str());
          http.Cancel();
          break;
        }
        nRetries--;

        if (url.GetProtocol() != "http" && url.GetProtocol() != "https")
        {
          CFile file;
          if (file.Open(strUrl))
          {
            // read the whole (non-HTTP) file into a null-terminated buffer
            char *buffer = new char[(int)file.GetLength()+1];
            file.Read(buffer, file.GetLength());
            buffer[file.GetLength()] = '\0';
            strXML = buffer;
            delete[] buffer;
            break;
          }
        }
        else if (http.Get(strUrl, strXML))
        {
          CLog::Log(LOGDEBUG, "Got rss feed: %s", strUrl.c_str());
          break;
        }
      }
      http.Cancel();
    }
    if ((!strXML.IsEmpty()) && m_pObserver)
    {
      // erase any <content:encoded> tags (also unsupported by tinyxml)
      int iStart = strXML.Find("<content:encoded>");
      int iEnd = 0;
      while (iStart >= 0) // CStdString::Find() returns -1 when the tag is absent
      {
        // position just past the closing </content:encoded> tag (18 = its length)
        iEnd = strXML.Find("</content:encoded>", iStart) + 18;

        // erase the section
        strXML = strXML.erase(iStart, iEnd - iStart);

        iStart = strXML.Find("<content:encoded>");
      }

      if (Parse((LPSTR)strXML.c_str(), iFeed))
      {
        CLog::Log(LOGDEBUG, "Parsed rss feed: %s", strUrl.c_str());
      }
    }
  }
  UpdateObserver();
}
Example #3
void CRssReader::Process()
{
  while (GetQueueSize())
  {
    CSingleLock lock(m_critical);

    int iFeed = m_vecQueue.front();
    m_vecQueue.erase(m_vecQueue.begin());

    m_strFeed[iFeed].clear();
    m_strColors[iFeed].clear();

    CCurlFile http;
    http.SetUserAgent(g_advancedSettings.m_userAgent);
    http.SetTimeout(2);
    std::string strXML;
    std::string strUrl = m_vecUrls[iFeed];
    lock.Leave();

    int nRetries = 3;
    CURL url(strUrl);
    std::string fileCharset;

    // we wait for the network to come up
    if ((url.IsProtocol("http") || url.IsProtocol("https")) &&
        !g_application.getNetwork().IsAvailable())
    {
      CLog::Log(LOGWARNING, "RSS: No network connection");
      strXML = "<rss><item><title>"+g_localizeStrings.Get(15301)+"</title></item></rss>";
    }
    else
    {
      XbmcThreads::EndTime timeout(15000);
      while (!m_bStop && nRetries > 0)
      {
        if (timeout.IsTimePast())
        {
          CLog::Log(LOGERROR, "Timeout while retrieving rss feed: %s", strUrl.c_str());
          break;
        }
        nRetries--;

        if (!url.IsProtocol("http") && !url.IsProtocol("https"))
        {
          CFile file;
          auto_buffer buffer;
          if (file.LoadFile(strUrl, buffer) > 0)
          {
            strXML.assign(buffer.get(), buffer.length());
            break;
          }
        }
        else
        {
          if (http.Get(strUrl, strXML))
          {
            fileCharset = http.GetServerReportedCharset();
            CLog::Log(LOGDEBUG, "Got rss feed: %s", strUrl.c_str());
            break;
          }
          else if (nRetries > 0)
            Sleep(5000); // Network problems? Retry, but not immediately.
          else
            CLog::Log(LOGERROR, "Unable to obtain rss feed: %s", strUrl.c_str());
        }
      }
      http.Cancel();
    }
    if (!strXML.empty() && m_pObserver)
    {
      // erase any <content:encoded> tags (also unsupported by tinyxml)
      size_t iStart = strXML.find("<content:encoded>");
      size_t iEnd = 0;
      while (iStart != std::string::npos)
      {
        // position just past the closing </content:encoded> tag (18 = its length)
        iEnd = strXML.find("</content:encoded>", iStart) + 18;

        // erase the section
        strXML = strXML.erase(iStart, iEnd - iStart);

        iStart = strXML.find("<content:encoded>");
      }

      if (Parse(strXML, iFeed, fileCharset))
        CLog::Log(LOGDEBUG, "Parsed rss feed: %s", strUrl.c_str());
    }
  }
  UpdateObserver();
}
Example #4
void CRssReader::Process()
{
  while (GetQueueSize())
  {
    CSingleLock lock(m_critical);

    int iFeed = m_vecQueue.front();
    m_vecQueue.erase(m_vecQueue.begin());

    m_strFeed[iFeed] = "";
    m_strColors[iFeed] = "";

    CCurlFile http;
    http.SetUserAgent(g_advancedSettings.m_userAgent);
    http.SetTimeout(2);
    CStdString strXML;
    CStdString strUrl = m_vecUrls[iFeed];
    lock.Leave();

    int nRetries = 3;
    CURL url(strUrl);

    // we wait for the network to come up
    if ((url.GetProtocol() == "http" || url.GetProtocol() == "https") &&
        !g_application.getNetwork().IsAvailable(true))
      strXML = "<rss><item><title>"+g_localizeStrings.Get(15301)+"</title></item></rss>";
    else
    {
      XbmcThreads::EndTime timeout(15000);
      while (!m_bStop && nRetries > 0)
      {
        if (timeout.IsTimePast())
        {
          CLog::Log(LOGERROR, "Timeout whilst retrieving %s", strUrl.c_str());
          http.Cancel();
          break;
        }
        nRetries--;

        if (url.GetProtocol() != "http" && url.GetProtocol() != "https")
        {
          void* bufferPtr;
          const unsigned int fsize = CFileUtils::LoadFile(strUrl, bufferPtr);
          if (fsize != 0)
          {
            strXML.assign((const char*)bufferPtr, fsize);
            free(bufferPtr);
            break;
          }
        }
        else if (http.Get(strUrl, strXML))
        {
          CLog::Log(LOGDEBUG, "Got rss feed: %s", strUrl.c_str());
          break;
        }
      }
      http.Cancel();
    }
    if (!strXML.IsEmpty() && m_pObserver)
    {
      // erase any <content:encoded> tags (also unsupported by tinyxml)
      int iStart = strXML.Find("<content:encoded>");
      int iEnd = 0;
      while (iStart >= 0) // CStdString::Find() returns -1 when the tag is absent
      {
        // position just past the closing </content:encoded> tag (18 = its length)
        iEnd = strXML.Find("</content:encoded>", iStart) + 18;

        // erase the section
        strXML = strXML.erase(iStart, iEnd - iStart);

        iStart = strXML.Find("<content:encoded>");
      }

      if (Parse((LPSTR)strXML.c_str(), iFeed))
        CLog::Log(LOGDEBUG, "Parsed rss feed: %s", strUrl.c_str());
    }
  }
  UpdateObserver();
}
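For reference, a standalone sketch of the <content:encoded> stripping loop used in Examples #2 through #4, written against plain std::string; the helper name is illustrative and, unlike the inline loops above, it also stops cleanly if a closing tag is missing:

#include <string>

// Illustrative helper, not part of the original listing: drop every
// <content:encoded>...</content:encoded> section (tinyxml does not handle them).
static void StripContentEncoded(std::string& xml)
{
  static const std::string openTag  = "<content:encoded>";
  static const std::string closeTag = "</content:encoded>";

  size_t start = xml.find(openTag);
  while (start != std::string::npos)
  {
    size_t end = xml.find(closeTag, start);
    if (end == std::string::npos)
      break; // unbalanced tag: stop rather than erase past the end of the document
    xml.erase(start, end + closeTag.size() - start);
    start = xml.find(openTag, start);
  }
}

Calling StripContentEncoded(strXML) before Parse() would match the behaviour of the inline loops above for well-formed feeds.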