int CAddonCallbacksAddon::GetFileChunkSize(const void* addonData, void* file)
{
  CAddonInterfaces* helper = (CAddonInterfaces*) addonData;
  if (!helper)
    return 0;

  CFile* cfile = (CFile*)file;
  if (!cfile)
    return 0;

  return cfile->GetChunkSize();
}
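
// --- Illustrative sketch (not part of the original source) -----------------
// GetFileChunkSize above is a thin bridge: it casts the opaque pointers the
// add-on ABI passes around back to CAddonInterfaces and XFILE::CFile and
// returns the file's preferred chunk size, or 0 on a null pointer. A
// hypothetical caller might wrap it as below, assuming the function is a
// static member as its C-style signature suggests; ChunkSizeOrDefault is a
// made-up helper name, not part of the Kodi API.
static int ChunkSizeOrDefault(const void* addonData, void* fileHandle)
{
  int chunkSize = CAddonCallbacksAddon::GetFileChunkSize(addonData, fileHandle);
  return chunkSize > 0 ? chunkSize : 64 * 1024; // fall back to a 64 KiB buffer
}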
bool CFile::Copy(const CURL& url2, const CURL& dest, XFILE::IFileCallback* pCallback, void* pContext)
{
  CFile file;

  const std::string pathToUrl(dest.Get());
  if (pathToUrl.empty())
    return false;

  // special case for zips - ignore caching
  CURL url(url2);
  if (URIUtils::IsInZIP(url.Get()) || URIUtils::IsInAPK(url.Get()))
    url.SetOptions("?cache=no");

  if (file.Open(url.Get(), READ_TRUNCATED | READ_CHUNKED))
  {
    CFile newFile;
    if (URIUtils::IsHD(pathToUrl)) // create possible missing dirs
    {
      std::vector<std::string> tokens;
      std::string strDirectory = URIUtils::GetDirectory(pathToUrl);
      URIUtils::RemoveSlashAtEnd(strDirectory); // for the test below
      if (!(strDirectory.size() == 2 && strDirectory[1] == ':'))
      {
        CURL url(strDirectory);
        std::string pathsep;
#ifndef TARGET_POSIX
        pathsep = "\\";
#else
        pathsep = "/";
#endif
        // Try recursive creation first; if it fails it might not be
        // implemented for that subsystem, so fall back to creating each
        // path component one at a time
        if (!CDirectory::Create(url))
        {
          StringUtils::Tokenize(url.GetFileName(), tokens, pathsep.c_str());
          std::string strCurrPath;
          // Handle special case: keep the protocol:// prefix and switch to '/' separators
          if (!url.GetProtocol().empty())
          {
            pathsep = "/";
            strCurrPath += url.GetProtocol() + "://";
          } // If the directory has a / at the beginning, don't forget it
          else if (strDirectory[0] == pathsep[0])
            strCurrPath += pathsep;

          for (std::vector<std::string>::iterator iter = tokens.begin(); iter != tokens.end(); ++iter)
          {
            strCurrPath += *iter + pathsep;
            CDirectory::Create(strCurrPath);
          }
        }
      }
    }
    if (CFile::Exists(dest))
      CFile::Delete(dest);
    if (!newFile.OpenForWrite(dest, true)) // overwrite always
    {
      file.Close();
      return false;
    }

    int iBufferSize = GetChunkSize(file.GetChunkSize(), 128 * 1024);

    auto_buffer buffer(iBufferSize);
    ssize_t iRead, iWrite;

    unsigned long long llFileSize = file.GetLength();
    unsigned long long llPos = 0;

    CStopWatch timer;
    timer.StartZero();
    float start = 0.0f;
    while (true)
    {
      g_application.ResetScreenSaver();

      iRead = file.Read(buffer.get(), iBufferSize);
      if (iRead == 0)
        break;
      else if (iRead < 0)
      {
        CLog::Log(LOGERROR, "%s - Failed read from file %s", __FUNCTION__, url.GetRedacted().c_str());
        llFileSize = (uint64_t)-1;
        break;
      }

      /* write data and make sure we managed to write it all */
      iWrite = 0;
      while (iWrite < iRead)
      {
        ssize_t iWrite2 = newFile.Write(buffer.get() + iWrite, iRead - iWrite);
        if (iWrite2 <= 0)
          break;
        iWrite += iWrite2;
      }

      if (iWrite != iRead)
      {
        CLog::Log(LOGERROR, "%s - Failed write to file %s", __FUNCTION__, dest.GetRedacted().c_str());
        llFileSize = (uint64_t)-1;
        break;
      }

      llPos += iRead;

      // calculate the current and average speeds
      float end = timer.GetElapsedSeconds();

      if (pCallback && end - start > 0.5 && end)
      {
        start = end;

        float averageSpeed = llPos / end;
        int ipercent = 0;
        if (llFileSize)
          ipercent = 100 * llPos / llFileSize;

        if (!pCallback->OnFileCallback(pContext, ipercent, averageSpeed))
        {
          CLog::Log(LOGERROR, "%s - User aborted copy", __FUNCTION__);
          llFileSize = (uint64_t)-1;
          break;
        }
      }
    }

    /* close both files */
    newFile.Close();
    file.Close();

    /* verify that we managed to complete the file */
    if (llFileSize && llPos != llFileSize)
    {
      CFile::Delete(dest);
      return false;
    }

    return true;
  }
  return false;
}
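
// --- Illustrative sketch (not part of the original source) -----------------
// CFile::Copy reports progress through XFILE::IFileCallback::OnFileCallback,
// passing the percentage copied and the average speed in bytes per second,
// and aborts the copy when the callback returns false. A minimal progress
// logger might look like the sketch below. CLoggingCopyCallback and
// CopyWithProgress are made-up names, the callback signature is inferred from
// the call made above, and Copy is assumed to be the static helper it appears
// to be here (it only uses locals and static CFile calls).
class CLoggingCopyCallback : public XFILE::IFileCallback
{
public:
  bool OnFileCallback(void* pContext, int ipercent, float avgSpeed) override
  {
    CLog::Log(LOGDEBUG, "%s - copy at %d%% (%.0f B/s)", __FUNCTION__, ipercent, avgSpeed);
    return true; // returning false would abort the copy
  }
};

bool CopyWithProgress(const CURL& source, const CURL& destination)
{
  CLoggingCopyCallback callback;
  return XFILE::CFile::Copy(source, destination, &callback, nullptr);
}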