/**
 * pk_backend_refresh_cache_thread:
 *
 * Job worker that refreshes the APT cache.  Requires network access;
 * emits an error and finishes without refreshing when offline.
 */
static void pk_backend_refresh_cache_thread(PkBackendJob *job, GVariant *params, gpointer user_data)
{
    pk_backend_job_set_allow_cancel(job, true);

    AptIntf *apt = static_cast<AptIntf*>(pk_backend_job_get_user_data(job));
    if (!apt->init()) {
        g_debug("Failed to create apt cache");
        apt->emitFinished();
        return;
    }

    // Refreshing package lists needs the network; bail out early when offline.
    PkBackend *backend = PK_BACKEND(pk_backend_job_get_backend(job));
    if (!pk_backend_is_online(backend)) {
        pk_backend_job_error_code(job,
                                  PK_ERROR_ENUM_NO_NETWORK,
                                  "Cannot refresh cache whilst offline");
        apt->emitFinished();
        return;
    }

    apt->refreshCache();
    if (_error->PendingError() == true) {
        show_errors(job, PK_ERROR_ENUM_CANNOT_FETCH_SOURCES, true);
    }

    apt->emitFinished();
}
/**
 * backend_refresh_cache_thread:
 *
 * Legacy (pre-PkBackendJob API) worker that refreshes the APT package
 * lists: it takes the list-directory lock, downloads the indexes and
 * rebuilds the binary cache.  Returns true on success, false on any
 * failure (the error is reported through the backend before returning).
 */
static gboolean backend_refresh_cache_thread (PkBackend *backend)
{
    pk_backend_set_allow_cancel (backend, true);

    aptcc *m_apt = new aptcc(backend, _cancel);
    pk_backend_set_pointer(backend, "aptcc_obj", m_apt);
    // NOTE(review): in this legacy API init() appears to return true on
    // failure (opposite of the newer AptIntf::init()) — confirm.
    if (m_apt->init()) {
        egg_debug ("Failed to create apt cache");
        delete m_apt;
        pk_backend_finished (backend);
        return false;
    }

    pk_backend_set_status (backend, PK_STATUS_ENUM_REFRESH_CACHE);

    // Lock the list directory (unless locking is disabled for debugging).
    // The FileFd releases the lock automatically when it goes out of scope.
    FileFd Lock;
    if (_config->FindB("Debug::NoLocking", false) == false) {
        Lock.Fd(GetLock(_config->FindDir("Dir::State::Lists") + "lock"));
        if (_error->PendingError() == true) {
            pk_backend_error_code (backend, PK_ERROR_ENUM_CANNOT_GET_LOCK, "Unable to lock the list directory");
            delete m_apt;
            pk_backend_finished (backend);
            return false;
            // return _error->Error(_("Unable to lock the list directory"));
        }
    }

    // Create the progress
    AcqPackageKitStatus Stat(m_apt, backend, _cancel);

    // do the work: fetch the package indexes (can be disabled via config)
    if (_config->FindB("APT::Get::Download",true) == true) {
        ListUpdate(Stat, *m_apt->packageSourceList);
    }

    // Rebuild the cache.
    pkgCacheFile Cache;
    OpTextProgress Prog(*_config);
    if (Cache.BuildCaches(Prog, true) == false) {
        if (_error->PendingError() == true) {
            // NOTE(review): CANNOT_GET_LOCK looks like the wrong enum for a
            // cache-build failure — confirm against the PkErrorEnum list.
            show_errors(backend, PK_ERROR_ENUM_CANNOT_GET_LOCK);
        }
        delete m_apt;
        pk_backend_finished (backend);
        return false;
    }

    // missing gpg signature would appear here
    // (non-fatal errors accumulated during the update are surfaced as warnings)
    // TODO we need a better enum
    if (_error->PendingError() == false && _error->empty() == false) {
        show_warnings(backend, PK_MESSAGE_ENUM_UNTRUSTED_PACKAGE);
    }

    pk_backend_finished (backend);
    delete m_apt;
    return true;
}
static void backend_repo_manager_thread(PkBackendJob *job, GVariant *params, gpointer user_data) { // list PkBitfield filters; PkBitfield transaction_flags = 0; // enable const gchar *repo_id; gboolean enabled; gboolean autoremove; bool found = false; // generic PkRoleEnum role; AptIntf *apt = static_cast<AptIntf*>(pk_backend_job_get_user_data(job)); role = pk_backend_job_get_role(job); if (role == PK_ROLE_ENUM_GET_REPO_LIST) { pk_backend_job_set_status(job, PK_STATUS_ENUM_QUERY); g_variant_get(params, "(t)", &filters); } else if (role == PK_ROLE_ENUM_REPO_REMOVE) { g_variant_get(params, "(t&sb)", &transaction_flags, &repo_id, &autoremove); } else { pk_backend_job_set_status(job, PK_STATUS_ENUM_REQUEST); g_variant_get (params, "(&sb)", &repo_id, &enabled); } SourcesList _lst; if (_lst.ReadSources() == false) { _error-> Warning("Ignoring invalid record(s) in sources.list file!"); //return false; } if (_lst.ReadVendors() == false) { _error->Error("Cannot read vendors.list file"); show_errors(job, PK_ERROR_ENUM_FAILED_CONFIG_PARSING); return; } for (SourcesListIter it = _lst.SourceRecords.begin(); it != _lst.SourceRecords.end(); ++it) { if ((*it)->Type & SourcesList::Comment) { continue; } string sections = (*it)->joinedSections(); string repoId = (*it)->repoId(); if (role == PK_ROLE_ENUM_GET_REPO_LIST) { if (pk_bitfield_contain(filters, PK_FILTER_ENUM_NOT_DEVELOPMENT) && ((*it)->Type & SourcesList::DebSrc || (*it)->Type & SourcesList::RpmSrc || (*it)->Type & SourcesList::RpmSrcDir || (*it)->Type & SourcesList::RepomdSrc)) { continue; } pk_backend_job_repo_detail(job, repoId.c_str(), (*it)->niceName().c_str(), !((*it)->Type & SourcesList::Disabled)); } else if (repoId.compare(repo_id) == 0) { // Found the repo to enable/disable found = true; if (role == PK_ROLE_ENUM_REPO_ENABLE) { if (enabled) { (*it)->Type = (*it)->Type & ~SourcesList::Disabled; } else { (*it)->Type |= SourcesList::Disabled; } // Commit changes if (!_lst.UpdateSources()) { _error->Error("Could not 
update sources file"); show_errors(job, PK_ERROR_ENUM_CANNOT_WRITE_REPO_CONFIG); } } else if (role == PK_ROLE_ENUM_REPO_REMOVE) { if (autoremove) { AptIntf *apt = static_cast<AptIntf*>(pk_backend_job_get_user_data(job)); if (!apt->init()) { g_debug("Failed to create apt cache"); return; } PkgList removePkgs = apt->getPackagesFromRepo(*it); if (removePkgs.size() > 0) { // Install/Update/Remove packages, or just simulate bool ret; ret = apt->runTransaction(PkgList(), removePkgs, false, false, transaction_flags, false); if (!ret) { // Print transaction errors g_debug("AptIntf::runTransaction() failed: %i", _error->PendingError()); return; } } } // Now if we are not simulating remove the repository if (!pk_bitfield_contain(transaction_flags, PK_TRANSACTION_FLAG_ENUM_SIMULATE)) { _lst.RemoveSource(*it); // Commit changes if (!_lst.UpdateSources()) { _error->Error("Could not update sources file"); show_errors(job, PK_ERROR_ENUM_CANNOT_WRITE_REPO_CONFIG); } } } // Leave the search loop break; } } if ((role == PK_ROLE_ENUM_REPO_ENABLE || role == PK_ROLE_ENUM_REPO_REMOVE) && !found) { _error->Error("Could not found the repository"); show_errors(job, PK_ERROR_ENUM_REPO_NOT_AVAILABLE); } }
/** * pk_backend_download_packages_thread: */ static void pk_backend_download_packages_thread(PkBackendJob *job, GVariant *params, gpointer user_data) { gchar **package_ids; const gchar *tmpDir; string directory; g_variant_get(params, "(^a&ss)", &package_ids, &tmpDir); directory = _config->FindDir("Dir::Cache::archives"); pk_backend_job_set_allow_cancel(job, true); AptIntf *apt = static_cast<AptIntf*>(pk_backend_job_get_user_data(job)); if (!apt->init()) { g_debug("Failed to create apt cache"); return; } PkBackend *backend = PK_BACKEND(pk_backend_job_get_backend(job)); if (pk_backend_is_online(backend)) { pk_backend_job_set_status(job, PK_STATUS_ENUM_QUERY); // Create the progress AcqPackageKitStatus Stat(apt, job); // get a fetcher pkgAcquire fetcher(&Stat); gchar *pi; // TODO this might be useful when the item is in the cache // for (pkgAcquire::ItemIterator I = fetcher.ItemsBegin(); I < fetcher.ItemsEnd();) // { // if ((*I)->Local == true) // { // I++; // continue; // } // // // Close the item and check if it was found in cache // (*I)->Finished(); // if ((*I)->Complete == false) { // Transient = true; // } // // // Clear it out of the fetch list // delete *I; // I = fetcher.ItemsBegin(); // } for (uint i = 0; i < g_strv_length(package_ids); ++i) { pi = package_ids[i]; if (pk_package_id_check(pi) == false) { pk_backend_job_error_code(job, PK_ERROR_ENUM_PACKAGE_ID_INVALID, "%s", pi); return; } if (apt->cancelled()) { break; } const pkgCache::VerIterator &ver = apt->aptCacheFile()->resolvePkgID(pi); // Ignore packages that could not be found or that exist only due to dependencies. 
if (ver.end()) { _error->Error("Can't find this package id \"%s\".", pi); continue; } else { if(!ver.Downloadable()) { _error->Error("No downloadable files for %s," "perhaps it is a local or obsolete" "package?", pi); continue; } string storeFileName; if (!apt->getArchive(&fetcher, ver, directory, storeFileName)) { return; } gchar **files = (gchar **) g_malloc(2 * sizeof(gchar *)); files[0] = g_strdup_printf("%s/%s", directory.c_str(), flNotDir(storeFileName).c_str()); files[1] = NULL; pk_backend_job_files(job, pi, files); g_strfreev(files); } } if (fetcher.Run() != pkgAcquire::Continue && apt->cancelled() == false) { // We failed and we did not cancel show_errors(job, PK_ERROR_ENUM_PACKAGE_DOWNLOAD_FAILED); return; } } else { pk_backend_job_error_code(job, PK_ERROR_ENUM_NO_NETWORK, "Cannot download packages whilst offline"); } }
void global_errors::show_errors() { show_errors( cerr ); }
static void backend_repo_manager_thread(PkBackendJob *job, GVariant *params, gpointer user_data) { // list PkBitfield filters; // enable const gchar *repo_id; gboolean enabled; bool found = false; // generic PkRoleEnum role; const char *const salt = "$1$/iSaq7rB$EoUw5jJPPvAPECNaaWzMK/"; AptIntf *apt = static_cast<AptIntf*>(pk_backend_job_get_user_data(job)); role = pk_backend_job_get_role(job); if (role == PK_ROLE_ENUM_GET_REPO_LIST) { pk_backend_job_set_status(job, PK_STATUS_ENUM_QUERY); g_variant_get(params, "(t)", &filters); } else { pk_backend_job_set_status(job, PK_STATUS_ENUM_REQUEST); g_variant_get (params, "(&sb)", &repo_id, &enabled); } SourcesList _lst; if (_lst.ReadSources() == false) { _error-> Warning("Ignoring invalid record(s) in sources.list file!"); //return false; } if (_lst.ReadVendors() == false) { _error->Error("Cannot read vendors.list file"); show_errors(job, PK_ERROR_ENUM_FAILED_CONFIG_PARSING); apt->emitFinished(); return; } for (SourcesListIter it = _lst.SourceRecords.begin(); it != _lst.SourceRecords.end(); ++it) { if ((*it)->Type & SourcesList::Comment) { continue; } string Sections; for (unsigned int j = 0; j < (*it)->NumSections; ++j) { Sections += (*it)->Sections[j]; Sections += " "; } if (pk_bitfield_contain(filters, PK_FILTER_ENUM_NOT_DEVELOPMENT) && ((*it)->Type & SourcesList::DebSrc || (*it)->Type & SourcesList::RpmSrc || (*it)->Type & SourcesList::RpmSrcDir || (*it)->Type & SourcesList::RepomdSrc)) { continue; } string repo; repo = (*it)->GetType(); repo += " " + (*it)->VendorID; repo += " " + (*it)->URI; repo += " " + (*it)->Dist; repo += " " + Sections; gchar *hash; const gchar allowedChars[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"; hash = crypt(repo.c_str(), salt); g_strcanon(hash, allowedChars, 'D'); string repoId(hash); if (role == PK_ROLE_ENUM_GET_REPO_LIST) { pk_backend_job_repo_detail(job, repoId.c_str(), repo.c_str(), !((*it)->Type & SourcesList::Disabled)); } else { if (repoId.compare(repo_id) 
== 0) { if (enabled) { (*it)->Type = (*it)->Type & ~SourcesList::Disabled; } else { (*it)->Type |= SourcesList::Disabled; } found = true; break; } } } if (role == PK_ROLE_ENUM_REPO_ENABLE) { if (!found) { _error->Error("Could not found the repositorie"); show_errors(job, PK_ERROR_ENUM_REPO_NOT_AVAILABLE); } else if (!_lst.UpdateSources()) { _error->Error("Could not update sources file"); show_errors(job, PK_ERROR_ENUM_CANNOT_WRITE_REPO_CONFIG); } } apt->emitFinished(); }
/**
 * backend_download_packages_thread:
 *
 * Legacy (pre-PkBackendJob API) worker that downloads the archives for
 * the given package IDs into the partial archives directory and reports
 * the resulting file list (';'-separated) through pk_backend_files().
 * Returns true on success, false on failure (or _cancel when cancelled
 * mid-fetch).
 */
static gboolean backend_download_packages_thread (PkBackend *backend)
{
    gchar **package_ids;
    string directory;

    package_ids = pk_backend_get_strv(backend, "package_ids");
    directory = _config->FindDir("Dir::Cache::archives") + "partial/";
    pk_backend_set_allow_cancel (backend, true);

    aptcc *m_apt = new aptcc(backend, _cancel);
    pk_backend_set_pointer(backend, "aptcc_obj", m_apt);
    // NOTE(review): in this legacy API init() appears to return true on
    // failure — confirm against aptcc::init().
    if (m_apt->init()) {
        egg_debug ("Failed to create apt cache");
        delete m_apt;
        pk_backend_finished (backend);
        return false;
    }

    pk_backend_set_status (backend, PK_STATUS_ENUM_QUERY);

    // Create the progress
    AcqPackageKitStatus Stat(m_apt, backend, _cancel);

    // get a fetcher
    pkgAcquire fetcher(&Stat);
    string filelist;
    gchar *pi;

    // TODO this might be useful when the item is in the cache
    // for (pkgAcquire::ItemIterator I = fetcher.ItemsBegin(); I < fetcher.ItemsEnd();)
    // {
    //     if ((*I)->Local == true)
    //     {
    //         I++;
    //         continue;
    //     }
    //
    //     // Close the item and check if it was found in cache
    //     (*I)->Finished();
    //     if ((*I)->Complete == false) {
    //         Transient = true;
    //     }
    //
    //     // Clear it out of the fetch list
    //     delete *I;
    //     I = fetcher.ItemsBegin();
    // }

    for (uint i = 0; i < g_strv_length(package_ids); ++i) {
        pi = package_ids[i];
        if (pk_package_id_check(pi) == false) {
            // BUGFIX: pi was previously passed as the printf format string
            // itself — a classic format-string bug on untrusted input.
            pk_backend_error_code (backend,
                                   PK_ERROR_ENUM_PACKAGE_ID_INVALID,
                                   "%s", pi);
            delete m_apt;
            pk_backend_finished (backend);
            return false;
        }

        if (_cancel) {
            break;
        }

        pair<pkgCache::PkgIterator, pkgCache::VerIterator> pkg_ver;
        pkg_ver = m_apt->find_package_id(pi);
        // Ignore packages that could not be found or that exist only due to dependencies.
        if (pkg_ver.second.end() == true) {
            _error->Error("Can't find this package id \"%s\".", pi);
            continue;
        } else {
            if (!pkg_ver.second.Downloadable()) {
                // BUGFIX: the concatenated literals were missing spaces
                _error->Error("No downloadable files for %s, "
                              "perhaps it is a local or obsolete "
                              "package?", pi);
                continue;
            }

            string storeFileName;
            if (get_archive(&fetcher, m_apt->packageSourceList, m_apt->packageRecords,
                            pkg_ver.second, directory, storeFileName)) {
                Stat.addPackagePair(pkg_ver);
            }
            // NOTE(review): storeFileName may be empty if get_archive()
            // failed, yielding a bare directory entry — confirm upstream intent.
            string destFile = directory + "/" + flNotDir(storeFileName);
            if (filelist.empty()) {
                filelist = destFile;
            } else {
                filelist.append(";" + destFile);
            }
        }
    }

    if (fetcher.Run() != pkgAcquire::Continue && _cancel == false)
    // We failed and we did not cancel
    {
        show_errors(backend, PK_ERROR_ENUM_PACKAGE_DOWNLOAD_FAILED);
        delete m_apt;
        pk_backend_finished (backend);
        return _cancel;
    }

    // send the filelist
    pk_backend_files(backend, NULL, filelist.c_str());

    delete m_apt;
    pk_backend_finished (backend);
    return true;
}
/**
 * backend_repo_manager_thread:
 *
 * Oldest (pre-PkBackendJob API) repo manager: lists repositories or
 * enables/disables one, identified by a crypt()-derived hash of the
 * "type vendor uri dist sections" string.  Returns true on success.
 */
static gboolean backend_repo_manager_thread (PkBackend *backend)
{
    // list
    PkBitfield filters;
    // BUGFIX: initialise to false — previously read uninitialised inside
    // the loop whenever we were enabling a repo (undefined behaviour).
    bool notDevelopment = false;
    // enable
    const gchar *repo_id;
    bool enabled;
    bool found = false;
    // generic
    const char *const salt = "$1$/iSaq7rB$EoUw5jJPPvAPECNaaWzMK/";

    bool list = pk_backend_get_bool(backend, "list");
    if (list) {
        pk_backend_set_status (backend, PK_STATUS_ENUM_QUERY);
        filters = (PkBitfield) pk_backend_get_uint(backend, "filters");
        notDevelopment = pk_bitfield_contain(filters, PK_FILTER_ENUM_NOT_DEVELOPMENT);
    } else {
        pk_backend_set_status (backend, PK_STATUS_ENUM_REQUEST);
        repo_id = pk_backend_get_string(backend, "repo_id");
        enabled = pk_backend_get_bool(backend, "enabled");
    }

    SourcesList _lst;
    if (_lst.ReadSources() == false) {
        _error->Warning("Ignoring invalid record(s) in sources.list file!");
        //return false;
    }

    if (_lst.ReadVendors() == false) {
        _error->Error("Cannot read vendors.list file");
        show_errors(backend, PK_ERROR_ENUM_FAILED_CONFIG_PARSING);
        pk_backend_finished (backend);
        return false;
    }

    for (SourcesListIter it = _lst.SourceRecords.begin(); it != _lst.SourceRecords.end(); ++it) {
        if ((*it)->Type & SourcesList::Comment) {
            continue;
        }

        string Sections;
        for (unsigned int J = 0; J < (*it)->NumSections; ++J) {
            Sections += (*it)->Sections[J];
            Sections += " ";
        }

        // Optionally hide source/development repositories
        if (notDevelopment &&
                ((*it)->Type & SourcesList::DebSrc ||
                 (*it)->Type & SourcesList::RpmSrc ||
                 (*it)->Type & SourcesList::RpmSrcDir ||
                 (*it)->Type & SourcesList::RepomdSrc)) {
            continue;
        }

        // Build the textual identity of the repo and hash it into an id
        string repo;
        repo = (*it)->GetType();
        repo += " " + (*it)->VendorID;
        repo += " " + (*it)->URI;
        repo += " " + (*it)->Dist;
        repo += " " + Sections;

        gchar *hash;
        const gchar allowedChars[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
        hash = crypt(repo.c_str(), salt);
        if (hash == NULL) {
            // crypt() can fail (returns NULL); skip the entry rather than
            // crash inside g_strcanon().
            continue;
        }
        g_strcanon(hash, allowedChars, 'D');
        string repoId(hash);

        if (list) {
            pk_backend_repo_detail(backend,
                                   repoId.c_str(),
                                   repo.c_str(),
                                   !((*it)->Type & SourcesList::Disabled));
        } else {
            if (repoId.compare(repo_id) == 0) {
                if (enabled) {
                    (*it)->Type = (*it)->Type & ~SourcesList::Disabled;
                } else {
                    (*it)->Type |= SourcesList::Disabled;
                }
                found = true;
                break;
            }
        }
    }

    if (!list) {
        if (!found) {
            _error->Error("Could not find the repository");
            show_errors(backend, PK_ERROR_ENUM_REPO_NOT_AVAILABLE);
        } else if (!_lst.UpdateSources()) {
            _error->Error("Could not update sources file");
            show_errors(backend, PK_ERROR_ENUM_CANNOT_WRITE_REPO_CONFIG);
        }
    }

    pk_backend_finished (backend);
    return true;
}