std::vector<std::string> list_files(
        const std::string &path,
        file_filter filter,
        size_t max_count)
{
    const std::wstring wpath =
        clean_path(platform::to_windows_path(::to_lower(path))) + L'\\';

    auto &hidden_dirs = platform::hidden_dirs();
    for (auto &i : hidden_dirs)
        if (starts_with(wpath, i + L'\\'))
            return std::vector<std::string>();

    std::vector<std::string> result;

    WIN32_FIND_DATA find_data;
    HANDLE handle = FindFirstFile((wpath + L'*').c_str(), &find_data);
    if (handle != INVALID_HANDLE_VALUE)
    {
        auto &hidden_names = platform::hidden_names();
        auto &hidden_suffixes = platform::hidden_suffixes();

        do
        {
            if ((find_data.cFileName[0] != L'.') &&
                ((find_data.dwFileAttributes & FILE_ATTRIBUTE_HIDDEN) == 0) &&
                ((find_data.dwFileAttributes & FILE_ATTRIBUTE_SYSTEM) == 0))
            {
                std::wstring name = find_data.cFileName, lname = to_lower(name);
                if ((find_data.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY) &&
                    (hidden_names.find(lname) == hidden_names.end()) &&
                    (hidden_dirs.find(wpath + lname) == hidden_dirs.end()))
                {
                    result.emplace_back(from_utf16(name) + '/');
                }
                else if (filter == file_filter::all)
                {
                    result.emplace_back(from_utf16(name));
                }
                else if ((filter == file_filter::large_files) &&
                         (hidden_suffixes.find(suffix_of(lname)) == hidden_suffixes.end()))
                {
                    struct __stat64 stat;
                    if ((::_wstat64((wpath + find_data.cFileName).c_str(), &stat) == 0) &&
                        (size_t(stat.st_size) >= min_file_size))
                    {
                        result.emplace_back(from_utf16(name));
                    }
                }
            }
        } while ((result.size() < max_count) && (FindNextFile(handle, &find_data) != 0));

        FindClose(handle);
    }

    return result;
}
std::vector<std::string> list_files(
        const std::string &path,
        file_filter filter,
        size_t max_count)
{
    std::string cpath = path;
    while (!cpath.empty() && (cpath.back() == '/'))
        cpath.pop_back();
    cpath.push_back('/');

    auto &hidden_dirs = platform::hidden_dirs();
    for (auto &i : hidden_dirs)
        if (starts_with(cpath, i + '/'))
            return std::vector<std::string>();

    std::vector<std::string> result;

    auto dir = ::opendir(cpath.c_str());
    if (dir)
    {
        auto &hidden_names = platform::hidden_names();
        auto &hidden_suffixes = platform::hidden_suffixes();

        for (auto dirent = ::readdir(dir);
             dirent && (result.size() < max_count);
             dirent = ::readdir(dir))
        {
            struct stat stat;
            if ((dirent->d_name[0] != '.') &&
                (::stat((cpath + dirent->d_name).c_str(), &stat) == 0))
            {
                // Non-const so the entry can be moved into the result below.
                std::string name = dirent->d_name;

                // Skip editor backup files ending in '~'.
                if (!name.empty() && (name[name.length() - 1] != '~'))
                {
                    const std::string lname = to_lower(name);
                    if (S_ISDIR(stat.st_mode) &&
                        (hidden_names.find(lname) == hidden_names.end()) &&
                        (hidden_dirs.find(cpath + name) == hidden_dirs.end()))
                    {
                        result.emplace_back(name + '/');
                    }
                    else if (filter == file_filter::all)
                    {
                        result.emplace_back(std::move(name));
                    }
                    else if ((filter == file_filter::large_files) &&
                             (size_t(stat.st_size) >= min_file_size) &&
                             (hidden_suffixes.find(suffix_of(lname)) == hidden_suffixes.end()))
                    {
                        result.emplace_back(std::move(name));
                    }
                }
            }
        }

        ::closedir(dir);
    }

    return result;
}
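/*
 * A minimal usage sketch, not part of the original sources.  It assumes the
 * declarations above are visible, that file_filter has at least the
 * enumerators `all` and `large_files` used above, and that min_file_size is
 * defined elsewhere; the helper name, directory path, and entry limit below
 * are made up for illustration.
 */
#include <cstdio>

static void print_large_files()
{
    // Directories come back with a trailing '/', plain files without one.
    const std::vector<std::string> entries =
        list_files("/home/user/Music", file_filter::large_files, 1000);

    for (const std::string &entry : entries)
        std::printf("%s\n", entry.c_str());
}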
/*
 * extract the given (expanded) URL "url" to the given directory "dir"
 * returns -1 on error, 0 otherwise
 */
int
unpackURL(const char *url, const char *dir)
{
	char *pkg;
	int rc;
	char base[MaxPathSize];
	char pkg_path[MaxPathSize];

	{
		/* Verify if the URL is really ok */
		char expnd[MaxPathSize];

		rc = expandURL(expnd, url);
		if (rc == -1) {
			warnx("unpackURL: verification expandURL failed");
			return -1;
		}
		if (strcmp(expnd, url) != 0) {
			warnx("unpackURL: verification expandURL failed, '%s'!='%s'",
			    expnd, url);
			return -1;
		}
	}

	pkg = strrchr(url, '/');
	if (pkg == NULL) {
		warnx("unpackURL: no '/' in URL %s?!", url);
		return -1;
	}
	(void) snprintf(base, sizeof(base), "%.*s/", (int)(pkg - url), url);
	(void) snprintf(pkg_path, sizeof(pkg_path), "%.*s",
	    (int)(pkg - url), url);	/* no trailing '/' */
	pkg++;

	/* Leave a hint for any depending pkgs that may need it */
	if (getenv("PKG_PATH") == NULL) {
		setenv("PKG_PATH", pkg_path, 1);
#if 0
		path_create(pkg_path);	/* XXX */
#endif
		if (Verbose)
			printf("setenv PKG_PATH='%s'\n", pkg_path);
	}

	if (strncmp(url, "http://", 7) == 0)
		return http_fetch(url, dir);

	rc = ftp_start(base);
	if (rc == -1) {
		warnx("ftp_start() failed");
		return -1;	/* error */
	}

	{
		char cmd[1024];
		const char *decompress_cmd = NULL;
		const char *suf;

		if (Verbose)
			printf("unpackURL '%s' to '%s'\n", url, dir);

		suf = suffix_of(pkg);
		if (!strcmp(suf, "tbz") || !strcmp(suf, "bz2"))
			decompress_cmd = BZIP2_CMD;
		else if (!strcmp(suf, "tgz") || !strcmp(suf, "gz"))
			decompress_cmd = GZIP_CMD;
		else if (!strcmp(suf, "tar"))
			; /* do nothing */
		else
			errx(EXIT_FAILURE, "don't know how to decompress %s, sorry", pkg);

		/* yes, this is gross, but needed for borken ftp(1) */
		(void) snprintf(cmd, sizeof(cmd), "get %s \"| ( cd %s; "
		    TAR_CMD " %s %s -vvxp -f - | tee %s )\"\n",
		    pkg, dir,
		    decompress_cmd != NULL ? "--use-compress-program" : "",
		    decompress_cmd != NULL ? decompress_cmd : "",
		    Verbose ? "/dev/stderr" : "/dev/null");

		rc = ftp_cmd(cmd, "\n(226|550).*\n");
		if (rc != 226) {
			warnx("Cannot fetch file (%d!=226)!", rc);
			return -1;
		}
	}

	return 0;
}
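/*
 * A minimal calling sketch, not in the original pkg_install sources.  It
 * assumes unpackURL() above is compiled as C and reachable through a header;
 * the package URL and the scratch directory are made up for illustration.
 */
extern "C" int unpackURL(const char *url, const char *dir);

static int fetch_example_package()
{
    /* unpackURL() expects an already-expanded URL (expandURL() is only run
     * again as a verification) and returns 0 on success, -1 on error. */
    return unpackURL("ftp://ftp.example.org/pub/pkgsrc/packages/foo-1.0.tgz",
                     "/var/tmp/unpack-example");
}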