/* Print the repository summary page: branch list, tag list, an optional
 * short log, and the clone URL line, all inside one summary table.
 *
 * Fixes vs. previous version: the string returned by expand_macros() was
 * never freed, and a commented-out call was left behind as dead code. */
void cgit_print_summary()
{
	html("<table summary='repository info' class='list nowrap'>");
	cgit_print_branches(ctx.cfg.summary_branches);
	html("<tr class='nohover'><td colspan='4'> </td></tr>");
	cgit_print_tags(ctx.cfg.summary_tags);
	if (ctx.cfg.summary_log > 0) {
		html("<tr class='nohover'><td colspan='4'> </td></tr>");
		cgit_print_log(ctx.qry.head, 0, ctx.cfg.summary_log, NULL,
			       NULL, NULL, 0, 0);
	}
	if (ctx.repo->clone_url) {
		char *full_clone = expand_macros(ctx.repo->clone_url);
		char *pound;

		/* Replace every '#' with a space before printing, as the
		 * original code did (presumably '#' is used as a separator
		 * in the configured clone-url — verify against config). */
		for (pound = strchr(full_clone, '#'); pound != NULL;
		     pound = strchr(pound + 1, '#'))
			*pound = ' ';
		print_url(full_clone, NULL);
		free(full_clone);	/* expand_macros() returns heap memory */
	} else if (ctx.cfg.clone_prefix) {
		print_urls(ctx.cfg.clone_prefix, ctx.repo->url);
	}
	html("</table>");
}
/* Emit the repository summary table: branches, tags, optionally a short
 * log, and finally the clone URL line(s). */
void cgit_print_summary()
{
	html("<table summary='repository info' class='list nowrap'>");

	/* Branch and tag sections, separated by an empty spacer row. */
	cgit_print_branches(ctx.cfg.summary_branches);
	html("<tr class='nohover'><td colspan='4'> </td></tr>");
	cgit_print_tags(ctx.cfg.summary_tags);

	/* A short log is shown only when configured with a positive count. */
	if (ctx.cfg.summary_log > 0) {
		html("<tr class='nohover'><td colspan='4'> </td></tr>");
		cgit_print_log(ctx.qry.head, 0, ctx.cfg.summary_log,
			       NULL, NULL, NULL, 0);
	}

	/* A per-repo clone_url wins over the global clone_prefix. */
	if (ctx.repo->clone_url)
		print_urls(ctx.repo->clone_url, NULL);
	else if (ctx.cfg.clone_prefix)
		print_urls(ctx.cfg.clone_prefix, ctx.repo->url);

	html("</table>");
}
/*
 * Run the "get" action: parse the dependency file named by options->fname
 * (with all environment variables bound from options->argv) and either
 * print the resulting URLs (when options->no_changes is set) or download
 * them all into the configured download directory.
 *
 * Returns 1 on success, 0 on failure; on failure the pending system error
 * is recorded via fetchdeps_errors_trap_system_error().
 *
 * Fix: the success and failure paths previously duplicated the cleanup
 * code; they are unified into a single exit path so they cannot diverge.
 */
bool_t
get_action(cmdline_t* options)
{
  bool_t ok = 0;
  char* to_dir = NULL;
  parser_t* ctx = NULL;
  stringset_t* urls = NULL;

  assert(options != NULL);

  // Locate the downloads directory.
  to_dir = fetchdeps_filesys_download_dir(options->fname);
  if (!to_dir)
    goto cleanup;

  // Check that the downloads directory exists.
  if (!fetchdeps_filesys_is_directory(to_dir)) {
    fetchdeps_errors_set_with_msg(ERR_NO_DIR,
        "Bad download directory (you may need to run 'deps init')");
    goto cleanup;
  }

  // Set up for parsing.
  ctx = fetchdeps_parser_new(options->fname);
  if (!ctx)
    goto cleanup;
  if (!fetchdeps_environ_init_all_vars(ctx->vars, options->argv))
    goto cleanup;
  urls = fetchdeps_stringset_new();
  if (!urls)
    goto cleanup;

  // Parse away!
  if (!fetchdeps_parser_parse(ctx, urls))
    goto cleanup;

  // Finished parsing, let's do something with the urls.
  if (options->no_changes)
    print_urls(urls);
  else if (!fetchdeps_download_fetch_all(urls, to_dir))
    goto cleanup;

  ok = 1;

cleanup:
  // Record errno-style errors first, before any cleanup can clobber them.
  if (!ok)
    fetchdeps_errors_trap_system_error();
  // free(NULL) is a no-op; the project *_free() functions may not be
  // NULL-safe, so those stay guarded.
  free(to_dir);
  if (ctx)
    fetchdeps_parser_free(ctx);
  if (urls)
    fetchdeps_stringset_free(urls);
  return ok;
}
int main(int argc, char *argv[]) { int c, sfd, cfd; u_long dst; u_short dport, win = WINDOW_LEN; struct sockaddr_in sin; while ((c = getopt(argc, argv, "w:h?")) != -1) { switch (c) { case 'w': if ((win = atoi(optarg)) == 0) usage(); break; default: usage(); } } argc -= optind; argv += optind; if (argc != 2) usage(); if ((dst = resolve_host(argv[0])) == 0) usage(); if ((dport = atoi(argv[1])) == 0) usage(); if ((sfd = init_ftpd(FTPD_PORT, win)) == -1) { perror("init_ftpd"); exit(1); } print_urls(dst, dport, win); for (;;) { c = sizeof(sin); if ((cfd = accept(sfd, (struct sockaddr *)&sin, &c)) == -1) { perror("accept"); exit(1); } printf("connection from %s\n", inet_ntoa(sin.sin_addr)); if (fork() == 0) { close(sfd); do_ftpd(cfd); close(cfd); exit(0); } close(cfd); } exit(0); }
/*
 * Run the "list" action: parse the dependency file named by
 * options->fname (with environment variables bound from options->argv)
 * and print every URL it produces.
 *
 * Returns 1 on success, 0 on failure; on failure the pending system
 * error is recorded via fetchdeps_errors_trap_system_error().
 */
bool_t
list_action(cmdline_t* options)
{
  parser_t* parser = NULL;
  stringset_t* url_set = NULL;

  assert(options != NULL);

  // Build the parser and bind every environment variable into it.
  parser = fetchdeps_parser_new(options->fname);
  if (!parser)
    goto failure;
  if (!fetchdeps_environ_init_all_vars(parser->vars, options->argv))
    goto failure;

  // An empty string set to collect the parsed URLs into.
  url_set = fetchdeps_stringset_new();
  if (!url_set)
    goto failure;

  // Run the parse, then show everything we collected.
  if (!fetchdeps_parser_parse(parser, url_set))
    goto failure;
  print_urls(url_set);

  // Success: release both resources and report it.
  fetchdeps_parser_free(parser);
  fetchdeps_stringset_free(url_set);
  return 1;

failure:
  // Record the system error, then free whatever was allocated so far.
  fetchdeps_errors_trap_system_error();
  if (parser)
    fetchdeps_parser_free(parser);
  if (url_set)
    fetchdeps_stringset_free(url_set);
  return 0;
}
// formatted output of details when long list is requested
//
// Prints a header row followed by one row per FileInfo.  The name, size
// and checksum columns are padded to the widest value found in `files`
// (subject to a minimum that fits the header text).  Missing attributes
// are printed as "(n/a)" placeholders.
//
// files     - entries to print; returns immediately if empty
// show_urls - if true, also call print_urls() for each entry after its row
// show_meta - if true, also call print_meta() for each entry after its row
void print_details(const std::list<Arc::FileInfo>& files, bool show_urls, bool show_meta) {
  if (files.empty())
    return;
  unsigned int namewidth = 0;
  unsigned int sizewidth = 0;
  unsigned int csumwidth = 0;
  // find longest length of each field to align the output
  for (std::list<Arc::FileInfo>::const_iterator i = files.begin(); i != files.end(); i++) {
    if (i->GetName().length() > namewidth)
      namewidth = i->GetName().length();
    // digits needed for the largest size; zero sizes are skipped since
    // log10(0) is undefined
    if (i->CheckSize() && i->GetSize() > 0 && // log(0) not good!
        (unsigned int)(log10(i->GetSize()))+1 > sizewidth)
      sizewidth = (unsigned int)(log10(i->GetSize()))+1;
    if (i->CheckCheckSum() && i->GetCheckSum().length() > csumwidth)
      csumwidth = i->GetCheckSum().length();
  }
  // Header row.  Note these widths are the raw (pre-minimum) values; the
  // minimums below are applied only to the data rows.
  std::cout << std::setw(namewidth) << std::left << "<Name> ";
  std::cout << "<Type> ";
  std::cout << std::setw(sizewidth + 4) << std::left << "<Size> ";
  std::cout << "<Modified> ";
  std::cout << "<Validity> ";
  std::cout << "<CheckSum> ";
  // NOTE(review): the csumwidth padding is applied to the <Latency> header
  // while <CheckSum> itself gets none; this only lines up if <Latency> is
  // meant to right-align under the checksum column -- verify against
  // sample output.
  std::cout << std::setw(csumwidth) << std::right << "<Latency>";
  std::cout << std::endl;
  // set minimum widths to accommodate headers
  if (namewidth < 7) namewidth = 7;
  if (sizewidth < 7) sizewidth = 7;
  if (csumwidth < 8) csumwidth = 8;
  // One data row per entry, columns in header order.
  for (std::list<Arc::FileInfo>::const_iterator i = files.begin(); i != files.end(); i++) {
    std::cout << std::setw(namewidth) << std::left << i->GetName();
    switch (i->GetType()) {
    case Arc::FileInfo::file_type_file:
      std::cout << " file";
      break;
    case Arc::FileInfo::file_type_dir:
      std::cout << " dir";
      break;
    default:
      // unknown/undetermined entry type
      std::cout << " (n/a)";
      break;
    }
    // size: right-aligned number, or placeholder when size is unknown
    if (i->CheckSize()) {
      std::cout << " " << std::setw(sizewidth) << std::right << Arc::tostring(i->GetSize());
    } else {
      std::cout << " " << std::setw(sizewidth) << std::right << " (n/a)";
    }
    if (i->CheckModified()) {
      std::cout << " " << i->GetModified();
    } else {
      std::cout << " (n/a) ";
    }
    if (i->CheckValid()) {
      std::cout << " " << i->GetValid();
    } else {
      std::cout << " (n/a) ";
    }
    // checksum: left-aligned string, or placeholder when absent
    if (i->CheckCheckSum()) {
      std::cout << " " << std::setw(csumwidth) << std::left << i->GetCheckSum();
    } else {
      std::cout << " " << std::setw(csumwidth) << std::left << " (n/a)";
    }
    if (i->CheckLatency()) {
      std::cout << " " << i->GetLatency();
    } else {
      std::cout << " (n/a)";
    }
    std::cout << std::endl;
    // caller-requested extras, printed after the entry's row
    if (show_urls)
      print_urls(*i);
    if (show_meta)
      print_meta(*i);
  }
}
// List the object at dir_url, printing either bare entry names or, with
// show_details, a formatted table via print_details().
//
// dir_url      - URL to stat/list; a "urllist" URL names a file holding a
//                list of locations, each of which is listed in turn
// usercfg      - user configuration (credentials, proxy path, ...)
// show_details - long listing
// show_urls    - also print each entry's location URLs via print_urls()
// show_meta    - also print each entry's metadata via print_meta()
// no_list      - stat only the named object; never list directory contents
// force_list   - treat the object as a directory; fail if it is not
// check_access - only probe read access and print "passed"/"failed"
// recursion    - remaining directory depth to descend into
// timeout      - passed through unchanged on recursive calls
//
// Returns true on success, false on failure.
static bool arcls(const Arc::URL& dir_url,
                  Arc::UserConfig& usercfg,
                  bool show_details, // longlist
                  bool show_urls, // locations
                  bool show_meta, // metadata
                  bool no_list, // don't list dirs
                  bool force_list, // force dir list
                  bool check_access, // checkaccess
                  int recursion, // recursion
                  int timeout) { // timeout

  if (!dir_url) {
    logger.msg(Arc::ERROR, "Invalid URL: %s", dir_url.fullstr());
    return false;
  }

  // Expand a "urllist" file into its member URLs and list each one;
  // report failure if any member fails, but keep going through the rest.
  if (dir_url.Protocol() == "urllist") {
    std::list<Arc::URL> dirs = Arc::ReadURLList(dir_url);
    if (dirs.empty()) {
      logger.msg(Arc::ERROR, "Can't read list of locations from file %s",
                 dir_url.Path());
      return false;
    }
    bool r = true;
    for (std::list<Arc::URL>::iterator dir = dirs.begin();
         dir != dirs.end(); dir++) {
      if(!arcls(*dir, usercfg, show_details, show_urls, show_meta,
                no_list, force_list, check_access, recursion, timeout))
        r = false;
    }
    return r;
  }

  Arc::DataHandle url(dir_url, usercfg);
  if (!url) {
    logger.msg(Arc::ERROR, "Unsupported URL given");
    return false;
  }

  // Fail early, with a clear message, when the protocol needs credentials
  // but no proxy is configured or the proxy has already expired.
  if (url->RequiresCredentials()) {
    if (usercfg.ProxyPath().empty() ) {
      logger.msg(Arc::ERROR,
                 "Unable to list content of %s: No valid credentials found",
                 dir_url.str());
      return false;
    }
    Arc::Credential holder(usercfg.ProxyPath(), "", "", "");
    if (holder.GetEndTime() < Arc::Time()){
      logger.msg(Arc::ERROR, "Proxy expired");
      logger.msg(Arc::ERROR,
                 "Unable to list content of %s: No valid credentials found",
                 dir_url.str());
      return false;
    }
  }
  url->SetSecure(false);

  // checkaccess mode: just probe readability and report the outcome.
  if(check_access) {
    std::cout << dir_url << " - ";
    if(url->Check(false)) {
      std::cout << "passed" << std::endl;
      return true;
    } else {
      std::cout << "failed" << std::endl;
      return false;
    }
  }

  // Build the set of attributes to request, driven by what will be shown.
  Arc::DataPoint::DataPointInfoType verb =
    (Arc::DataPoint::DataPointInfoType)
      (Arc::DataPoint::INFO_TYPE_MINIMAL | Arc::DataPoint::INFO_TYPE_NAME);
  if(show_urls) verb = (Arc::DataPoint::DataPointInfoType)
                         (verb | Arc::DataPoint::INFO_TYPE_STRUCT);
  if(show_meta) verb = (Arc::DataPoint::DataPointInfoType)
                         (verb | Arc::DataPoint::INFO_TYPE_ALL);
  if(show_details) verb = (Arc::DataPoint::DataPointInfoType)
                            (verb |
                             Arc::DataPoint::INFO_TYPE_TYPE |
                             Arc::DataPoint::INFO_TYPE_TIMES |
                             Arc::DataPoint::INFO_TYPE_CONTENT |
                             Arc::DataPoint::INFO_TYPE_ACCESS);
  // Recursion needs the entry type to tell directories from files.
  if(recursion > 0) verb = (Arc::DataPoint::DataPointInfoType)
                             (verb | Arc::DataPoint::INFO_TYPE_TYPE);

  Arc::DataStatus res;
  Arc::FileInfo file;
  std::list<Arc::FileInfo> files;

  if(no_list) { // only requested object is queried
    res = url->Stat(file, verb);
    if(res) files.push_back(file);
  } else if(force_list) { // assume it is directory, fail otherwise
    res = url->List(files, verb);
  } else { // try to guess what to do
    res = url->Stat(file, (Arc::DataPoint::DataPointInfoType)(verb | Arc::DataPoint::INFO_TYPE_TYPE));
    if(res && (file.GetType() == Arc::FileInfo::file_type_file)) {
      // If it is file and we are sure, then just report it.
      files.push_back(file);
    } else {
      // If it is dir then we must list it. But if stat failed or
      // if type is undefined there is still chance it is directory.
      Arc::DataStatus res_ = url->List(files, verb);
      if(!res_) {
        // If listing failed maybe simply report previous result if any.
        if(res) {
          files.push_back(file);
        }
      } else {
        res = res_;
      }
    }
  }

  if (!res) {
    // Hard failure only when nothing at all was obtained; otherwise warn
    // and print the partial result.
    if (files.empty()) {
      logger.msg(Arc::ERROR, std::string(res));
      if (res.Retryable())
        logger.msg(Arc::ERROR, "This seems like a temporary error, please try again later");
      return false;
    }
    logger.msg(Arc::INFO, "Warning: "
               "Failed listing files but some information is obtained");
  }

  files.sort(); // Sort alphabetically by name
  if (show_details) {
    print_details(files, show_urls, show_meta);
  } else {
    for (std::list<Arc::FileInfo>::iterator i = files.begin();
         i != files.end(); i++) {
      std::cout << i->GetName() << std::endl;
      if (show_urls) print_urls(*i);
      if (show_meta) print_meta(*i);
    }
  }

  // Do recursion. Recursion has no sense if listing is forbidden.
  if ((recursion > 0) && (!no_list)) {
    for (std::list<Arc::FileInfo>::iterator i = files.begin();
         i != files.end(); i++) {
      if (i->GetType() == Arc::FileInfo::file_type_dir) {
        // Build the subdirectory URL, avoiding a doubled separator;
        // local "file" URLs use the platform directory separator.
        Arc::URL suburl = dir_url;
        if(suburl.Protocol() != "file") {
          if (suburl.Path()[suburl.Path().length() - 1] != '/')
            suburl.ChangePath(suburl.Path() + "/" + i->GetName());
          else
            suburl.ChangePath(suburl.Path() + i->GetName());
        } else {
          if (suburl.Path()[suburl.Path().length() - 1] != G_DIR_SEPARATOR)
            suburl.ChangePath(suburl.Path() + G_DIR_SEPARATOR_S + i->GetName());
          else
            suburl.ChangePath(suburl.Path() + i->GetName());
        }
        std::cout << std::endl;
        std::cout << suburl.str() << ":" << std::endl;
        // NOTE: the recursive call's return value is ignored, so failures
        // deeper in the tree do not affect this call's result.
        arcls(suburl, usercfg, show_details, show_urls, show_meta,
              no_list, force_list, check_access, recursion - 1, timeout);
        std::cout << std::endl;
      }
    }
  }
  return true;
}