// Exemplo n.º 1: standalone MyFTP server entry point
/// Entry point for the MyFTP server.
///
/// Parses command-line options (log level, listen address/port, connection
/// limit, anonymous access, default directory, config file), fills in a
/// myftpserver_t configuration struct, and hands it to start_server().
/// Always returns 0; fatal errors are expected to be handled inside
/// start_server()/server_log().
int main(int argc, char * argv[]){
    server_log(SERVER_LOG_INFO, "Starting MyFTP Server... \n");

    // Every option carries a default so the server can start with no args.
    OptionParser parser = OptionParser().description("MyFTP Server.");
    parser.add_option("-v", "--verbose").dest("verbose").type("int").set_default("3")
        .help("Set log level. Default: 3 - INFO");
    parser.add_option("-p", "--port").dest("port").type("int").set_default("21")
        .help("Set listening port. Default: 21");
    parser.add_option("-i", "--IP").dest("ip").set_default("127.0.0.1")
        .help("Set listening IP. Default: 127.0.0.1");
    parser.add_option("-m", "--max-conns").dest("max_conns").type("int").set_default("10")
        .help("Set max connections limit. Default: 10");
    parser.add_option("-a", "--allow-anonymous").dest("allow_anony").type("int").set_default("0")
        .help("Allow anonymous connections. Default: 0");
    parser.add_option("-d", "--default-dir").dest("default_dir").set_default("")
        .help("Default dir for anonymous users");
    parser.add_option("-c", "--config").dest("config_file").set_default("")
        .help("Configuration file");
    parser.add_help_option();

    optparse::Values & options = parser.parse_args(argc, argv);

    // Zero-initialize so any field not set below is not read as garbage
    // by start_server().
    myftpserver_t server_t{};

    // NOTE(review): assumes log_level is a global consumed by server_log —
    // it is not declared in this block.
    log_level = (int)options.get("verbose");

    server_t.port = (unsigned int)options.get("port");
    server_log(SERVER_LOG_INFO, "Server port parsed: %d \n", server_t.port);

    // inet_addr() returns the address in network byte order (and INADDR_NONE
    // on a malformed address — that case is logged below, not rejected).
    server_t.ipv4addr = (unsigned int)inet_addr(options["ip"].c_str());
    server_log(SERVER_LOG_INFO, "Server IP parsed: %s Hex: %08x \n", options["ip"].c_str(), server_t.ipv4addr);

    server_t.max_conns = (unsigned int)options.get("max_conns");
    server_log(SERVER_LOG_INFO, "Server max connections: %d\n", server_t.max_conns);

    server_t.allow_anonymous = (bool)options.get("allow_anony");
    server_log(SERVER_LOG_INFO, "Server allows anonymous connections: %d\n", server_t.allow_anonymous);

    // Bounded copy: the original strcpy() would overflow default_dir for a
    // long user-supplied path. snprintf always NUL-terminates and truncates
    // at the buffer size. NOTE(review): assumes default_dir is a fixed-size
    // char array so sizeof yields its capacity — confirm in myftpserver_t.
    snprintf(server_t.default_dir, sizeof(server_t.default_dir), "%s",
             options["default_dir"].c_str());
    server_log(SERVER_LOG_INFO, "Server default dir: %s\n", server_t.default_dir);

    start_server(&server_t);

    return 0;
}
// Exemplo n.º 2: PacBio CCS (pbccs) consensus-calling entry point
/// Entry point for the pbccs circular-consensus (CCS) driver.
///
/// Pipeline: parse options -> validate OUTPUT/FILES arguments -> configure
/// logging -> verify all input sequencing chemistries are supported ->
/// stream subreads grouped by ZMW hole number into fixed-size chunks fed to
/// a worker queue -> write BAM/FASTQ results on a dedicated writer thread ->
/// emit a results report. Returns 0 on success; argument problems go
/// through parser.error() and unsupported chemistries exit(-1).
int main(int argc, char** argv)
{
    using boost::algorithm::join;
    using boost::make_optional;

    SetColumns();

    // args and options
    //
    //
    // clang-format off
    //   clang messes with the way the arg to version() is formatted..
    auto parser =
        OptionParser()
        .usage("usage: %prog [OPTIONS] OUTPUT FILES...")
        .version("%prog " CCS_VERSION " (commit " CCS_GIT_SHA1 ")"
                 "\nConsensusCore2 " CC2_VERSION " (commit " CC2_GIT_SHA1 ")"
                 "\nCopyright (c) 2014-2015 Pacific Biosciences, Inc.\nLicense: 3-BSD")
        .description(DESCRIPTION
                     "\nAdditional documentation: http://github.com/PacificBiosciences/pbccs");
    // clang-format on
    //
    const vector<string> logLevels = {"TRACE", "DEBUG", "INFO",     "NOTICE",
                                      "WARN",  "ERROR", "CRITICAL", "FATAL"
                                     };
    // Long-option prefix; OptionNames::* are bare names without the "--".
    const string em = "--";

    parser.add_option(em + OptionNames::ForceOutput)
    .action("store_true")
    .help("Overwrite OUTPUT file if present.");
    parser.add_option(em + OptionNames::PbIndex)
    .action("store_true")
    .help("Generate a .pbi file for the OUTPUT file.");
    parser.add_option(em + OptionNames::Zmws)
    .help(
        "Generate CCS for the provided comma-separated holenumber ranges only. Default = all");
    parser.add_option(em + OptionNames::MinSnr)
    .type("float")
    .set_default(3.75)  // See https://github.com/PacificBiosciences/pbccs/issues/86 for a more
    // detailed discussion of this default.
    .help("Minimum SNR of input subreads. Default = %default");
    parser.add_option(em + OptionNames::MinReadScore)
    .type("float")
    .set_default(0.75)
    .help("Minimum read score of input subreads. Default = %default");

    // Algorithm-specific options (min passes, model paths, ...) are owned by
    // ConsensusSettings — NOTE(review): presumably this registers e.g.
    // --minPasses, which is read back below; confirm in ConsensusSettings.
    ConsensusSettings::AddOptions(&parser);

    parser.add_option(em + OptionNames::ReportFile)
    .set_default("ccs_report.csv")
    .help("Where to write the results report. Default = %default");
    parser.add_option(em + OptionNames::NumThreads)
    .type("int")
    .set_default(0)
    .help("Number of threads to use, 0 means autodetection. Default = %default");
    // parser.add_option("--chunkSize").type("int").set_default(5).help("Number of CCS jobs to
    // submit simultaneously. Default = %default");
    parser.add_option(em + OptionNames::LogFile).help("Log to a file, instead of STDERR.");
    parser.add_option(em + OptionNames::LogLevel)
    .choices(logLevels.begin(), logLevels.end())
    .set_default("INFO")
    .help("Set log level. Default = %default");

    const auto options = parser.parse_args(argc, argv);
    // Positional arguments: OUTPUT followed by one or more input FILES.
    auto files = parser.args();

    const ConsensusSettings settings(options);

    // NOTE(review): options.get() appears to return an optparse Value with
    // implicit conversions — hence the mix of implicit and static_cast
    // conversions below.
    const bool forceOutput = options.get(OptionNames::ForceOutput);
    const bool pbIndex = options.get(OptionNames::PbIndex);
    const float minSnr = options.get(OptionNames::MinSnr);
    const float minReadScore = static_cast<float>(options.get(OptionNames::MinReadScore));
    const size_t nThreads = ThreadCount(options.get(OptionNames::NumThreads));
    // Chunking is currently fixed at one ZMW per work item (see the
    // commented-out --chunkSize option above).
    const size_t chunkSize = 1;  // static_cast<size_t>(options.get("chunkSize"));

    if (static_cast<int>(options.get(OptionNames::MinPasses)) < 1)
        parser.error("option --minPasses: invalid value: must be >= 1");

    // handle --zmws
    //
    // Optional whitelist restricting processing to given hole-number ranges;
    // left empty (none) when --zmws was not supplied.
    optional<Whitelist> whitelist(none);
    const string wlspec(options.get(OptionNames::Zmws));
    try {
        if (!wlspec.empty()) whitelist = Whitelist(wlspec);
    } catch (...) {
        parser.error("option --zmws: invalid specification: '" + wlspec + "'");
    }

    // input validation
    //
    //
    if (files.size() < 1)
        parser.error("missing OUTPUT and FILES...");
    else if (files.size() < 2)
        parser.error("missing FILES...");

    // pop first file off the list, is OUTPUT file
    const string outputFile(files.front());
    files.erase(files.begin());

    // verify output file does not already exist
    if (FileExists(outputFile) && !forceOutput)
        parser.error("OUTPUT: file already exists: '" + outputFile + "'");

    // verify input files exist
    for (const auto& file : files)
        if (!FileExists(file)) parser.error("FILES...: file does not exist: '" + file + "'");

    // logging
    //
    // logStream must outlive the Logger installed below, so it is declared
    // at function scope even though it is only opened inside the braces.
    ofstream logStream;
    {
        string logLevel(options.get(OptionNames::LogLevel));
        string logFile(options.get(OptionNames::LogFile));

        if (!logFile.empty()) {
            logStream.open(logFile);
            Logging::Logger::Default(new Logging::Logger(logStream, logLevel));
        } else {
            Logging::Logger::Default(new Logging::Logger(cerr, logLevel));
        }
        Logging::InstallSignalHandlers();
    }

    // start processing chunks!
    //
    //
    const auto avail = PacBio::Consensus::SupportedChemistries();

    PBLOG_DEBUG << "Found consensus models for: (" << join(avail, ", ") << ')';

    DataSet ds(files);

    // test that all input chemistries are supported
    {
        set<string> used;
        try {
            used = ds.SequencingChemistries();
        } catch (InvalidSequencingChemistryException& e) {
            PBLOG_FATAL << e.what();
            exit(-1);
        }
        vector<string> unavail;

        // Chemistries present in the input but absent from the supported set.
        set_difference(used.begin(), used.end(), avail.begin(), avail.end(),
                       back_inserter(unavail));

        if (!unavail.empty()) {
            PBLOG_FATAL << "Unsupported chemistries found: " << join(unavail, ", ");
            exit(-1);
        }

        PBLOG_DEBUG << "Using consensus models for: (" << join(used, ", ") << ')';
    }

    EntireFileQuery query(ds);

    WorkQueue<Results> workQueue(nThreads);
    // Counters for ZMWs dropped before reaching the workers; merged into the
    // writer's Results at the end.
    size_t poorSNR = 0, tooFewPasses = 0;

    future<Results> writer;

    // Pick the writer thread by output extension; anything other than
    // .bam/.fastq/.fq is rejected.
    const string outputExt = FileExtension(outputFile);
    if (outputExt == "bam") {
        unique_ptr<BamWriter> ccsBam(
            new BamWriter(outputFile, PrepareHeader(parser, argc, argv, files)));
        unique_ptr<PbiBuilder> ccsPbi(pbIndex ? new PbiBuilder(outputFile + ".pbi") : nullptr);
        writer = async(launch::async, BamWriterThread, ref(workQueue), move(ccsBam), move(ccsPbi));
    } else if (outputExt == "fastq" || outputExt == "fq")
        writer = async(launch::async, FastqWriterThread, ref(workQueue), ref(outputFile));
    else
        parser.error("OUTPUT: invalid file extension: '" + outputExt + "'");

    // Main streaming loop state: reads arrive grouped by ZMW; each ZMW
    // becomes one Chunk, and chunkSize chunks form one work item.
    unique_ptr<vector<Chunk>> chunk(new vector<Chunk>());
    // Intern movie names so every ReadId shares one string per movie.
    map<string, shared_ptr<string>> movieNames;
    optional<int32_t> holeNumber(none);  // hole number of the ZMW in progress
    bool skipZmw = false;                // true while skipping the current ZMW
    boost::optional<std::pair<uint16_t, uint16_t>> barcodes(boost::none);

    for (const auto& read : query) {
        const string movieName = read.MovieName();

        if (movieNames.find(movieName) == movieNames.end()) {
            movieNames[movieName] = make_shared<string>(movieName);
        }
        // Have we started a new ZMW?
        if (!holeNumber || *holeNumber != read.HoleNumber()) {
            // Finish the previous ZMW: drop it if it gathered too few passes.
            if (chunk && !chunk->empty() && chunk->back().Reads.size() < settings.MinPasses) {
                PBLOG_DEBUG << "Skipping ZMW " << chunk->back().Id
                            << ", insufficient number of passes (" << chunk->back().Reads.size()
                            << '<' << settings.MinPasses << ')';
                tooFewPasses += 1;
                chunk->pop_back();
            }

            // Flush a full batch of chunks to the worker queue.
            if (chunk && chunk->size() >= chunkSize) {
                workQueue.ProduceWith(CircularConsensus, move(chunk), settings);
                chunk.reset(new vector<Chunk>());
            }

            holeNumber = read.HoleNumber();
            auto snr = read.SignalToNoise();
            if (read.HasBarcodes()) {
                barcodes = read.Barcodes();
            }
            // Decide the fate of the new ZMW: whitelist filter, then SNR
            // filter, else start a fresh Chunk for it.
            if (whitelist && !whitelist->Contains(movieName, *holeNumber)) {
                skipZmw = true;
            } else if (*min_element(snr.begin(), snr.end()) < minSnr) {
                PBLOG_DEBUG << "Skipping ZMW " << movieName << '/' << *holeNumber
                            << ", fails SNR threshold (" << minSnr << ')';
                poorSNR += 1;
                skipZmw = true;
            } else {
                skipZmw = false;
                chunk->emplace_back(Chunk{ReadId(movieNames[movieName], *holeNumber),
                                          vector<Subread>(), SNR(snr[0], snr[1], snr[2], snr[3]),
                                          read.ReadGroup().SequencingChemistry(), barcodes});
            }
        }

        if (skipZmw) continue;

        // Per-subread filter: drop individual reads below the score cutoff.
        if (static_cast<float>(read.ReadAccuracy()) < minReadScore) {
            PBLOG_DEBUG << "Skipping read " << read.FullName() << ", insufficient read accuracy ("
                        << read.ReadAccuracy() << '<' << minReadScore << ')';
            continue;
        }
        // Check that barcode matches the previous ones
        if (barcodes) {
            // if not, set the barcodes to the flag and stop checking them.
            // UINT16_MAX/UINT16_MAX is written into the chunk as the
            // "inconsistent barcodes" marker, then checking is disabled.
            if (!read.HasBarcodes() || read.Barcodes() != barcodes) {
                barcodes->first = UINT16_MAX;
                barcodes->second = UINT16_MAX;
                chunk->back().Barcodes = barcodes;
                barcodes = boost::none;
            }
        }

        chunk->back().Reads.emplace_back(
            Subread{ReadId(movieNames[movieName], *holeNumber,
                           Interval(read.QueryStart(), read.QueryEnd())),
                    read.Sequence(), read.LocalContextFlags(), read.ReadAccuracy()});
    }

    // if the last chunk doesn't have enough passes, skip it
    if (chunk && !chunk->empty() && chunk->back().Reads.size() < settings.MinPasses) {
        PBLOG_DEBUG << "Skipping ZMW " << chunk->back().Id << ", insufficient number of passes ("
                    << chunk->back().Reads.size() << '<' << settings.MinPasses << ')';
        tooFewPasses += 1;
        chunk->pop_back();
    }

    // run the remaining tasks
    if (chunk && !chunk->empty()) {
        workQueue.ProduceWith(CircularConsensus, move(chunk), settings);
    }

    // wait for the queue to be done
    workQueue.Finalize();

    // wait for the writer thread and get the results counter
    //   then add in the snr/minPasses counts and write the report
    auto counts = writer.get();
    counts.PoorSNR += poorSNR;
    counts.TooFewPasses += tooFewPasses;
    const string reportFile(options.get(OptionNames::ReportFile));

    // "-" selects stdout; anything else is treated as a file path.
    if (reportFile == "-") {
        WriteResultsReport(cout, counts);
    } else {
        ofstream stream(reportFile);
        WriteResultsReport(stream, counts);
    }

    return 0;
}