Example #1
0
File: xml.c  Project: MigNov/CDVWS
/*
 * Walk the direct children of @node, one XML tree level at a time.
 *
 * For each child whose text content is empty or starts with a newline
 * (i.e. a container element rather than a leaf value), build the child
 * XPath "<xpath>/<name>" and recurse via process_recursive() with
 * level + 1.  For each leaf child, append a (name, xpath, value, file)
 * entry to the global xattr[] table unless an identical entry already
 * exists.
 *
 * @doc   parsed XML document
 * @node  node whose children are processed
 * @xpath XPath of @node, used as prefix for children
 * @level current recursion depth (forwarded to process_recursive)
 * @fn    source file name recorded with each attribute entry
 *
 * Returns 0.  Increments the global xml_numIter once per call.
 */
int process_data(xmlDocPtr doc, xmlNodePtr node, char *xpath, int level, char *fn) {
    char *newxpath = NULL;
    char *data = NULL;
    int size, i, found;

    node = node->xmlChildrenNode;
    while (node != NULL) {
        data = (char *)xmlNodeListGetString(doc, node->xmlChildrenNode, 1);

        /* '\n' (formerly magic number 10): element whose content is only
         * layout whitespace is treated as a container, not a value */
        if ((data != NULL) && ((strlen(data) == 0) || (data[0] == '\n'))) {
            /* +2 for the '/' separator and the terminating NUL */
            size = strlen(xpath) + strlen((const char *)node->name) + 2;
            newxpath = (char *)utils_alloc( "xml.process_data.newxpath", size * sizeof(char) );
            if (newxpath != NULL) {
                snprintf(newxpath, size, "%s/%s", xpath, node->name);

                process_recursive(doc, newxpath, level + 1, fn);
                newxpath = utils_free("xml.process_data.newxpath", newxpath);
            }
        }
        else if (data != NULL) {
            /* Skip entries already present in the attribute table */
            found = 0;
            for (i = 0; i < xml_numAttr; i++) {
                if ((strcmp(xattr[i].name, (const char *)node->name) == 0)
                        && ((strcmp(xattr[i].node, xpath) == 0))
                        && ((strcmp(xattr[i].value, data) == 0))
                        && ((strcmp(xattr[i].filename, fn) == 0)))
                    found = 1;
            }

            if (!found) {
                /* Grow the table via a temporary pointer so that a failed
                 * realloc() neither leaks the old block nor leads to a
                 * NULL dereference below; on failure this entry is
                 * silently dropped and existing entries stay valid. */
                tAttr *tmp = NULL;
                if (xattr == NULL)
                    tmp = (tAttr *)utils_alloc( "xml.process_data.xattr", sizeof(tAttr) );
                else
                    tmp = (tAttr *)realloc( xattr, (xml_numAttr + 1) * sizeof(tAttr) );

                if (tmp != NULL) {
                    xattr = tmp;
                    xattr[xml_numAttr].name = strdup( (const char *)node->name);
                    xattr[xml_numAttr].node = strdup(xpath);
                    xattr[xml_numAttr].value = strdup(data);
                    xattr[xml_numAttr].filename = strdup(fn);
                    xattr[xml_numAttr].numIter = xml_numIter;
                    xml_numAttr++;
                }
            }
        }

        data = utils_free("xml.process_data.data", data);

        node = node->next;
    }

    xml_numIter++;
    return 0;
}
Example #2
0
File: xml.c  Project: MigNov/CDVWS
/*
 * Parse @xmlFile and load its options by walking the tree from @root
 * via process_recursive().
 *
 * @xmlFile path of the XML file to load
 * @root    starting XPath for the recursive walk
 *
 * Returns 0 on success, -ENOENT if the file is unreadable and
 * -EINVAL if it cannot be parsed as XML.
 */
int xml_load_opt(char *xmlFile, char *root)
{
    xmlDocPtr doc;

    if (access(xmlFile, R_OK) != 0) {
        fprintf(stderr, "Error: File %s doesn't exist or is not accessible for reading.\n", xmlFile);
        return -ENOENT;
    }

    doc = xmlParseFile(xmlFile);
    /* xmlParseFile returns NULL on parse failure; bail out instead of
     * handing a NULL document to process_recursive()/xmlFreeDoc() */
    if (doc == NULL) {
        fprintf(stderr, "Error: Cannot parse XML file %s.\n", xmlFile);
        return -EINVAL;
    }

    process_recursive(doc, root, 0, xmlFile);

    xmlFreeDoc(doc);
    xmlCleanupParser();

    return 0;
}
Example #3
0
File: xml.c  Project: MigNov/CDVWS
/*
 * Parse @xmlFile and run an XPath-driven walk starting at @xPath.
 *
 * @xmlFile path of the XML file to query
 * @xPath   starting XPath for process_recursive()
 *
 * Returns 0 on success, -ENOENT if the file is unreadable and
 * -EINVAL if parsing or the recursive walk fails.
 */
int xml_query(char *xmlFile, char *xPath) {
    xmlDocPtr doc;
    int ret = 0;

    if (access(xmlFile, R_OK) != 0) {
        fprintf(stderr, "Error: File %s doesn't exist or is not accessible for reading.\n", xmlFile);
        return -ENOENT;
    }

    doc = xmlParseFile(xmlFile);
    /* xmlParseFile returns NULL on parse failure; bail out instead of
     * handing a NULL document to process_recursive()/xmlFreeDoc() */
    if (doc == NULL) {
        fprintf(stderr, "Error: Cannot parse XML file %s.\n", xmlFile);
        return -EINVAL;
    }

    if (process_recursive(doc, xPath, 0, xmlFile) != 0)
        ret = -EINVAL;

    xmlFreeDoc(doc);
    xmlCleanupParser();

    return ret;
}
Example #4
0
  // process SCAN job: hash each block of the buffer, look the hashes up
  // through the scan manager, and print any matches.  The job object and
  // its buffer are released here when processing completes.
  static void process_scan_job(const hasher::job_t& job) {

    print_status(job);

    // block-hash calculator reused for every block in this job
    hasher::hash_calculator_t calculator;

    size_t zeros = 0;

    // advance through the buffer in step_size increments, hashing
    // block_size bytes at each position
    for (size_t pos = 0; pos < job.buffer_data_size; pos += job.step_size) {

      // positions where all bytes are the same are only counted
      if (all_zero(job.buffer, job.buffer_size, pos, job.block_size)) {
        ++zeros;
        continue;
      }

      const std::string block_hash =
              calculator.calculate(job.buffer, job.buffer_size, pos,
                                   job.block_size);

      // look up the hash; an empty JSON string means no match
      const std::string json_string =
              job.scan_manager->find_hash_json(job.scan_mode, block_hash);

      if (!json_string.empty()) {
        // match: emit "offset <tab> hash <tab> json\n"; inside a recursive
        // expansion the recursion path is prepended to the offset
        std::stringstream ss;
        if (!(job.recursion_path == "")) {
          ss << job.recursion_path << "-";
        }
        ss << job.file_offset + pos << "\t"
           << hashdb::bin_to_hex(block_hash) << "\t"
           << json_string << "\n";

        hashdb::tprint(std::cout, ss.str());
      }
    }

    // report the all-same-byte count for final reporting
    job.scan_tracker->track_zero_count(zeros);

    // bytes processed are tracked only at the top recursion level
    if (job.recursion_depth == 0) {
      job.scan_tracker->track_bytes(job.buffer_data_size);
    }

    // recursively find and process any uncompressible data
    if (!job.disable_recursive_processing) {
      process_recursive(job);
    }

    // done with this job: free its buffer and the job object itself
    delete[] job.buffer;
    delete &job;
  }
Example #5
0
  // process INGEST job: hash every block of the buffer and insert the
  // hashes, with optional entropy and label metadata, through the import
  // manager.  The job object and its buffer are released here when
  // processing completes.
  static void process_ingest_job(const hasher::job_t& job) {

    print_status(job);

    if (!job.disable_ingest_hashes) {
      // calculators reused for every block in this job
      hasher::hash_calculator_t calculator;
      hasher::entropy_calculator_t entropy_calc(job.block_size);

      size_t zeros = 0;
      size_t nonprobative = 0;

      // advance through the buffer in step_size increments
      for (size_t pos = 0; pos < job.buffer_data_size; pos += job.step_size) {

        // positions where all bytes are the same are only counted
        if (all_zero(job.buffer, job.buffer_size, pos, job.block_size)) {
          ++zeros;
          continue;
        }

        const std::string block_hash = calculator.calculate(
                job.buffer, job.buffer_size, pos, job.block_size);

        // entropy is optional; 0 when disabled
        uint64_t k_entropy = 0;
        if (!job.disable_calculate_entropy) {
          k_entropy = entropy_calc.calculate(job.buffer, job.buffer_size,
                                             pos);
        }

        // block label is optional; a non-empty label counts the block
        // as nonprobative
        std::string block_label;
        if (!job.disable_calculate_labels) {
          block_label = hasher::calculate_block_label(job.buffer,
                                  job.buffer_size, pos, job.block_size);
          if (!block_label.empty()) {
            ++nonprobative;
          }
        }

        // store the block hash and its metadata
        job.import_manager->insert_hash(block_hash, k_entropy, block_label,
                                        job.file_hash, job.file_offset + pos);
      }

      // report per-source counts for final reporting
      job.ingest_tracker->track_source(job.file_hash, zeros, nonprobative);
    }

    // bytes processed are tracked only at the top recursion level
    if (job.recursion_depth == 0) {
      job.ingest_tracker->track_bytes(job.buffer_data_size);
    }

    // recursively find and process any uncompressible data in order to
    // record their source names
    if (!job.disable_recursive_processing) {
      process_recursive(job);
    }

    // done with this job: free its buffer and the job object itself
    delete[] job.buffer;
    delete &job;
  }