Example #1
int rm_digest_hexstring(RmDigest *digest, char *buffer) {
    static const char *hex = "0123456789abcdef";
    guint8 *input = NULL;
    gsize bytes = 0;
    if(digest == NULL) {
        return 0;
    }

    if(digest->type == RM_DIGEST_PARANOID) {
        input = rm_digest_steal_buffer(digest->shadow_hash);
        bytes = digest->shadow_hash->bytes;
    } else {
        input = rm_digest_steal_buffer(digest);
        bytes = digest->bytes;
    }

    for(gsize i = 0; i < bytes; ++i) {
        buffer[0] = hex[input[i] / 16];
        buffer[1] = hex[input[i] % 16];

        if(i == bytes - 1) {
            buffer[2] = '\0';
        }

        buffer += 2;
    }

    g_slice_free1(bytes, input);
    return bytes * 2 + 1;
}
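A minimal caller sketch (not from rmlint; the helper name and the fixed buffer size are my assumptions, and the RmDigest declaration is assumed to come from rmlint's checksum.h): the destination buffer must hold bytes * 2 hex characters plus the trailing NUL that the loop above appends.

#include <glib.h>
#include "checksum.h" /* assumed rmlint header declaring RmDigest and rm_digest_hexstring */

/* Hypothetical caller: print a finished digest as a hex string. */
static void print_digest_hex(RmDigest *digest) {
    char hexbuf[512]; /* assumed large enough for every supported digest type */
    if(rm_digest_hexstring(digest, hexbuf) > 0) {
        g_printerr("checksum: %s\n", hexbuf);
    }
}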
Example #2
gboolean rm_digest_equal(RmDigest *a, RmDigest *b) {
    guint8 *buf_a = rm_digest_steal_buffer(a);
    guint8 *buf_b = rm_digest_steal_buffer(b);

    gboolean result = !memcmp(buf_a, buf_b, MIN(a->bytes, b->bytes));

    g_slice_free1(a->bytes, buf_a);
    g_slice_free1(b->bytes, buf_b);

    return result;
}
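Note that the comparison covers only the first MIN(a->bytes, b->bytes) bytes, so two digests of different lengths count as equal whenever the shorter buffer is a prefix of the longer one; presumably callers only ever compare digests of the same type and size.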
Example #3
static void rm_directory_add_subdir(RmDirectory *parent, RmDirectory *subdir) {
    if(subdir->was_merged) {
        return;
    }

    parent->mergeups = subdir->mergeups + parent->mergeups + 1;
    parent->dupe_count += subdir->dupe_count;
    g_queue_push_head(&parent->children, subdir);
    parent->prefd_files += subdir->prefd_files;

#ifdef _RM_TREEMERGE_DEBUG
    g_printerr("%55s (%3ld/%3ld) <- %s (%3ld/%3ld)\n", parent->dirname,
               parent->dupe_count, parent->file_count, subdir->dirname,
               subdir->dupe_count, subdir->file_count);
#endif

    /**
     * Here's something weird:
     * - a counter is used and subtracted at once from parent->dupe_count.
     * - it would of course be nicer to subtract it step by step.
     * - but for some weird reason this only works on clang, not gcc.
     * - yes, really. But I tested this, I promise!
     */
    for(GList *iter = subdir->known_files.head; iter; iter = iter->next) {
        int c = rm_directory_add(parent, (RmFile *)iter->data);
        parent->dupe_count -= c;
    }

    /* Inherit the child's checksum */
    unsigned char *subdir_cksum = rm_digest_steal_buffer(subdir->digest);
    rm_digest_update(parent->digest, subdir_cksum, subdir->digest->bytes);
    g_slice_free1(subdir->digest->bytes, subdir_cksum);

    subdir->was_merged = true;
}
Example #4
guint rm_digest_hash(RmDigest *digest) {
    guint8 *buf = NULL;
    gsize bytes = 0;

    if(digest->type == RM_DIGEST_PARANOID) {
        buf = rm_digest_steal_buffer(digest->shadow_hash);
        bytes = digest->shadow_hash->bytes;
    } else {
        buf = rm_digest_steal_buffer(digest);
        bytes = digest->bytes;
    }

    /* Use the first sizeof(RmOff) bytes of the digest, truncated to guint. */
    guint hash = *(RmOff *)buf;
    g_slice_free1(bytes, buf);
    return hash;
}
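rm_digest_hash has the shape of a GHashFunc and rm_digest_equal (Example #2) that of a GEqualFunc, so the pair is presumably meant to key a GHashTable by digest. A small sketch of that use follows (the helper name is made up; the casts only silence the gconstpointer prototype mismatch):

#include <glib.h>
#include "checksum.h" /* assumed rmlint header declaring rm_digest_hash / rm_digest_equal */

/* Hypothetical helper (not part of rmlint): a set keyed by RmDigest pointers,
 * hashed with rm_digest_hash and compared with rm_digest_equal. */
static GHashTable *rm_digest_set_new(void) {
    return g_hash_table_new((GHashFunc)rm_digest_hash,
                            (GEqualFunc)rm_digest_equal);
}

Entries would then be added with g_hash_table_add(set, digest), much as Example #5 does with directory->hash_set.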
Example #5
static int rm_directory_add(RmDirectory *directory, RmFile *file) {
    /* Update the directory's hash with the file's hash.
       Since we cannot be sure in which order the files come in,
       we have to add the hashes cumulatively.
     */
    int new_dupes = 0;

    g_assert(file);
    g_assert(file->digest);
    g_assert(directory);

    guint8 *file_digest = NULL;
    RmOff digest_bytes = 0;

    if(file->digest->type == RM_DIGEST_PARANOID) {
        file_digest = rm_digest_steal_buffer(file->digest->shadow_hash);
        digest_bytes = file->digest->shadow_hash->bytes;
    } else {
        file_digest = rm_digest_steal_buffer(file->digest);
        digest_bytes = file->digest->bytes;
    }

    /* + and not XOR, since ^ would always yield 0 for identical hashes,
     * no matter which ones. That would also be confusing, for me and for
     * debuggers.
     */
    rm_digest_update(directory->digest, file_digest, digest_bytes);

    /* The file value is not really used, but we need some non-null value */
    g_hash_table_add(directory->hash_set, file->digest);

    g_slice_free1(digest_bytes, file_digest);

    if(file->hardlinks.is_head && file->hardlinks.files) {
        new_dupes = 1 + g_queue_get_length(file->hardlinks.files);
    } else {
        new_dupes = 1;
    }

    directory->dupe_count += new_dupes;
    directory->prefd_files += file->is_prefd;

    return new_dupes;
}