/**
 * Transmit a literal and/or match token.
 *
 * This delightfully-named function is called either when we find a
 * match and need to transmit all the unmatched data leading up to it,
 * or when we get bored of accumulating literal data and just need to
 * transmit it.  As a result of this second case, it is called even if
 * we have not matched at all!
 *
 * @param i If >= 0, the number of a matched token.  If < 0, indicates
 * we have only literal data.
 **/
static void matched(int f, struct sum_struct *s, struct map_struct *buf,
		    OFF_T offset, int32 i)
{
	int32 n = (int32)(offset - last_match); /* max value: block_size (int32) */
	int32 j;

	if (verbose > 2 && i >= 0) {
		rprintf(FINFO,
			"match at %.0f last_match=%.0f j=%d len=%ld n=%ld\n",
			(double)offset, (double)last_match, i,
			(long)s->sums[i].len, (long)n);
	}

	send_token(f, i, buf, last_match, n, i < 0 ? 0 : s->sums[i].len);
	data_transfer += n;

	if (i >= 0) {
		stats.matched_data += s->sums[i].len;
		n += s->sums[i].len;
	}

	for (j = 0; j < n; j += CHUNK_SIZE) {
		int32 n1 = MIN(CHUNK_SIZE, n - j);
		sum_update(map_ptr(buf, last_match + j, n1), n1);
	}

	if (i >= 0)
		last_match = offset + s->sums[i].len;
	else
		last_match = offset;

	if (buf && do_progress)
		show_progress(last_match, buf->file_size);
}
/* Transmit a literal and/or match token.
 *
 * This delightfully-named function is called either when we find a
 * match and need to transmit all the unmatched data leading up to it,
 * or when we get bored of accumulating literal data and just need to
 * transmit it.  As a result of this second case, it is called even if
 * we have not matched at all!
 *
 * If i >= 0, the number of a matched token.  If < 0, indicates we have
 * only literal data.  A -1 will send a 0-token-int too, and a -2 sends
 * only literal data, w/o any token-int. */
static void matched(int f, struct sum_struct *s, struct map_struct *buf,
		    OFF_T offset, int32 i)
{
	int32 n = (int32)(offset - last_match); /* max value: block_size (int32) */
	int32 j;

	if (DEBUG_GTE(DELTASUM, 2) && i >= 0) {
		rprintf(FINFO,
			"match at %s last_match=%s j=%d len=%ld n=%ld\n",
			big_num(offset), big_num(last_match), i,
			(long)s->sums[i].len, (long)n);
	}

	send_token(f, i, buf, last_match, n, i < 0 ? 0 : s->sums[i].len);
	data_transfer += n;

	if (i >= 0) {
		stats.matched_data += s->sums[i].len;
		n += s->sums[i].len;
	}

	for (j = 0; j < n; j += CHUNK_SIZE) {
		int32 n1 = MIN(CHUNK_SIZE, n - j);
		sum_update(map_ptr(buf, last_match + j, n1), n1);
	}

	if (i >= 0)
		last_match = offset + s->sums[i].len;
	else
		last_match = offset;

	if (buf && INFO_GTE(PROGRESS, 1))
		show_progress(last_match, buf->file_size);
}
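The last_match bookkeeping in the two matched() variants above is easiest to see with concrete numbers. Below is a minimal, self-contained sketch (not rsync's code; the 700-byte block length and the offsets are made-up values) that advances last_match the same way a literal run or a match would:

#include <stdio.h>

/* Illustrative only: mimic matched()'s last_match bookkeeping.  A call with
 * token >= 0 flushes the literal run [last_match, offset) and then consumes a
 * matched block of blen bytes; a call with token < 0 flushes literals only. */
static long last_match = 0;

static void matched_demo(long offset, int token, long blen)
{
	long n = offset - last_match;   /* unmatched bytes to send as literal data */

	if (token >= 0) {
		printf("send %ld literal bytes, then token %d (%ld matched bytes)\n",
		       n, token, blen);
		last_match = offset + blen;
	} else {
		printf("send %ld literal bytes, no match\n", n);
		last_match = offset;
	}
}

int main(void)
{
	const long blen = 700;          /* hypothetical block length */

	matched_demo(0, 0, blen);       /* match right at the start: 0 literals */
	matched_demo(1900, 5, blen);    /* 1200 literal bytes, then block 5     */
	matched_demo(3000, -1, blen);   /* trailing 400 literal bytes, no match */
	return 0;
}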
/**
 * Generate a stream of signatures/checksums that describe a buffer.
 *
 * Generate approximately one checksum every @p n bytes.
 *
 * @return Newly-allocated sum_struct
 **/
static struct sum_struct *generate_sums(struct map_struct *buf, OFF_T len, int n)
{
	int i;
	struct sum_struct *s;
	int count;
	int block_len = n;
	int remainder = (len % block_len);
	OFF_T offset = 0;

	count = (len + (block_len - 1)) / block_len;

	s = (struct sum_struct *)malloc(sizeof(*s));
	if (!s)
		out_of_memory("generate_sums");

	s->count = count;
	s->remainder = remainder;
	s->n = n;
	s->flength = len;

	if (count == 0) {
		s->sums = NULL;
		return s;
	}

	if (verbose > 3) {
		rprintf(FINFO, "count=%ld rem=%ld n=%ld flength=%.0f\n",
			(long)s->count, (long)s->remainder, (long)s->n,
			(double)s->flength);
	}

	s->sums = (struct sum_buf *)malloc(sizeof(s->sums[0]) * s->count);
	if (!s->sums)
		out_of_memory("generate_sums");

	for (i = 0; i < count; i++) {
		int n1 = MIN(len, n);
		char *map = map_ptr(buf, offset, n1);

		s->sums[i].sum1 = get_checksum1(map, n1);
		get_checksum2(map, n1, s->sums[i].sum2);

		s->sums[i].offset = offset;
		s->sums[i].len = n1;
		s->sums[i].i = i;

		if (verbose > 3) {
			rprintf(FINFO,
				"chunk[%d] offset=%.0f len=%d sum1=%08x\n",
				i, (double)s->sums[i].offset,
				s->sums[i].len, s->sums[i].sum1);
		}

		len -= n1;
		offset += n1;
	}

	return s;
}
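The count/remainder arithmetic in generate_sums() can be sanity-checked in isolation. A small stand-alone sketch (illustrative only; the file lengths and the 700-byte block size are arbitrary) reproducing that arithmetic:

#include <stdio.h>

/* Illustrative only: mirrors the count/remainder computation used by
 * generate_sums() above for a given file length and block size. */
static void show_layout(long len, int block_len)
{
	long count = (len + (block_len - 1)) / block_len; /* ceil(len / block_len) */
	int remainder = (int)(len % block_len);           /* size of the short final block, 0 if none */

	printf("len=%ld block=%d -> count=%ld remainder=%d\n",
	       len, block_len, count, remainder);
}

int main(void)
{
	show_layout(0, 700);      /* count=0: no checksums at all       */
	show_layout(2100, 700);   /* count=3, remainder=0: all full     */
	show_layout(2500, 700);   /* count=4, remainder=400: short tail */
	return 0;
}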
void DoMapExport(const std::vector<Image16Bpp>& images, const std::vector<Image16Bpp>& tilesets)
{
    if (tilesets.empty())
        FatalLog("Map export specified however --tileset not given");

    // Form the tileset from the images given; this is a dummy.
    std::shared_ptr<Tileset> tileset(new Tileset(tilesets, "", params.bpp, params.affine));

    for (const auto& image : images)
    {
        std::shared_ptr<Exportable> map_ptr(new Map(image, tileset, params.affine));
        header.Add(map_ptr);
        implementation.Add(map_ptr);
    }
}
static void map_callback(const sensor_msgs::PointCloud2::ConstPtr& input)
{
    if (map_loaded == 0)
    {
        // Convert the data type (from sensor_msgs to pcl).
        pcl::fromROSMsg(*input, map);
        pcl::PointCloud<pcl::PointXYZ>::Ptr map_ptr(new pcl::PointCloud<pcl::PointXYZ>(map));

        // Setting point cloud to be aligned to.
        ndt.setInputTarget(map_ptr);

        // Setting NDT parameters to default values.
        ndt.setMaximumIterations(iter);
        ndt.setResolution(ndt_res);
        ndt.setStepSize(step_size);
        ndt.setTransformationEpsilon(trans_eps);

        map_loaded = 1;
    }
}
void DoMode0Export(const std::vector<Image16Bpp>& images)
{
    // If split then form several maps.
    // If !split then start a scene.
    // Add appropriate object to header/implementation.
    if (params.split)
    {
        for (const auto& image : images)
        {
            std::shared_ptr<Exportable> map_ptr(new Map(image, params.bpp, params.affine));
            header.Add(map_ptr);
            implementation.Add(map_ptr);
        }
    }
    else
    {
        std::shared_ptr<Exportable> scene(new MapScene(images, params.symbol_base_name, params.bpp, params.affine));
        header.Add(scene);
        implementation.Add(scene);
    }
}
/**
 * Scan through an origin file, looking for sections that match
 * checksums from the generator, and transmit either literal or token
 * data.
 *
 * Also calculates the MD4 checksum of the whole file, using the md
 * accumulator.  This is transmitted with the file as protection
 * against corruption on the wire.
 *
 * @param s Checksums received from the generator.  If <tt>s->count ==
 * 0</tt>, then there are actually no checksums for this file.
 *
 * @param len Length of the file to send.
 **/
void match_sums(int f, struct sum_struct *s, struct map_struct *buf, OFF_T len)
{
	int sum_len;

	last_match = 0;
	false_alarms = 0;
	hash_hits = 0;
	matches = 0;
	data_transfer = 0;

	sum_init(xfersum_type, checksum_seed);

	if (append_mode > 0) {
		if (append_mode == 2) {
			OFF_T j = 0;
			for (j = CHUNK_SIZE; j < s->flength; j += CHUNK_SIZE) {
				if (buf && INFO_GTE(PROGRESS, 1))
					show_progress(last_match, buf->file_size);
				sum_update(map_ptr(buf, last_match, CHUNK_SIZE),
					   CHUNK_SIZE);
				last_match = j;
			}
			if (last_match < s->flength) {
				int32 n = (int32)(s->flength - last_match);
				if (buf && INFO_GTE(PROGRESS, 1))
					show_progress(last_match, buf->file_size);
				sum_update(map_ptr(buf, last_match, n), n);
			}
		}
		last_match = s->flength;
		s->count = 0;
	}

	if (len > 0 && s->count > 0) {
		build_hash_table(s);

		if (DEBUG_GTE(DELTASUM, 2))
			rprintf(FINFO, "built hash table\n");

		hash_search(f, s, buf, len);

		if (DEBUG_GTE(DELTASUM, 2))
			rprintf(FINFO, "done hash search\n");
	} else {
		OFF_T j;
		/* by doing this in pieces we avoid too many seeks */
		for (j = last_match + CHUNK_SIZE; j < len; j += CHUNK_SIZE)
			matched(f, s, buf, j, -2);
		matched(f, s, buf, len, -1);
	}

	sum_len = sum_end(sender_file_sum);

	/* If we had a read error, send a bad checksum.  We use all bits
	 * off as long as the checksum doesn't happen to be that, in
	 * which case we turn the last 0 bit into a 1. */
	if (buf && buf->status != 0) {
		int i;
		for (i = 0; i < sum_len && sender_file_sum[i] == 0; i++) {}
		memset(sender_file_sum, 0, sum_len);
		if (i == sum_len)
			sender_file_sum[i-1]++;
	}

	if (DEBUG_GTE(DELTASUM, 2))
		rprintf(FINFO, "sending file_sum\n");
	write_buf(f, sender_file_sum, sum_len);

	if (DEBUG_GTE(DELTASUM, 2)) {
		rprintf(FINFO, "false_alarms=%d hash_hits=%d matches=%d\n",
			false_alarms, hash_hits, matches);
	}

	total_hash_hits += hash_hits;
	total_false_alarms += false_alarms;
	total_matches += matches;
	stats.literal_data += data_transfer;
}
static void hash_search(int f,struct sum_struct *s,
			struct map_struct *buf, OFF_T len)
{
	OFF_T offset, aligned_offset, end;
	int32 k, want_i, aligned_i, backup;
	char sum2[SUM_LENGTH];
	uint32 s1, s2, sum;
	int more;
	schar *map;

	/* want_i is used to encourage adjacent matches, allowing the RLL
	 * coding of the output to work more efficiently. */
	want_i = 0;

	if (DEBUG_GTE(DELTASUM, 2)) {
		rprintf(FINFO, "hash search b=%ld len=%s\n",
			(long)s->blength, big_num(len));
	}

	k = (int32)MIN(len, (OFF_T)s->blength);

	map = (schar *)map_ptr(buf, 0, k);

	sum = get_checksum1((char *)map, k);
	s1 = sum & 0xFFFF;
	s2 = sum >> 16;
	if (DEBUG_GTE(DELTASUM, 3))
		rprintf(FINFO, "sum=%.8x k=%ld\n", sum, (long)k);

	offset = aligned_offset = aligned_i = 0;

	end = len + 1 - s->sums[s->count-1].len;

	if (DEBUG_GTE(DELTASUM, 3)) {
		rprintf(FINFO, "hash search s->blength=%ld len=%s count=%s\n",
			(long)s->blength, big_num(len), big_num(s->count));
	}

	do {
		int done_csum2 = 0;
		uint32 hash_entry;
		int32 i, *prev;

		if (DEBUG_GTE(DELTASUM, 4)) {
			rprintf(FINFO, "offset=%s sum=%04x%04x\n",
				big_num(offset), s2 & 0xFFFF, s1 & 0xFFFF);
		}

		if (tablesize == TRADITIONAL_TABLESIZE) {
			hash_entry = SUM2HASH2(s1,s2);
			if ((i = hash_table[hash_entry]) < 0)
				goto null_hash;
			sum = (s1 & 0xffff) | (s2 << 16);
		} else {
			sum = (s1 & 0xffff) | (s2 << 16);
			hash_entry = BIG_SUM2HASH(sum);
			if ((i = hash_table[hash_entry]) < 0)
				goto null_hash;
		}
		prev = &hash_table[hash_entry];

		hash_hits++;
		do {
			int32 l;

			/* When updating in-place, the chunk's offset must be
			 * either >= our offset or identical data at that offset.
			 * Remove any bypassed entries that we can never use. */
			if (updating_basis_file && s->sums[i].offset < offset
			 && !(s->sums[i].flags & SUMFLG_SAME_OFFSET)) {
				*prev = s->sums[i].chain;
				continue;
			}
			prev = &s->sums[i].chain;

			if (sum != s->sums[i].sum1)
				continue;

			/* also make sure the two blocks are the same length */
			l = (int32)MIN((OFF_T)s->blength, len-offset);
			if (l != s->sums[i].len)
				continue;

			if (DEBUG_GTE(DELTASUM, 3)) {
				rprintf(FINFO,
					"potential match at %s i=%ld sum=%08x\n",
					big_num(offset), (long)i, sum);
			}

			if (!done_csum2) {
				map = (schar *)map_ptr(buf,offset,l);
				get_checksum2((char *)map,l,sum2);
				done_csum2 = 1;
			}

			if (memcmp(sum2,s->sums[i].sum2,s->s2length) != 0) {
				false_alarms++;
				continue;
			}

			/* When updating in-place, the best possible match is
			 * one with an identical offset, so we prefer that over
			 * the adjacent want_i optimization. */
			if (updating_basis_file) {
				/* All the generator's chunks start at blength boundaries. */
				while (aligned_offset < offset) {
					aligned_offset += s->blength;
					aligned_i++;
				}
				if ((offset == aligned_offset
				  || (sum == 0 && l == s->blength && aligned_offset + l <= len))
				 && aligned_i < s->count) {
					if (i != aligned_i) {
						if (sum != s->sums[aligned_i].sum1
						 || l != s->sums[aligned_i].len
						 || memcmp(sum2, s->sums[aligned_i].sum2, s->s2length) != 0)
							goto check_want_i;
						i = aligned_i;
					}
					if (offset != aligned_offset) {
						/* We've matched some zeros in a spot that is also zeros
						 * further along in the basis file.  If we find zeros ahead
						 * in the sender's file, we'll output enough literal data
						 * to re-align with the basis file, and get back to seeking
						 * instead of writing. */
						backup = (int32)(aligned_offset - last_match);
						if (backup < 0)
							backup = 0;
						map = (schar *)map_ptr(buf, aligned_offset - backup, l + backup)
						    + backup;
						sum = get_checksum1((char *)map, l);
						if (sum != s->sums[i].sum1)
							goto check_want_i;
						get_checksum2((char *)map, l, sum2);
						if (memcmp(sum2, s->sums[i].sum2, s->s2length) != 0)
							goto check_want_i;
						/* OK, we have a re-alignment match.  Bump the offset
						 * forward to the new match point. */
						offset = aligned_offset;
					}
					/* This identical chunk is in the same spot in the old and new file. */
					s->sums[i].flags |= SUMFLG_SAME_OFFSET;
					want_i = i;
				}
			}

		  check_want_i:
			/* we've found a match, but now check to see
			 * if want_i can hint at a better match. */
			if (i != want_i && want_i < s->count
			 && (!updating_basis_file || s->sums[want_i].offset >= offset
			  || s->sums[want_i].flags & SUMFLG_SAME_OFFSET)
			 && sum == s->sums[want_i].sum1
			 && memcmp(sum2, s->sums[want_i].sum2, s->s2length) == 0) {
				/* we've found an adjacent match - the RLL coder
				 * will be happy */
				i = want_i;
			}
			want_i = i + 1;

			matched(f,s,buf,offset,i);
			offset += s->sums[i].len - 1;
			k = (int32)MIN((OFF_T)s->blength, len-offset);
			map = (schar *)map_ptr(buf, offset, k);
			sum = get_checksum1((char *)map, k);
			s1 = sum & 0xFFFF;
			s2 = sum >> 16;
			matches++;
			break;
		} while ((i = s->sums[i].chain) >= 0);

	  null_hash:
		backup = (int32)(offset - last_match);
		/* We sometimes read 1 byte prior to last_match... */
		if (backup < 0)
			backup = 0;

		/* Trim off the first byte from the checksum */
		more = offset + k < len;
		map = (schar *)map_ptr(buf, offset - backup, k + more + backup)
		    + backup;
		s1 -= map[0] + CHAR_OFFSET;
		s2 -= k * (map[0]+CHAR_OFFSET);

		/* Add on the next byte (if there is one) to the checksum */
		if (more) {
			s1 += map[k] + CHAR_OFFSET;
			s2 += s1;
		} else
			--k;

		/* By matching early we avoid re-reading the data 3 times in the
		 * case where a token match comes a long way after last
		 * match.  The 3 reads are caused by the running match, the
		 * checksum update and the literal send. */
		if (backup >= s->blength+CHUNK_SIZE && end-offset > CHUNK_SIZE)
			matched(f, s, buf, offset - s->blength, -2);
	} while (++offset < end);

	matched(f, s, buf, len, -1);
	map_ptr(buf, len-1, 1);
}
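Both hash_search() variants rely on the weak checksum being cheap to roll forward one byte at a time (the s1/s2 updates in the null_hash path above). The sketch below is a simplified stand-in, not rsync's get_checksum1() (CHAR_OFFSET is assumed to be 0 and the checksum is reimplemented locally); it only demonstrates that the rolling update yields the same packed value as recomputing the window from scratch:

#include <stdio.h>
#include <stdint.h>
#include <string.h>

/* Illustrative stand-in for the weak checksum: s1 is the plain byte sum,
 * s2 is the running sum of s1, and the result packs s1 into the low 16 bits
 * and s2 into the high 16 bits. */
#define CHAR_OFFSET 0

static uint32_t weak_sum(const unsigned char *buf, int len)
{
	uint32_t s1 = 0, s2 = 0;
	for (int i = 0; i < len; i++) {
		s1 += buf[i] + CHAR_OFFSET;
		s2 += s1;
	}
	return (s1 & 0xffff) | (s2 << 16);
}

int main(void)
{
	const unsigned char data[] = "the quick brown fox jumps over the lazy dog";
	const int len = (int)strlen((const char *)data);
	const int k = 8;                /* demo block length */

	uint32_t sum = weak_sum(data, k);
	uint32_t s1 = sum & 0xffff;     /* unpack, as hash_search() does */
	uint32_t s2 = sum >> 16;

	/* Slide the window one byte at a time, exactly as the null_hash path
	 * does: drop the outgoing byte, then add the incoming byte. */
	for (int off = 0; off + k < len; off++) {
		s1 -= data[off] + CHAR_OFFSET;
		s2 -= k * (data[off] + CHAR_OFFSET);
		s1 += data[off + k] + CHAR_OFFSET;
		s2 += s1;

		uint32_t rolled = (s1 & 0xffff) | (s2 << 16);
		uint32_t fresh  = weak_sum(data + off + 1, k);
		printf("offset=%d rolled=%08x fresh=%08x %s\n",
		       off + 1, rolled, fresh, rolled == fresh ? "ok" : "MISMATCH");
	}
	return 0;
}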
static int receive_data(int f_in, char *fname_r, int fd_r, OFF_T size_r,
			const char *fname, int fd, OFF_T total_size)
{
	static char file_sum1[MAX_DIGEST_LEN];
	static char file_sum2[MAX_DIGEST_LEN];
	struct map_struct *mapbuf;
	struct sum_struct sum;
	int32 len, sum_len;
	OFF_T offset = 0;
	OFF_T offset2;
	char *data;
	int32 i;
	char *map = NULL;

	read_sum_head(f_in, &sum);

	if (fd_r >= 0 && size_r > 0) {
		int32 read_size = MAX(sum.blength * 2, 16*1024);
		mapbuf = map_file(fd_r, size_r, read_size, sum.blength);
		if (verbose > 2) {
			rprintf(FINFO, "recv mapped %s of size %.0f\n",
				fname_r, (double)size_r);
		}
	} else
		mapbuf = NULL;

	sum_init(checksum_seed);

	if (append_mode > 0) {
		OFF_T j;
		sum.flength = (OFF_T)sum.count * sum.blength;
		if (sum.remainder)
			sum.flength -= sum.blength - sum.remainder;
		if (append_mode == 2) {
			for (j = CHUNK_SIZE; j < sum.flength; j += CHUNK_SIZE) {
				if (do_progress)
					show_progress(offset, total_size);
				sum_update(map_ptr(mapbuf, offset, CHUNK_SIZE),
					   CHUNK_SIZE);
				offset = j;
			}
			if (offset < sum.flength) {
				int32 len = (int32)(sum.flength - offset);
				if (do_progress)
					show_progress(offset, total_size);
				sum_update(map_ptr(mapbuf, offset, len), len);
			}
		}
		offset = sum.flength;
		if (fd != -1 && (j = do_lseek(fd, offset, SEEK_SET)) != offset) {
			rsyserr(FERROR_XFER, errno,
				"lseek of %s returned %.0f, not %.0f",
				full_fname(fname), (double)j, (double)offset);
			exit_cleanup(RERR_FILEIO);
		}
	}

	while ((i = recv_token(f_in, &data)) != 0) {
		if (do_progress)
			show_progress(offset, total_size);

		if (i > 0) {
			if (verbose > 3) {
				rprintf(FINFO,"data recv %d at %.0f\n",
					i,(double)offset);
			}

			stats.literal_data += i;
			cleanup_got_literal = 1;

			sum_update(data, i);

			if (fd != -1 && write_file(fd,data,i) != i)
				goto report_write_error;
			offset += i;
			continue;
		}

		i = -(i+1);
		offset2 = i * (OFF_T)sum.blength;
		len = sum.blength;
		if (i == (int)sum.count-1 && sum.remainder != 0)
			len = sum.remainder;

		stats.matched_data += len;

		if (verbose > 3) {
			rprintf(FINFO,
				"chunk[%d] of size %ld at %.0f offset=%.0f\n",
				i, (long)len, (double)offset2, (double)offset);
		}

		if (mapbuf) {
			map = map_ptr(mapbuf,offset2,len);

			see_token(map, len);
			sum_update(map, len);
		}

		if (updating_basis_or_equiv) {
			if (offset == offset2 && fd != -1) {
				OFF_T pos;
				if (flush_write_file(fd) < 0)
					goto report_write_error;
				offset += len;
				if ((pos = do_lseek(fd, len, SEEK_CUR)) != offset) {
					rsyserr(FERROR_XFER, errno,
						"lseek of %s returned %.0f, not %.0f",
						full_fname(fname),
						(double)pos, (double)offset);
					exit_cleanup(RERR_FILEIO);
				}
				continue;
			}
		}
		if (fd != -1 && map && write_file(fd, map, len) != (int)len)
			goto report_write_error;
		offset += len;
	}

	if (flush_write_file(fd) < 0)
		goto report_write_error;

#ifdef HAVE_FTRUNCATE
	if (inplace && fd != -1 && ftruncate(fd, offset) < 0) {
		rsyserr(FERROR_XFER, errno, "ftruncate failed on %s",
			full_fname(fname));
	}
#endif

	if (do_progress)
		end_progress(total_size);

	if (fd != -1 && offset > 0 && sparse_end(fd) != 0) {
	    report_write_error:
		rsyserr(FERROR_XFER, errno, "write failed on %s",
			full_fname(fname));
		exit_cleanup(RERR_FILEIO);
	}

	sum_len = sum_end(file_sum1);

	if (mapbuf)
		unmap_file(mapbuf);

	read_buf(f_in, file_sum2, sum_len);
	if (verbose > 2)
		rprintf(FINFO,"got file_sum\n");
	if (fd != -1 && memcmp(file_sum1, file_sum2, sum_len) != 0)
		return 0;
	return 1;
}
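The token convention that receive_data() decodes is visible in the loop above: a positive value from recv_token() is a count of literal bytes, zero terminates the stream, and a negative value i names basis block -(i+1). Here is a self-contained sketch (the block layout and token values are hypothetical) that turns tokens into a block index, length, and basis offset using the same arithmetic:

#include <stdio.h>

/* Hypothetical token values for illustration; in rsync they come from
 * recv_token().  A positive token is a literal byte count; zero ends the
 * stream; a negative token i refers to basis block -(i+1). */
static void decode_token(int token, long blength, long count, long remainder)
{
	if (token > 0) {
		printf("token=%d -> %d literal bytes follow\n", token, token);
		return;
	}
	if (token == 0) {
		printf("token=0 -> end of token stream\n");
		return;
	}

	int i = -(token + 1);                 /* block index in the basis file */
	long offset2 = (long)i * blength;     /* where that block starts */
	long len = blength;
	if (i == count - 1 && remainder != 0) /* final block may be short */
		len = remainder;

	printf("token=%d -> block %d: copy %ld bytes from basis offset %ld\n",
	       token, i, len, offset2);
}

int main(void)
{
	/* Example layout: 4 blocks of 700 bytes, last one only 400 bytes long. */
	decode_token(123, 700, 4, 400);   /* literal run            */
	decode_token(-1, 700, 4, 400);    /* block 0 at offset 0    */
	decode_token(-4, 700, 4, 400);    /* block 3, the short one */
	decode_token(0, 700, 4, 400);     /* terminator             */
	return 0;
}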
/**
 * Scan through an origin file, looking for sections that match
 * checksums from the generator, and transmit either literal or token
 * data.
 *
 * Also calculates the MD4 checksum of the whole file, using the md
 * accumulator.  This is transmitted with the file as protection
 * against corruption on the wire.
 *
 * @param s Checksums received from the generator.  If <tt>s->count ==
 * 0</tt>, then there are actually no checksums for this file.
 *
 * @param len Length of the file to send.
 **/
void match_sums(int f, struct sum_struct *s, struct map_struct *buf, OFF_T len)
{
	char file_sum[MD4_SUM_LENGTH];

	last_match = 0;
	false_alarms = 0;
	hash_hits = 0;
	matches = 0;
	data_transfer = 0;

	sum_init(checksum_seed);

	if (append_mode > 0) {
		OFF_T j = 0;
		for (j = CHUNK_SIZE; j < s->flength; j += CHUNK_SIZE) {
			if (buf && do_progress)
				show_progress(last_match, buf->file_size);
			sum_update(map_ptr(buf, last_match, CHUNK_SIZE),
				   CHUNK_SIZE);
			last_match = j;
		}
		if (last_match < s->flength) {
			int32 len = (int32)(s->flength - last_match);
			if (buf && do_progress)
				show_progress(last_match, buf->file_size);
			sum_update(map_ptr(buf, last_match, len), len);
			last_match = s->flength;
		}
		s->count = 0;
	}

	if (len > 0 && s->count > 0) {
		build_hash_table(s);

		if (verbose > 2)
			rprintf(FINFO,"built hash table\n");

		hash_search(f,s,buf,len);

		if (verbose > 2)
			rprintf(FINFO,"done hash search\n");
	} else {
		OFF_T j;
		/* by doing this in pieces we avoid too many seeks */
		for (j = last_match + CHUNK_SIZE; j < len; j += CHUNK_SIZE)
			matched(f, s, buf, j, -2);
		matched(f, s, buf, len, -1);
	}

	sum_end(file_sum);
	/* If we had a read error, send a bad checksum. */
	if (buf && buf->status != 0)
		file_sum[0]++;

	if (verbose > 2)
		rprintf(FINFO,"sending file_sum\n");
	write_buf(f,file_sum,MD4_SUM_LENGTH);

	if (verbose > 2)
		rprintf(FINFO, "false_alarms=%d hash_hits=%d matches=%d\n",
			false_alarms, hash_hits, matches);

	total_hash_hits += hash_hits;
	total_false_alarms += false_alarms;
	total_matches += matches;
	stats.literal_data += data_transfer;
}
static void hash_search(int f,struct sum_struct *s,
			struct map_struct *buf, OFF_T len)
{
	OFF_T offset, end;
	int32 k, want_i, backup;
	char sum2[SUM_LENGTH];
	uint32 s1, s2, sum;
	int more;
	schar *map;

	/* want_i is used to encourage adjacent matches, allowing the RLL
	 * coding of the output to work more efficiently. */
	want_i = 0;

	if (verbose > 2) {
		rprintf(FINFO, "hash search b=%ld len=%.0f\n",
			(long)s->blength, (double)len);
	}

	k = (int32)MIN(len, (OFF_T)s->blength);

	map = (schar *)map_ptr(buf, 0, k);

	sum = get_checksum1((char *)map, k);
	s1 = sum & 0xFFFF;
	s2 = sum >> 16;
	if (verbose > 3)
		rprintf(FINFO, "sum=%.8x k=%ld\n", sum, (long)k);

	offset = 0;

	end = len + 1 - s->sums[s->count-1].len;

	if (verbose > 3) {
		rprintf(FINFO,
			"hash search s->blength=%ld len=%.0f count=%.0f\n",
			(long)s->blength, (double)len, (double)s->count);
	}

	do {
		int done_csum2 = 0;
		int32 i;

		if (verbose > 4) {
			rprintf(FINFO, "offset=%.0f sum=%04x%04x\n",
				(double)offset, s2 & 0xFFFF, s1 & 0xFFFF);
		}

		i = hash_table[SUM2HASH2(s1,s2)];
		if (i < 0)
			goto null_hash;

		sum = (s1 & 0xffff) | (s2 << 16);
		hash_hits++;
		do {
			int32 l;

			if (sum != s->sums[i].sum1)
				continue;

			/* also make sure the two blocks are the same length */
			l = (int32)MIN((OFF_T)s->blength, len-offset);
			if (l != s->sums[i].len)
				continue;

			/* in-place: ensure chunk's offset is either >= our
			 * offset or that the data didn't move. */
			if (updating_basis_file && s->sums[i].offset < offset
			    && !(s->sums[i].flags & SUMFLG_SAME_OFFSET))
				continue;

			if (verbose > 3) {
				rprintf(FINFO,
					"potential match at %.0f i=%ld sum=%08x\n",
					(double)offset, (long)i, sum);
			}

			if (!done_csum2) {
				map = (schar *)map_ptr(buf,offset,l);
				get_checksum2((char *)map,l,sum2);
				done_csum2 = 1;
			}

			if (memcmp(sum2,s->sums[i].sum2,s->s2length) != 0) {
				false_alarms++;
				continue;
			}

			/* When updating in-place, the best possible match is
			 * one with an identical offset, so we prefer that over
			 * the following want_i optimization. */
			if (updating_basis_file) {
				int32 i2;
				for (i2 = i; i2 >= 0; i2 = s->sums[i2].chain) {
					if (s->sums[i2].offset != offset)
						continue;
					if (i2 != i) {
						if (sum != s->sums[i2].sum1)
							break;
						if (memcmp(sum2, s->sums[i2].sum2,
							   s->s2length) != 0)
							break;
						i = i2;
					}
					/* This chunk was at the same offset on
					 * both the sender and the receiver. */
					s->sums[i].flags |= SUMFLG_SAME_OFFSET;
					goto set_want_i;
				}
			}

			/* we've found a match, but now check to see
			 * if want_i can hint at a better match. */
			if (i != want_i && want_i < s->count
			    && (!updating_basis_file || s->sums[want_i].offset >= offset
			     || s->sums[want_i].flags & SUMFLG_SAME_OFFSET)
			    && sum == s->sums[want_i].sum1
			    && memcmp(sum2, s->sums[want_i].sum2, s->s2length) == 0) {
				/* we've found an adjacent match - the RLL coder
				 * will be happy */
				i = want_i;
			}
		    set_want_i:
			want_i = i + 1;

			matched(f,s,buf,offset,i);
			offset += s->sums[i].len - 1;
			k = (int32)MIN((OFF_T)s->blength, len-offset);
			map = (schar *)map_ptr(buf, offset, k);
			sum = get_checksum1((char *)map, k);
			s1 = sum & 0xFFFF;
			s2 = sum >> 16;
			matches++;
			break;
		} while ((i = s->sums[i].chain) >= 0);

	  null_hash:
		backup = (int32)(offset - last_match);
		/* We sometimes read 1 byte prior to last_match... */
		if (backup < 0)
			backup = 0;

		/* Trim off the first byte from the checksum */
		more = offset + k < len;
		map = (schar *)map_ptr(buf, offset - backup, k + more + backup)
		    + backup;
		s1 -= map[0] + CHAR_OFFSET;
		s2 -= k * (map[0]+CHAR_OFFSET);

		/* Add on the next byte (if there is one) to the checksum */
		if (more) {
			s1 += map[k] + CHAR_OFFSET;
			s2 += s1;
		} else
			--k;

		/* By matching early we avoid re-reading the data 3 times in the
		 * case where a token match comes a long way after last
		 * match.  The 3 reads are caused by the running match, the
		 * checksum update and the literal send. */
		if (backup >= s->blength+CHUNK_SIZE && end-offset > CHUNK_SIZE)
			matched(f, s, buf, offset - s->blength, -2);
	} while (++offset < end);

	matched(f, s, buf, len, -1);
	map_ptr(buf, len-1, 1);
}
/// create empty map
///
var var::new_map()
{
    return var(map_ptr(new map_t));
}
static int receive_data(int f_in, char *fname_r, int fd_r, OFF_T size_r,
			const char *fname, int fd, OFF_T total_size,
			struct sum_struct *sum, int numMatchTokens,
			int nextToken, char *nextData, char *file_sum2)
{
	static char file_sum1[MAX_DIGEST_LEN];
	struct map_struct *mapbuf;
	int32 len, sum_len;
	OFF_T offset = 0;
	OFF_T offset2;
	int offsetDefer;
	char *data;
	int32 i;
	char *map = NULL;
	int replayTokenCnt = 0;

	if (fd_r >= 0 && size_r > 0) {
		int32 read_size = MAX(sum->blength * 2, 16*1024);
		mapbuf = map_file(fd_r, size_r, read_size, sum->blength);
		if (verbose > 2) {
			rprintf(FINFO, "recv mapped %s of size %.0f\n",
				fname_r, (double)size_r);
		}
	} else
		mapbuf = NULL;

	sum_init(checksum_seed);

	if (append_mode > 0) {
		OFF_T j;
		sum->flength = (OFF_T)sum->count * sum->blength;
		if (sum->remainder)
			sum->flength -= sum->blength - sum->remainder;
		if (append_mode == 2) {
			for (j = CHUNK_SIZE; j < sum->flength; j += CHUNK_SIZE) {
				if (do_progress)
					show_progress(offset, total_size);
				sum_update(map_ptr(mapbuf, offset, CHUNK_SIZE),
					   CHUNK_SIZE);
				offset = j;
			}
			if (offset < sum->flength) {
				int32 len = (int32)(sum->flength - offset);
				if (do_progress)
					show_progress(offset, total_size);
				sum_update(map_ptr(mapbuf, offset, len), len);
			}
		}
		offset = sum->flength;
		if (fd != -1 && (j = do_lseek(fd, offset, SEEK_SET)) != offset) {
			rsyserr(FERROR_XFER, errno,
				"lseek of %s returned %.0f, not %.0f",
				full_fname(fname), (double)j, (double)offset);
			exit_cleanup(RERR_FILEIO);
		}
	}

	offsetDefer = 0;
	while ( 1 ) {
		/*
		 * We have to replay any tokens that were potentially read ahead
		 * to see if the file was identical.
		 *     numMatchTokens <  0 means there are no replay tokens.
		 *     numMatchTokens >= 0 means there are numMatchTokens from -1
		 *                         to -numMatchTokens, followed by
		 *                         (nextToken, *nextData).
		 *
		 * If numMatchTokens >= 0 and nextToken == 0, then file_sum2
		 * was already read from f_in.  Otherwise, we need to read it
		 * here.
		 */
		if ( replayTokenCnt >= 0 && numMatchTokens >= 0 ) {
			if ( replayTokenCnt < numMatchTokens ) {
				/*
				 * replay -1, -2, ..., -numMatchTokens
				 */
				i = -replayTokenCnt - 1;
				replayTokenCnt++;
			} else {
				/*
				 * replay the next token - after this we are
				 * up to date.
				 */
				i = nextToken;
				data = nextData;
				replayTokenCnt = -1;
			}
		} else {
			i = recv_token(f_in, &data);
		}
		if ( i == 0 )
			break;

		if (do_progress)
			show_progress(offset, total_size);

		if (i > 0) {
			if (verbose > 3) {
				rprintf(FINFO,"data recv %d at %.0f\n",
					i,(double)offset);
			}

			stats.literal_data += i;
			cleanup_got_literal = 1;

			sum_update(data, i);

			if ( offsetDefer ) {
				OFF_T pos;
				if (flush_write_file(fd) < 0)
					goto report_write_error;
				if ((pos = do_lseek(fd, offset, SEEK_SET)) != offset) {
					rsyserr(FERROR_XFER, errno,
						"lseek of %s returned %.0f, not %.0f",
						full_fname(fname),
						(double)pos, (double)offset);
					exit_cleanup(RERR_FILEIO);
				}
				offsetDefer = 0;
			}
			if (fd != -1 && write_file(fd,data,i) != i)
				goto report_write_error;
			offset += i;
			continue;
		}

		i = -(i+1);
		offset2 = i * (OFF_T)sum->blength;
		len = sum->blength;
		if (i == (int)sum->count-1 && sum->remainder != 0)
			len = sum->remainder;

		stats.matched_data += len;

		if (verbose > 3) {
			rprintf(FINFO,
				"chunk[%d] of size %ld at %.0f offset=%.0f%s\n",
				i, (long)len, (double)offset2, (double)offset,
				updating_basis_or_equiv && offset == offset2
				? " (seek)" : "");
		}

		if (mapbuf) {
			map = map_ptr(mapbuf,offset2,len);

			see_token(map, len);
			sum_update(map, len);
		}

		if (updating_basis_or_equiv) {
			if (offset == offset2 && fd != -1) {
				offset += len;
				offsetDefer = 1;
				continue;
			}
		}
		if (fd != -1 && map) {
			if ( offsetDefer ) {
				OFF_T pos;
				if (flush_write_file(fd) < 0)
					goto report_write_error;
				if ((pos = do_lseek(fd, offset, SEEK_SET)) != offset) {
					rsyserr(FERROR_XFER, errno,
						"lseek of %s returned %.0f, not %.0f",
						full_fname(fname),
						(double)pos, (double)offset);
					exit_cleanup(RERR_FILEIO);
				}
				offsetDefer = 0;
			}
			if ( write_file(fd, map, len) != (int)len )
				goto report_write_error;
		}
		offset += len;
	}

	if ( offsetDefer ) {
		OFF_T pos;
		if (flush_write_file(fd) < 0)
			goto report_write_error;
		if ((pos = do_lseek(fd, offset, SEEK_SET)) != offset) {
			rsyserr(FERROR_XFER, errno,
				"lseek of %s returned %.0f, not %.0f",
				full_fname(fname), (double)pos, (double)offset);
			exit_cleanup(RERR_FILEIO);
		}
		offsetDefer = 0;
	}

	if (flush_write_file(fd) < 0)
		goto report_write_error;

#ifdef HAVE_FTRUNCATE
	if (fd != -1 && do_ftruncate(fd, offset) < 0) {
		rsyserr(FERROR_XFER, errno, "ftruncate failed on %s",
			full_fname(fname));
	}
#endif

	if (do_progress)
		end_progress(total_size);

	if (fd != -1 && offset > 0 && sparse_end(fd, offset) != 0) {
	    report_write_error:
		rsyserr(FERROR_XFER, errno, "write failed on %s",
			full_fname(fname));
		exit_cleanup(RERR_FILEIO);
	}

	sum_len = sum_end(file_sum1);

	if (mapbuf)
		unmap_file(mapbuf);

	if ( numMatchTokens < 0 || nextToken != 0 ) {
		/*
		 * If numMatchTokens >= 0 and nextToken == 0, then the caller
		 * already read ahead to the digest.  Otherwise we have to read
		 * it here.
		 */
		read_buf(f_in, file_sum2, sum_len);
	}
	if (verbose > 2)
		rprintf(FINFO,"got file_sum\n");
	if (fd != -1 && memcmp(file_sum1, file_sum2, sum_len) != 0)
		return 0;
	return 1;
}
map_ptr buffer::map(GLintptr Offset, GLsizeiptr Length, GLbitfield Flags) const
{
    return map_ptr(this->Name, Offset, Length, Flags);
}
static int receive_data(int f_in, char *fname_r, int fd_r, OFF_T size_r,
			char *fname, int fd, OFF_T total_size)
{
	static char file_sum1[MD4_SUM_LENGTH];
	static char file_sum2[MD4_SUM_LENGTH];
	struct map_struct *mapbuf;
	struct sum_struct sum;
	int32 len;
	OFF_T offset = 0;
	OFF_T offset2;
	char *data;
	int32 i;
	char *map = NULL;

	read_sum_head(f_in, &sum);

	if (fd_r >= 0 && size_r > 0) {
		int32 read_size = MAX(sum.blength * 2, 16*1024);
		mapbuf = map_file(fd_r, size_r, read_size, sum.blength);
		if (verbose > 2) {
			rprintf(FINFO, "recv mapped %s of size %.0f\n",
				safe_fname(fname_r), (double)size_r);
		}
	} else
		mapbuf = NULL;

	sum_init(checksum_seed);

	while ((i = recv_token(f_in, &data)) != 0) {
		if (do_progress)
			show_progress(offset, total_size);

		if (i > 0) {
			if (verbose > 3) {
				rprintf(FINFO,"data recv %d at %.0f\n",
					i,(double)offset);
			}

			stats.literal_data += i;
			cleanup_got_literal = 1;

			sum_update(data, i);

			if (fd != -1 && write_file(fd,data,i) != i)
				goto report_write_error;
			offset += i;
			continue;
		}

		i = -(i+1);
		offset2 = i * (OFF_T)sum.blength;
		len = sum.blength;
		if (i == (int)sum.count-1 && sum.remainder != 0)
			len = sum.remainder;

		stats.matched_data += len;

		if (verbose > 3) {
			rprintf(FINFO,
				"chunk[%d] of size %ld at %.0f offset=%.0f\n",
				i, (long)len, (double)offset2, (double)offset);
		}

		if (mapbuf) {
			map = map_ptr(mapbuf,offset2,len);

			see_token(map, len);
			sum_update(map, len);
		}

		if (inplace) {
			if (offset == offset2 && fd != -1) {
				if (flush_write_file(fd) < 0)
					goto report_write_error;
				offset += len;
				if (do_lseek(fd, len, SEEK_CUR) != offset) {
					rsyserr(FERROR, errno,
						"lseek failed on %s",
						full_fname(fname));
					exit_cleanup(RERR_FILEIO);
				}
				continue;
			}
		}
		if (fd != -1 && map && write_file(fd, map, len) != (int)len)
			goto report_write_error;
		offset += len;
	}

	if (flush_write_file(fd) < 0)
		goto report_write_error;

#ifdef HAVE_FTRUNCATE
	if (inplace && fd != -1)
		ftruncate(fd, offset);
#endif

	if (do_progress)
		end_progress(total_size);

	if (fd != -1 && offset > 0 && sparse_end(fd) != 0) {
	    report_write_error:
		rsyserr(FERROR, errno, "write failed on %s",
			full_fname(fname));
		exit_cleanup(RERR_FILEIO);
	}

	sum_end(file_sum1);

	if (mapbuf)
		unmap_file(mapbuf);

	read_buf(f_in,file_sum2,MD4_SUM_LENGTH);
	if (verbose > 2)
		rprintf(FINFO,"got file_sum\n");
	if (fd != -1 && memcmp(file_sum1, file_sum2, MD4_SUM_LENGTH) != 0)
		return 0;
	return 1;
}