Example #1
bool Downloader::plain_get(char *data, uint64_t &len) {
    uint64_t filelen = this->o->Size();
    uint64_t tmplen = 0;

RETRY:
    // no more data available; we are done with this file
    if (this->readlen == filelen) {
        len = 0;
        return true;
    }

    BlockingBuffer *buf = buffers[this->chunkcount % this->num];

    // get data from this->magic_bytes, or buf->Read(), or both
    if (this->readlen < this->magic_bytes_num) {
        if ((this->readlen + len) <= this->magic_bytes_num) {
            memcpy(data, this->magic_bytes + this->readlen, len);
            tmplen = len;
        } else {
            memcpy(data, this->magic_bytes + this->readlen,
                   this->magic_bytes_num - this->readlen);
            tmplen = this->magic_bytes_num - this->readlen +
                     buf->Read(data + this->magic_bytes_num - this->readlen,
                               this->readlen + len - this->magic_bytes_num);
        }
    } else {
        tmplen = buf->Read(data, len);
    }

    this->readlen += tmplen;

    if (tmplen < len) {
        this->chunkcount++;
        if (buf->Error()) {
            S3ERROR("Error occurs while downloading, skip");
            return false;
        }
    }

    // tmplen == 0 means this chunk is exhausted, either because the download
    // thread has finished it or because the chunk size is an exact multiple of
    // get()'s buffer size; retry with the next buffer
    if (tmplen == 0) {
        goto RETRY;
    }
    len = tmplen;

    // S3DEBUG("Got %llu, %llu / %llu", len, this->readlen, filelen);
    return true;
}
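
To make the magic_bytes splicing in plain_get() easier to follow, here is a minimal standalone sketch of the same idea: serve a read first from a prefix that has already been pulled off the stream, then from the underlying buffer. The function spliced_read and the read_more callback are illustrative stand-ins for this->magic_bytes and buf->Read(), not part of the Downloader API.

#include <algorithm>
#include <cstdint>
#include <cstring>
#include <functional>

// Illustrative helper: 'offset' plays the role of this->readlen, 'prefix' and
// 'prefix_len' play the role of this->magic_bytes and this->magic_bytes_num,
// and 'read_more' stands in for buf->Read().
static uint64_t spliced_read(char *dst, uint64_t len, uint64_t offset,
                             const char *prefix, uint64_t prefix_len,
                             const std::function<uint64_t(char *, uint64_t)> &read_more) {
    if (offset >= prefix_len) {
        // prefix already fully served; read directly from the buffer
        return read_more(dst, len);
    }

    // serve as much as possible from the prefix first
    uint64_t from_prefix = std::min(len, prefix_len - offset);
    memcpy(dst, prefix + offset, from_prefix);
    if (from_prefix == len) {
        return len;
    }

    // then fill the remainder from the underlying buffer
    return from_prefix + read_more(dst + from_prefix, len - from_prefix);
}
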
Example #2
bool Downloader::get(char *data, uint64_t &len) {
    uint64_t filelen = this->o->Size();

RETRY:
    if (this->readlen == filelen) {
        len = 0;
        return true;
    }

    BlockingBuffer *buf = buffers[this->chunkcount % this->num];
    uint64_t tmplen = buf->Read(data, len);
    this->readlen += tmplen;
    if (tmplen < len) {
        this->chunkcount++;
        if (buf->Error()) {
            S3ERROR("Error occurs while downloading, skip");
            return false;
        }
    }

    // tmplen == 0 means this chunk is exhausted, either because the download
    // thread has finished it or because the chunk size is an exact multiple of
    // get()'s buffer size; retry with the next buffer
    if (tmplen == 0) {
        goto RETRY;
    }
    len = tmplen;

    // S3DEBUG("Got %llu, %llu / %llu", len, this->readlen, filelen);
    return true;
}
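
Both get() variants share the same contract: len goes in as the buffer capacity and comes back as the number of bytes produced, 0 meaning end of file, and a false return meaning a download error. A minimal caller sketch under that assumption follows; drain() and the std::function wrapper are illustrative, not part of the original code (a real caller would bind Downloader::get, for instance with a lambda).

#include <cstdint>
#include <functional>
#include <vector>

// Illustrative caller loop: 'get' stands in for a bound Downloader::get.
static bool drain(const std::function<bool(char *, uint64_t &)> &get,
                  std::vector<char> &sink) {
    char buf[64 * 1024];
    while (true) {
        uint64_t len = sizeof(buf);  // in: buffer capacity, out: bytes returned
        if (!get(buf, len)) {
            return false;            // download error (already logged via S3ERROR)
        }
        if (len == 0) {
            return true;             // whole file has been consumed
        }
        sink.insert(sink.end(), buf, buf + len);
    }
}
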
Example #3
bool Downloader::get(char *data, uint64_t &len) {
    if (this->magic_bytes_num == 0) {
        // read the first 4 (at least 2) bytes to check whether this file is compressed
        BlockingBuffer *buf = buffers[this->chunkcount % this->num];

        if ((this->magic_bytes_num = buf->Read(
                 (char *)this->magic_bytes, sizeof(this->magic_bytes))) > 1) {
            if (!this->set_compression()) {
                return false;
            }
        }
    }

    switch (this->compression) {
        case S3_ZIP_GZIP:
            return this->zstream_get(data, len);
        default:
            return this->plain_get(data, len);
    }
}
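
set_compression() is not shown on this page; based on the S3_ZIP_GZIP case above, it presumably inspects the magic bytes for the two-byte gzip signature 0x1f 0x8b. A hypothetical sketch of that kind of check:

#include <cstdint>

// Hypothetical check of the kind set_compression() presumably performs:
// a gzip stream begins with the magic bytes 0x1f 0x8b.
static bool looks_like_gzip(const unsigned char *magic, uint64_t n) {
    return n >= 2 && magic[0] == 0x1f && magic[1] == 0x8b;
}
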
Example #4
bool Downloader::zstream_get(char *data, uint64_t &len) {
    uint64_t filelen = this->o->Size();

// S3_ZIP_CHUNKSIZE is the buffer size used for feeding data to and pulling
// data from the zlib routines; 256K is the size recommended by zlib.
#define S3_ZIP_CHUNKSIZE (256 * 1024)
    uint32_t left_out = 0;
    zstream_info *zinfo = this->z_info;
    z_stream *strm = &zinfo->zstream;

RETRY:
    // fail-safe in case there is a (very unlikely) infinite-loop bug, for
    // instance if S3 reports a file size larger than the actual one. This has
    // never happened, but better to be careful.
    if (this->chunkcount > (this->o->Size() / this->o->Chunksize() + 2)) {
        if (zinfo->inited) {
            inflateEnd(strm);
        }
        len = 0;
        return false;
    }

    // no more data left to read, decompress, or copy; we are done with this file
    if (this->readlen == filelen && !(zinfo->have_out - zinfo->done_out) &&
        !strm->avail_in) {
        if (zinfo->inited) {
            inflateEnd(strm);
        }
        len = 0;
        return true;
    }

    BlockingBuffer *buf = buffers[this->chunkcount % this->num];

    // strm is the zlib structure used to decompress the stream; initialize it lazily on first use
    if (!zinfo->inited) {
        strm->zalloc = Z_NULL;
        strm->zfree = Z_NULL;
        strm->opaque = Z_NULL;
        strm->avail_in = 0;
        strm->next_in = Z_NULL;

        zinfo->in = (unsigned char *)malloc(S3_ZIP_CHUNKSIZE);
        zinfo->out = (unsigned char *)malloc(S3_ZIP_CHUNKSIZE);
        if (!zinfo->in || !zinfo->out) {
            S3ERROR("Failed to allocate memory");
            return false;
        }
        // windowBits = 47 = 32 + 15: 15 window bits plus 32 to enable automatic
        // gzip/zlib header detection, so zlib can recognize and decode the gzip stream
        if (inflateInit2(strm, 47) != Z_OK) {
            S3ERROR("Failed to init gzip function");
            return false;
        }

        zinfo->inited = true;
    }

    do {
        // copy decompressed data
        left_out = zinfo->have_out - zinfo->done_out;
        if (left_out > len) {
            memcpy(data, zinfo->out + zinfo->done_out, len);
            zinfo->done_out += len;
            break;
        } else if (left_out) {
            memcpy(data, zinfo->out + zinfo->done_out, left_out);
            zinfo->done_out = 0;
            zinfo->have_out = 0;
            len = left_out;
            break;
        }

        // produce another decompressed chunk from any pending compressed input
        if (this->readlen && (strm->avail_in != 0)) {
            strm->avail_out = S3_ZIP_CHUNKSIZE;
            strm->next_out = zinfo->out;

            switch (inflate(strm, Z_NO_FLUSH)) {
                case Z_STREAM_ERROR:
                case Z_NEED_DICT:
                case Z_DATA_ERROR:
                case Z_MEM_ERROR:
                    S3ERROR("Failed to decompress data");
                    inflateEnd(strm);
                    return false;
            }

            zinfo->have_out = S3_ZIP_CHUNKSIZE - strm->avail_out;
        }

        // get another compressed chunk
        // from magic_bytes, or buf->Read(), or both
        if (!zinfo->have_out) {
            if (this->readlen < this->magic_bytes_num) {
                memcpy(zinfo->in, this->magic_bytes + this->readlen,
                       this->magic_bytes_num - this->readlen);
                strm->avail_in =
                    this->magic_bytes_num - this->readlen +
                    buf->Read((char *)zinfo->in + this->magic_bytes_num -
                                  this->readlen,
                              S3_ZIP_CHUNKSIZE - this->magic_bytes_num +
                                  this->readlen);
            } else {
                strm->avail_in = buf->Read((char *)zinfo->in, S3_ZIP_CHUNKSIZE);
            }

            if (buf->Error()) {
                S3ERROR("Error occurs while downloading, skip");
                inflateEnd(strm);
                return false;
            }
            strm->next_in = zinfo->in;

            // readlen counts bytes read from the original (compressed) file, not decompressed output
            this->readlen += strm->avail_in;

            // done *reading* compressed data from this buffer; we still need to
            // confirm it has all been decompressed and handed out through get()
            if (strm->avail_in == 0) {
                this->chunkcount++;
                goto RETRY;
            }
        }
    } while (1);

    return true;
}
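
For readers less familiar with zlib, the following self-contained sketch applies the same inflateInit2(..., 47) / inflate() pattern to a complete in-memory gzip buffer, without the Downloader's chunked BlockingBuffer state machine. gunzip_buffer() is illustrative, not part of the original code.

#include <zlib.h>

#include <cstdint>
#include <cstring>
#include <vector>

// Illustrative one-shot gunzip using the same zlib calls as zstream_get().
static bool gunzip_buffer(const unsigned char *src, uint64_t src_len,
                          std::vector<unsigned char> &dst) {
    z_stream strm;
    memset(&strm, 0, sizeof(strm));  // zalloc/zfree/opaque = Z_NULL, avail_in = 0

    // 47 = 32 + 15: 15 window bits plus 32 for automatic gzip/zlib header detection
    if (inflateInit2(&strm, 47) != Z_OK) {
        return false;
    }

    strm.next_in = const_cast<unsigned char *>(src);
    strm.avail_in = static_cast<uInt>(src_len);

    std::vector<unsigned char> out(256 * 1024);  // same 256K chunk size as S3_ZIP_CHUNKSIZE
    int ret = Z_OK;
    do {
        strm.next_out = out.data();
        strm.avail_out = static_cast<uInt>(out.size());
        ret = inflate(&strm, Z_NO_FLUSH);
        if (ret == Z_STREAM_ERROR || ret == Z_NEED_DICT || ret == Z_DATA_ERROR ||
            ret == Z_MEM_ERROR || ret == Z_BUF_ERROR) {
            // Z_BUF_ERROR here means truncated input, since output space is
            // always available on each call
            inflateEnd(&strm);
            return false;
        }
        dst.insert(dst.end(), out.data(), out.data() + (out.size() - strm.avail_out));
    } while (ret != Z_STREAM_END);

    inflateEnd(&strm);
    return true;
}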