IV
PerlIOMmap_unmap(pTHX_ PerlIO *f)
{
    PerlIOMmap * const m = PerlIOSelf(f, PerlIOMmap);
    IV code = 0;
    if (m->len) {
        PerlIOBuf * const b = &m->base;
        if (b->buf) {
            /* The munmap address argument is tricky: depending on the
             * standard it is either "void *" or "caddr_t" (which is
             * usually "char *", signed or unsigned).  If we cast it
             * to "void *", those that have it caddr_t and an uptight
             * C++ compiler will freak out.  But casting it as char*
             * should work.  Maybe.  (Using Mmap_t figured out by
             * Configure doesn't always work, apparently.) */
            code = munmap((char*)m->mptr, m->len);
            b->buf = NULL;
            m->len = 0;
            m->mptr = NULL;
            if (PerlIO_seek(PerlIONext(f), b->posn, SEEK_SET) != 0)
                code = -1;
        }
        b->ptr = b->end = b->buf;
        PerlIOBase(f)->flags &= ~(PERLIO_F_RDBUF | PERLIO_F_WRBUF);
    }
    return code;
}
int
#ifdef USE_SFIO
PerlIO_getpos(PerlIO *f, Off_t *pos)
{
    *pos = PerlIO_seek(f, 0, 0);
    return 0;
}
void
_wavpack_skip(wvpinfo *wvp, uint32_t size)
{
  if ( buffer_len(wvp->buf) >= size ) {
    //buffer_dump(mp4->buf, size);
    buffer_consume(wvp->buf, size);

    DEBUG_TRACE(" skipped buffer data size %d\n", size);
  }
  else {
    PerlIO_seek(wvp->infile, size - buffer_len(wvp->buf), SEEK_CUR);
    buffer_clear(wvp->buf);

    DEBUG_TRACE(" seeked past %d bytes to %d\n", size, (int)PerlIO_tell(wvp->infile));
  }
}
int csv_select_run_child( csv_parse_t *parse, csv_result_set_t *result, size_t table_pos ) {
    csv_select_t *select = parse->select;
    csv_table_def_t *table, *table2;
    char *s1, *s2;
    int ch, has_child = select->table_count - table_pos > 1;
    size_t i, j, k, l, m, rp = 0, nr = 0, l1;
    Expr *x1;
    ExprList *xl_cols = select->columns;
    csv_row_t *row;
    csv_var_t *v1, v2;
    csv_column_def_t *col;
    csv_t *csv = parse->csv;

    /* reset table */
    table = select->tables[table_pos];
    PerlIO_seek( table->dstream, table->header_offset, SEEK_SET );
    if( table->flags & TEXT_TABLE_COLNAMEHEADER ) {
        if( ! table->header_offset ) {
            do {
                ch = PerlIO_getc( table->dstream );
            } while( ch != EOF && ch != '\n' );
            table->header_offset = PerlIO_tell( table->dstream );
        }
    }
    table->row_pos = 0;

    /* read rows */
    while( (s1 = csv_read_row( table )) != NULL ) {
#ifdef CSV_DEBUG
        _debug( "table '%s' row %lu\n", table->name, table->row_pos );
#endif
        if( has_child ) {
            ch = csv_select_run_child( parse, result, table_pos + 1 );
            if( ch != CSV_OK )
                return ch;
            continue;
        }

        /* eval WHERE */
        if( (x1 = select->where) != NULL ) {
            ch = expr_eval( parse, x1 );
            if( ch != CSV_OK )
                return ch;
            if( x1->var.flags & VAR_HAS_IV ) {
                if( ! x1->var.iv )
                    continue;
            }
            else if( x1->var.flags & VAR_HAS_NV ) {
                if( ! x1->var.nv )
                    continue;
            }
            else if( x1->var.sv == NULL || x1->var.sv[0] == '0' )
                continue;
        }

        /* eval columns */
        for( i = 0; i < xl_cols->expr_count; i ++ ) {
            x1 = xl_cols->expr[i];
            if( (x1->flags & EXPR_IS_AGGR) == 0 ) {
                ch = expr_eval( parse, x1 );
                if( ch != CSV_OK )
                    return ch;
            }
        }

        if( select->groupby == NULL )
            goto add_row;

        /* eval groupby */
        for( i = 0; i < select->groupby->expr_count; i ++ ) {
            ch = expr_eval( parse, select->groupby->expr[i] );
            if( ch != CSV_OK )
                return ch;
        }

        /* search groupby row */
        for( j = 0; j < result->row_count; j ++ ) {
            row = result->rows[j];
            for( i = 0; i < select->groupby->expr_count; i ++ ) {
                v1 = &row->groupby[i];
                x1 = select->groupby->expr[i];
                VAR_COMP_OP( v2, *v1, x1->var, ==, "==", 0, 0 );
                if( ! v2.iv )
                    goto groupby_next;
            }
            goto eval_row;
groupby_next:
            continue;
        }

add_row:
        if( select->offset && rp < select->offset ) {
            rp ++;
            continue;
        }
        if( select->limit && rp - select->offset >= select->limit )
            goto finish;
        rp ++;
        Newxz( row, 1, csv_row_t );
        row->select = select;
        if( (result->row_count % ROW_COUNT_EXPAND) == 0 ) {
            Renew( result->rows, result->row_count + ROW_COUNT_EXPAND, csv_row_t * );
        }
        result->rows[result->row_count ++] = row;
        Newxz( row->data, result->column_count, csv_var_t );
        if( select->orderby != NULL )
            Newxz( row->orderby, select->orderby->expr_count, csv_var_t );
        if( select->groupby != NULL )
            Newxz( row->groupby, select->groupby->expr_count, csv_var_t );
        nr = 1;

eval_row:
        for( i = 0, j = 0; i < xl_cols->expr_count; i ++, j ++ ) {
            x1 = xl_cols->expr[i];
            if( x1->op == TK_TABLE ) {
                table2 = x1->table;
                for( l = 0; l < table2->column_count; l ++, j ++ ) {
                    col = table2->columns + l;
                    v1 = row->data + j;
                    s1 = table->data + col->offset;
                    l1 = col->length;
                    if( l1 == 4 && strcmp( s1, "NULL" ) == 0 )
                        continue;
                    switch( col->type ) {
                    case FIELD_TYPE_INTEGER:
                        v1->iv = atoi( s1 );
                        v1->flags = VAR_HAS_IV;
                        break;
                    case FIELD_TYPE_DOUBLE:
                        if( table2->decimal_symbol != '.' ) {
                            s2 = strchr( s1, table2->decimal_symbol );
                            if( s2 != NULL )
                                *s2 = '.';
                        }
                        v1->nv = atof( s1 );
                        v1->flags = VAR_HAS_NV;
                        break;
                    case FIELD_TYPE_CHAR:
                        v1->sv_len = charset_convert( s1, l1, table2->charset, csv->client_charset, &v1->sv );
                        v1->flags = VAR_HAS_SV;
                        break;
                    case FIELD_TYPE_BLOB:
                        if( l1 >= 2 && s1[0] == '0' && s1[1] == 'x' ) {
                            l1 = (l1 - 2) / 2;
                            if( ! l1 ) {
                                v1->flags = 0;
                                continue;
                            }
                            Renew( v1->sv, l1 + 1, char );
                            for( m = 0, s1 += 2; m < l1; m ++ ) {
                                ch = CHAR_FROM_HEX[*s1 & 0x7f] << 4;
                                s1 ++;
                                ch += CHAR_FROM_HEX[*s1 & 0x7f];
                                s1 ++;
                                v1->sv[m] = (char) ch;
                            }
                            v1->sv[l1] = '\0';
                            v1->sv_len = l1;
                            v1->flags = VAR_HAS_SV;
                            break;
                        }
                    default:
                        Renew( v1->sv, l1 + 1, char );
                        Copy( s1, v1->sv, l1, char );
                        v1->sv[l1] = '\0';
                        v1->sv_len = l1;
                        v1->flags = VAR_HAS_SV;
                        break;
                    }
                }
                continue;
            }
int
image_gif_load(image *im)
{
  int x, y, ofs;
  GifRecordType RecordType;
  GifPixelType *line = NULL;
  int ExtFunction = 0;
  GifByteType *ExtData;
  SavedImage *sp;
  SavedImage temp_save;
  int BackGround = 0;
  int trans_index = 0; // transparent index if any
  ColorMapObject *ColorMap;
  GifColorType *ColorMapEntry;

  temp_save.ExtensionBlocks = NULL;
  temp_save.ExtensionBlockCount = 0;

  // If reusing the object a second time, start over
  if (im->used) {
    DEBUG_TRACE("Recreating giflib objects\n");
    image_gif_finish(im);

    if (im->fh != NULL) {
      // reset file to beginning of image
      PerlIO_seek(im->fh, im->image_offset, SEEK_SET);
    }
    else {
      // reset SV read
      im->sv_offset = im->image_offset;
    }

    buffer_clear(im->buf);

    image_gif_read_header(im);
  }

  do {
    if (DGifGetRecordType(im->gif, &RecordType) == GIF_ERROR) {
      warn("Image::Scale unable to read GIF file (%s)\n", SvPVX(im->path));
      image_gif_finish(im);
      return 0;
    }

    switch (RecordType) {
      case IMAGE_DESC_RECORD_TYPE:
        if (DGifGetImageDesc(im->gif) == GIF_ERROR) {
          warn("Image::Scale unable to read GIF file (%s)\n", SvPVX(im->path));
          image_gif_finish(im);
          return 0;
        }

        sp = &im->gif->SavedImages[im->gif->ImageCount - 1];

        im->width  = sp->ImageDesc.Width;
        im->height = sp->ImageDesc.Height;

        BackGround = im->gif->SBackGroundColor; // XXX needed?
        ColorMap = im->gif->Image.ColorMap ? im->gif->Image.ColorMap : im->gif->SColorMap;

        if (ColorMap == NULL) {
          warn("Image::Scale GIF image has no colormap (%s)\n", SvPVX(im->path));
          image_gif_finish(im);
          return 0;
        }

        // Allocate storage for decompressed image
        image_alloc(im, im->width, im->height);

        New(0, line, im->width, GifPixelType);

        if (im->gif->Image.Interlace) {
          int i;
          for (i = 0; i < 4; i++) {
            for (x = InterlacedOffset[i]; x < im->height; x += InterlacedJumps[i]) {
              ofs = x * im->width;
              if (DGifGetLine(im->gif, line, 0) != GIF_OK) {
                warn("Image::Scale unable to read GIF file (%s)\n", SvPVX(im->path));
                image_gif_finish(im);
                return 0;
              }

              for (y = 0; y < im->width; y++) {
                ColorMapEntry = &ColorMap->Colors[line[y]];

                im->pixbuf[ofs++] = COL_FULL(
                  ColorMapEntry->Red,
                  ColorMapEntry->Green,
                  ColorMapEntry->Blue,
                  trans_index == line[y] ? 0 : 255
                );
              }
            }
          }
        }
        else {
          ofs = 0;
          for (x = 0; x < im->height; x++) {
            if (DGifGetLine(im->gif, line, 0) != GIF_OK) {
              warn("Image::Scale unable to read GIF file (%s)\n", SvPVX(im->path));
              image_gif_finish(im);
              return 0;
            }

            for (y = 0; y < im->width; y++) {
              ColorMapEntry = &ColorMap->Colors[line[y]];

              im->pixbuf[ofs++] = COL_FULL(
                ColorMapEntry->Red,
                ColorMapEntry->Green,
                ColorMapEntry->Blue,
                trans_index == line[y] ? 0 : 255
              );
            }
          }
        }

        Safefree(line);
        break;

      case EXTENSION_RECORD_TYPE:
        if (DGifGetExtension(im->gif, &ExtFunction, &ExtData) == GIF_ERROR) {
          warn("Image::Scale unable to read GIF file (%s)\n", SvPVX(im->path));
          image_gif_finish(im);
          return 0;
        }

        if (ExtFunction == 0xF9) { // transparency info
          if (ExtData[1] & 1)
            trans_index = ExtData[4];
          else
            trans_index = -1;
          im->has_alpha = 1;
          DEBUG_TRACE("GIF transparency index: %d\n", trans_index);
        }

        while (ExtData != NULL) {
          /* Create an extension block with our data */
#ifdef GIFLIB_API_50
          if (GifAddExtensionBlock(&im->gif->ExtensionBlockCount, &im->gif->ExtensionBlocks,
              ExtFunction, ExtData[0], &ExtData[1]) == GIF_ERROR) {
#else
          temp_save.Function = ExtFunction;
          if (AddExtensionBlock(&temp_save, ExtData[0], &ExtData[1]) == GIF_ERROR) {
#endif
#ifdef GIFLIB_API_41
            PrintGifError();
#endif
            warn("Image::Scale unable to read GIF file (%s)\n", SvPVX(im->path));
            image_gif_finish(im);
            return 0;
          }

          if (DGifGetExtensionNext(im->gif, &ExtData) == GIF_ERROR) {
#ifdef GIFLIB_API_41
            PrintGifError();
#endif
            warn("Image::Scale unable to read GIF file (%s)\n", SvPVX(im->path));
            image_gif_finish(im);
            return 0;
          }

          ExtFunction = 0; // CONTINUE_EXT_FUNC_CODE
        }
        break;

      case TERMINATE_RECORD_TYPE:
      default:
        break;
    }
  } while (RecordType != TERMINATE_RECORD_TYPE);

  return 1;
}

void
image_gif_finish(image *im)
{
  if (im->gif != NULL) {
#ifdef GIFLIB_API_51
    if (DGifCloseFile(im->gif, NULL) != GIF_OK) {
#else
    if (DGifCloseFile(im->gif) != GIF_OK) {
#endif
#ifdef GIFLIB_API_41
      PrintGifError();
#endif
      warn("Image::Scale unable to close GIF file (%s)\n", SvPVX(im->path));
    }
    im->gif = NULL;

    DEBUG_TRACE("image_gif_finish\n");
  }
}
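/*
 * Illustrative note on the interlaced branch above (assumed values, not taken
 * from this excerpt): InterlacedOffset and InterlacedJumps are conventionally
 * the GIF89a four-pass interlace tables, e.g.
 *
 *   static const int InterlacedOffset[] = { 0, 4, 2, 1 };
 *   static const int InterlacedJumps[]  = { 8, 8, 4, 2 };
 *
 * so pass i starts at row InterlacedOffset[i] and advances by
 * InterlacedJumps[i] rows until the image height is reached.
 */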
PerlIO_setpos(PerlIO *f, const Fpos_t *pos)
#endif
{
    return PerlIO_seek(f, *pos, 0);
}
int
image_init(HV *self, image *im)
{
  unsigned char *bptr;
  char *file = NULL;
  int ret = 1;

  if (my_hv_exists(self, "file")) {
    // Input from file
    SV *path = *(my_hv_fetch(self, "file"));
    file = SvPVX(path);
    im->fh = IoIFP(sv_2io(*(my_hv_fetch(self, "_fh"))));
    im->path = newSVsv(path);
  }
  else {
    // Input from scalar ref
    im->fh = NULL;
    im->path = newSVpv("(data)", 0);
    im->sv_data = *(my_hv_fetch(self, "data"));
    if (SvROK(im->sv_data))
      im->sv_data = SvRV(im->sv_data);
    else
      croak("data is not a scalar ref\n");
  }

  im->pixbuf           = NULL;
  im->outbuf           = NULL;
  im->outbuf_size      = 0;
  im->type             = UNKNOWN;
  im->sv_offset        = 0;
  im->image_offset     = 0;
  im->image_length     = 0;
  im->width            = 0;
  im->height           = 0;
  im->width_padding    = 0;
  im->width_inner      = 0;
  im->height_padding   = 0;
  im->height_inner     = 0;
  im->flipped          = 0;
  im->bpp              = 0;
  im->channels         = 0;
  im->has_alpha        = 0;
  im->orientation      = ORIENTATION_NORMAL;
  im->orientation_orig = ORIENTATION_NORMAL;
  im->memory_limit     = 0;
  im->target_width     = 0;
  im->target_height    = 0;
  im->keep_aspect      = 0;
  im->resize_type      = IMAGE_SCALE_TYPE_GD_FIXED;
  im->filter           = 0;
  im->bgcolor          = 0;
  im->used             = 0;
  im->palette          = NULL;

#ifdef HAVE_JPEG
  im->cinfo = NULL;
#endif
#ifdef HAVE_PNG
  im->png_ptr  = NULL;
  im->info_ptr = NULL;
#endif
#ifdef HAVE_GIF
  im->gif = NULL;
#endif

  // Read new() options
  if (my_hv_exists(self, "offset")) {
    im->image_offset = SvIV(*(my_hv_fetch(self, "offset")));
    if (im->fh != NULL)
      PerlIO_seek(im->fh, im->image_offset, SEEK_SET);
  }

  if (my_hv_exists(self, "length"))
    im->image_length = SvIV(*(my_hv_fetch(self, "length")));

  Newz(0, im->buf, sizeof(Buffer), Buffer);
  buffer_init(im->buf, BUFFER_SIZE);
  im->memory_used = BUFFER_SIZE;

  // Determine type of file from magic bytes
  if (im->fh != NULL) {
    if ( !_check_buf(im->fh, im->buf, 8, BUFFER_SIZE) ) {
      image_finish(im);
      croak("Unable to read image header for %s\n", file);
    }
  }
  else {
    im->sv_offset = MIN(sv_len(im->sv_data) - im->image_offset, BUFFER_SIZE);
    buffer_append(im->buf, SvPVX(im->sv_data) + im->image_offset, im->sv_offset);
  }

  bptr = buffer_ptr(im->buf);

  switch (bptr[0]) {
    case 0xff:
      if (bptr[1] == 0xd8 && bptr[2] == 0xff) {
#ifdef HAVE_JPEG
        im->type = JPEG;
#else
        image_finish(im);
        croak("Image::Scale was not built with JPEG support\n");
#endif
      }
      break;
    case 0x89:
      if (bptr[1] == 'P' && bptr[2] == 'N' && bptr[3] == 'G'
        && bptr[4] == 0x0d && bptr[5] == 0x0a && bptr[6] == 0x1a && bptr[7] == 0x0a) {
#ifdef HAVE_PNG
        im->type = PNG;
#else
        image_finish(im);
        croak("Image::Scale was not built with PNG support\n");
#endif
      }
      break;
    case 'G':
      if (bptr[1] == 'I' && bptr[2] == 'F' && bptr[3] == '8'
        && (bptr[4] == '7' || bptr[4] == '9') && bptr[5] == 'a') {
#ifdef HAVE_GIF
        im->type = GIF;
#else
        image_finish(im);
        croak("Image::Scale was not built with GIF support\n");
#endif
      }
      break;
    case 'B':
      if (bptr[1] == 'M') {
        im->type = BMP;
      }
      break;
  }

  DEBUG_TRACE("Image type: %d\n", im->type);

  // Read image header via type-specific function to determine dimensions
  switch (im->type) {
#ifdef HAVE_JPEG
    case JPEG:
      if ( !image_jpeg_read_header(im) ) {
        ret = 0;
        goto out;
      }
      break;
#endif
#ifdef HAVE_PNG
    case PNG:
      if ( !image_png_read_header(im) ) {
        ret = 0;
        goto out;
      }
      break;
#endif
#ifdef HAVE_GIF
    case GIF:
      if ( !image_gif_read_header(im) ) {
        ret = 0;
        goto out;
      }
      break;
#endif
    case BMP:
      image_bmp_read_header(im);
      break;
    case UNKNOWN:
      warn("Image::Scale unknown file type (%s), first 8 bytes were: %02x %02x %02x %02x %02x %02x %02x %02x\n",
        SvPVX(im->path), bptr[0], bptr[1], bptr[2], bptr[3], bptr[4], bptr[5], bptr[6], bptr[7]);
      ret = 0;
      break;
  }

  DEBUG_TRACE("Image dimensions: %d x %d, channels %d\n", im->width, im->height, im->channels);

out:
  if (ret == 0)
    image_finish(im);

  return ret;
}
SV *
parse_in_chunks(char * filepath, size_t filesize)
{
    char *buf;
    SSize_t bytes_read = 0;    /* signed: PerlIO_read() may return -1 */
    int max_buf = 1000;
    char *err_msg;
    int block = BLOCK_HEADER;
    int cur_event_type = 0;
    int event_type = 0;
    char event_block = 0;
    char *brnl, *breq;
    AV * data;
    AV * datawrapper;
    AV * events;
    char *line;
    char * nl = "\n";
    char * eq = "=";
    int rewind_pos = 0;
    size_t cur_fpos = 0;
    SV * pbuf;
    SV * pmax_buf;
    AV * HANDLERS = get_av("Opsview::Utils::NDOLogsImporter::HANDLERS", 0);
    AV * INPUT_DATA_TYPE = get_av("Opsview::Utils::NDOLogsImporter::INPUT_DATA_TYPE", 0);
    int init_last_pos;
    int init_block;

    if ( first_read ) {
        if ( ! ( fh = PerlIO_open( filepath, "rb" ) ) ) {
            croak("Could not open file: %s\n", strerror(errno));
        }
        bytes_left = filesize;
        init_last_pos = prev_pos = first_read = 0;
        init_block = block = BLOCK_HEADER;
    }
    else {
        init_block = block = BLOCK_EVENTS;
        init_last_pos = prev_pos;
    }

read_begin:
    brnl = NULL;
    breq = NULL;

    pbuf     = get_sv("Opsview::Utils::NDOLogsImporter::PARSE_BUF", 0);
    pmax_buf = get_sv("Opsview::Utils::NDOLogsImporter::MAX_BUF_SIZE", 0);
    buf      = SvPVX(pbuf);
    max_buf  = SvIVX(pmax_buf);

    if ( max_buf < 1024 * 1024 && ! automated_tests ) {
        max_buf = 1024 * 1024;
        SvIV_set( pmax_buf, max_buf );
        SvGROW( pbuf, max_buf + 1 );
        SvCUR_set( pbuf, max_buf );
    }

    if ( bytes_left > 0 ) {
        bytes_read = PerlIO_read(fh, buf + prev_pos, max_buf - prev_pos);
        cur_fpos = PerlIO_tell(fh);

        if ( bytes_read < 0 ) {
            err_msg = strerror(errno);
            PerlIO_close( fh );
            croak("Could not read file: %s\n", err_msg);
        }
        bytes_left -= bytes_read;

        events = (AV *)sv_2mortal((SV *)newAV());

        rewind_pos = last_999(buf + prev_pos, bytes_read);
        prev_pos = bytes_read + prev_pos - rewind_pos;
        buf[prev_pos] = '\0';

        // avg ratio events:file_size = 0.21%
        if ( prev_pos > 1000 ) {
            av_extend( events, (int)(prev_pos * 0.0021) );
        }

        for ( line = strtok_r(buf, nl, &brnl); line != NULL; line = strtok_r(NULL, nl, &brnl) ) {
            switch (block) {
                case BLOCK_HEADER: {
                    if ( strEQ(line, "STARTDATADUMP") ) {
                        block = BLOCK_EVENTS;
                    }
                }
                break;

                case BLOCK_EVENTS: {
                    if ( strEQ(line, "1000") ) { /* NDO_API_ENDDATADUMP */
                        block = BLOCK_FOOTER;
                        continue;
                    }
                    cur_event_type = atoi(line);
                    /* ignore events we are not handling */
                    if ( ! av_exists(HANDLERS, cur_event_type) ) {
                        block = BLOCK_IGNORE_EVENT;
                        continue;
                    }
                    event_block = BLOCK_EVENT_STARTED;
                    if ( cur_event_type != event_type ) {
                        datawrapper = (AV *)sv_2mortal((SV *)newAV());
                        data = (AV *)sv_2mortal((SV *)newAV());
                        av_push( events, newSViv( cur_event_type ) );
                        av_push( datawrapper, newRV( (SV *)data ) );
                        av_push( events, newRV( (SV *)datawrapper ) );
                        event_type = cur_event_type;
                    }
                    else {
                        data = (AV *)sv_2mortal((SV *)newAV());
                        av_push( datawrapper, newRV( (SV *)data ) );
                    }
                    block = BLOCK_EVENT;
                }
                break;

                case BLOCK_EVENT: {
                    if ( strEQ(line, "999") ) { /* NDO_API_ENDDATA */
                        block = BLOCK_EVENTS;
                        event_block = BLOCK_EVENT_ENDED;
                    }
                    else {
                        char *k;
                        char *v;
                        int key;
                        int key_type = 0;
                        int v_len = 0;

                        k = strtok_r(line, eq, &breq);
                        v = strtok_r(NULL, "\0", &breq);
                        key = atoi(k);

                        /* invalid key, skip parsing */
                        if ( key == 0 ) {
                            goto remove_invalid;
                        }

                        SV ** const k_type = av_fetch(INPUT_DATA_TYPE, key, 0 );
                        if ( k_type ) {
                            key_type = SvIVx( *k_type );
                        }

                        if ( v ) {
                            if ( key_type & 1 ) {
                                v_len = ndo_unescape_buffer( v );
                            }
                            else {
                                v_len = strlen(v);
                            }
                        }

                        if ( key_type & 2 ) {
                            AV * datanstptr;
                            SV ** const datanst = av_fetch(data, key, 0 );
                            if ( datanst ) {
                                datanstptr = (AV *)SvRV( *datanst );
                            }
                            else {
                                datanstptr = (AV *)sv_2mortal((SV *)newAV());
                                av_store( data, key, newRV( (SV *)datanstptr ) );
                            }
                            if ( v ) {
                                av_push( datanstptr, newSVpvn(v, v_len) );
                            }
                            else {
                                av_push( datanstptr, newSVpvn("", 0) );
                            }
                        }
                        else {
                            if ( v ) {
                                av_store( data, key, newSVpvn(v, v_len) );
                            }
                            else {
                                av_store( data, key, newSVpvn("", 0) );
                            }
                        }
                    }
                }
                break;

                case BLOCK_FOOTER: {
                    if ( strEQ(line, "GOODBYE") ) {
                        block = BLOCK_HEADER;
                    }
                }
                break;

                case BLOCK_IGNORE_EVENT: {
                    if ( strEQ(line, "999") ) { /* NDO_API_ENDDATA */
                        block = BLOCK_EVENTS; // go back to EVENTS
                        continue;
                    }
                }
                break;
            }
        }

        /* there were some events */
        if ( event_block != BLOCK_HEADER ) {
            if ( event_block != BLOCK_EVENT_ENDED ) {
remove_invalid:
                av_pop( datawrapper );
            }
            /* remove whole block if the last block has no events */
            if ( av_len( datawrapper ) == -1 ) {
                av_pop( events );
                av_pop( events );
            }
        }

        if ( av_len(events) > 0 ) {
            if ( rewind_pos > 0 && cur_fpos < filesize ) {
                memmove(buf, buf + prev_pos + 1, rewind_pos - 1);
            }
            prev_pos = rewind_pos - 1;

            return newRV_inc((SV *) events);
        }
        else {
            if ( cur_fpos < filesize && event_block != BLOCK_HEADER && event_block != BLOCK_EVENT_ENDED ) {
                int new_max_buf = max_buf * 2;
                SvIV_set( pmax_buf, new_max_buf );
                SvGROW( pbuf, new_max_buf + 1 );
                SvCUR_set( pbuf, new_max_buf );

                // start again as previous buffer would be tokenized already
                prev_pos = 0;
                block = init_block;
                event_type = 0;

                PerlIO_close( fh );
                if ( ! ( fh = PerlIO_open( filepath, "rb" ) ) ) {
                    croak("Could not re-open file: %s\n", strerror(errno));
                }
                PerlIO_seek(fh, cur_fpos - bytes_read - init_last_pos, SEEK_SET);
                bytes_left += bytes_read + init_last_pos;

                goto read_begin;
            }
        }
    }

    parser_reset_iterator();
    return &PL_sv_undef;
}
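/*
 * Summary of the retry path above (descriptive note, no additional source
 * code): when a full read yields no complete event (event_block never reaches
 * BLOCK_EVENT_ENDED), MAX_BUF_SIZE is doubled, the file is re-opened, and the
 * same region is re-read starting at cur_fpos - bytes_read - init_last_pos,
 * so an event larger than the current buffer eventually fits in one pass.
 */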
int
_check_buf(PerlIO *infile, Buffer *buf, int min_wanted, int max_wanted)
{
  int ret = 1;

  // Do we have enough data?
  if ( buffer_len(buf) < min_wanted ) {
    // Read more data
    uint32_t read;
    uint32_t actual_wanted;
    unsigned char *tmp;

#ifdef _MSC_VER
    uint32_t pos_check = PerlIO_tell(infile);
#endif

    if (min_wanted > max_wanted) {
      max_wanted = min_wanted;
    }

    // Adjust actual amount to read by the amount we already have in the buffer
    actual_wanted = max_wanted - buffer_len(buf);

    New(0, tmp, actual_wanted, unsigned char);

    DEBUG_TRACE("Buffering from file @ %d (min_wanted %d, max_wanted %d, adjusted to %d)\n",
      (int)PerlIO_tell(infile), min_wanted, max_wanted, actual_wanted
    );

    if ( (read = PerlIO_read(infile, tmp, actual_wanted)) <= 0 ) {
      if ( PerlIO_error(infile) ) {
#ifdef _MSC_VER
        // Show windows specific error message as Win32 PerlIO_read does not set errno
        DWORD last_error = GetLastError();
        LPWSTR *errmsg = NULL;
        FormatMessage(FORMAT_MESSAGE_ALLOCATE_BUFFER | FORMAT_MESSAGE_FROM_SYSTEM,
          0, last_error, 0, (LPWSTR)&errmsg, 0, NULL);
        warn("Error reading: %d %s (read %d wanted %d)\n", last_error, errmsg, read, actual_wanted);
        LocalFree(errmsg);
#else
        warn("Error reading: %s (wanted %d)\n", strerror(errno), actual_wanted);
#endif
      }
      else {
        warn("Error: Unable to read at least %d bytes from file.\n", min_wanted);
      }

      ret = 0;
      goto out;
    }

    buffer_append(buf, tmp, read);

    // Make sure we got enough
    if ( buffer_len(buf) < min_wanted ) {
      warn("Error: Unable to read at least %d bytes from file (only read %d).\n", min_wanted, read);
      ret = 0;
      goto out;
    }

#ifdef _MSC_VER
    // Bug 16095, weird off-by-one bug seen only on Win32 and only when reading a filehandle
    if (PerlIO_tell(infile) != pos_check + read) {
      //PerlIO_printf(PerlIO_stderr(), "Win32 bug, pos should be %d, but was %d\n", pos_check + read, PerlIO_tell(infile));
      PerlIO_seek(infile, pos_check + read, SEEK_SET);
    }
#endif

    DEBUG_TRACE("Buffered %d bytes, new pos %d\n", read, (int)PerlIO_tell(infile));

out:
    Safefree(tmp);
  }

  return ret;
}
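/*
 * Illustrative usage sketch (not part of the original source; the helper name
 * "read_chunk_header" is hypothetical): callers such as the WAV/AIFF parsers
 * below guarantee a minimum number of readable bytes with _check_buf() before
 * touching the buffer, then consume the 8-byte chunk header.
 */
static int
read_chunk_header(PerlIO *infile, Buffer *buf, char *id, uint32_t *size)
{
  // Make sure at least the 4-byte ID and 4-byte size are buffered
  if ( !_check_buf(infile, buf, 8, WAV_BLOCK_SIZE) ) {
    return 0;
  }

  // id must point to at least 5 bytes of storage
  strncpy( id, (char *)buffer_ptr(buf), 4 );
  id[4] = '\0';
  buffer_consume(buf, 4);

  *size = buffer_get_int_le(buf);  // RIFF/WAV chunk sizes are little-endian

  return 1;
}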
void
_parse_wav(PerlIO *infile, Buffer *buf, char *file, uint32_t file_size, HV *info, HV *tags)
{
  uint32_t offset = 12;

  while ( offset < file_size - 8 ) {
    char chunk_id[5];
    uint32_t chunk_size;

    // Verify we have at least 8 bytes
    if ( !_check_buf(infile, buf, 8, WAV_BLOCK_SIZE) ) {
      return;
    }

    strncpy( chunk_id, (char *)buffer_ptr(buf), 4 );
    chunk_id[4] = '\0';
    buffer_consume(buf, 4);

    chunk_size = buffer_get_int_le(buf);

    // Adjust for padding
    if ( chunk_size % 2 ) {
      chunk_size++;
    }

    offset += 8;

    DEBUG_TRACE("%s size %d\n", chunk_id, chunk_size);

    // Seek past data, everything else we parse
    // XXX: Are there other large chunks we should ignore?
    if ( !strcmp( chunk_id, "data" ) ) {
      SV **bitrate;

      my_hv_store( info, "audio_offset", newSVuv(offset) );
      my_hv_store( info, "audio_size", newSVuv(chunk_size) );

      // Calculate duration, unless we already know it (i.e. from 'fact')
      if ( !my_hv_fetch( info, "song_length_ms" ) ) {
        bitrate = my_hv_fetch( info, "bitrate" );
        if (bitrate != NULL) {
          my_hv_store( info, "song_length_ms", newSVuv( (chunk_size / (SvIV(*bitrate) / 8.)) * 1000 ) );
        }
      }

      // sanity check size, this is inside the data chunk code
      // to support setting audio_offset even when the data size is wrong
      if (chunk_size > file_size - offset) {
        DEBUG_TRACE("data size > file_size, skipping\n");
        return;
      }

      // Seek past data if there are more chunks after it
      if ( file_size > offset + chunk_size ) {
        PerlIO_seek(infile, offset + chunk_size, SEEK_SET);
      }

      buffer_clear(buf);
    }
    else if ( !strcmp( chunk_id, "id3 " ) || !strcmp( chunk_id, "ID3 " ) || !strcmp( chunk_id, "ID32" ) ) {
      // Read header to verify version
      unsigned char *bptr = buffer_ptr(buf);

      if (
        (bptr[0] == 'I' && bptr[1] == 'D' && bptr[2] == '3') &&
        bptr[3] < 0xff && bptr[4] < 0xff &&
        bptr[6] < 0x80 && bptr[7] < 0x80 && bptr[8] < 0x80 && bptr[9] < 0x80
      ) {
        // Start parsing ID3 from offset
        parse_id3(infile, file, info, tags, offset, file_size);
      }

      // Seek past ID3 and clear buffer
      PerlIO_seek(infile, offset + chunk_size, SEEK_SET);
      buffer_clear(buf);
    }
    else {
      // sanity check size
      if (chunk_size > file_size - offset) {
        DEBUG_TRACE("chunk_size > file_size, skipping\n");
        return;
      }

      // Make sure we have enough data
      if ( !_check_buf(infile, buf, chunk_size, WAV_BLOCK_SIZE) ) {
        return;
      }

      if ( !strcmp( chunk_id, "fmt " ) ) {
        _parse_wav_fmt(buf, chunk_size, info);
      }
      else if ( !strcmp( chunk_id, "LIST" ) ) {
        _parse_wav_list(buf, chunk_size, tags);
      }
      else if ( !strcmp( chunk_id, "PEAK" ) ) {
        _parse_wav_peak(buf, chunk_size, info, 0);
      }
      else if ( !strcmp( chunk_id, "fact" ) ) {
        // A 4-byte fact chunk in a non-PCM wav is the number of samples
        // Use it to calculate duration
        if ( chunk_size == 4 ) {
          uint32_t num_samples = buffer_get_int_le(buf);
          SV **samplerate = my_hv_fetch( info, "samplerate" );
          if (samplerate != NULL) {
            my_hv_store( info, "song_length_ms", newSVuv( (num_samples * 1000) / SvIV(*samplerate) ) );
          }
        }
        else {
          // Unknown, skip it
          buffer_consume(buf, chunk_size);
        }
      }
      else {
        if (
             !strcmp(chunk_id, "SAUR")  // Wavosaur data chunk
          || !strcmp(chunk_id, "otom")  // Wavosaur?
          || !strcmp(chunk_id, "PAD ")  // Padding
        ) {
          // Known chunks to skip
        }
        else {
          // Warn about unknown chunks so we can investigate them
          PerlIO_printf(PerlIO_stderr(), "Unhandled WAV chunk %s size %d (skipped)\n", chunk_id, chunk_size);
        }

        buffer_consume(buf, chunk_size);
      }
    }

    offset += chunk_size;
  }
}
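/*
 * Worked example for the duration estimate above (illustrative numbers, not
 * from the original source): for 44.1 kHz / 16-bit / stereo PCM the "bitrate"
 * entry is 1,411,200 bits/s, i.e. 176,400 bytes/s.  A 10,584,000-byte data
 * chunk therefore yields 10584000 / 176400 * 1000 = 60000 ms of audio.
 */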
void
_parse_aiff(PerlIO *infile, Buffer *buf, char *file, uint32_t file_size, HV *info, HV *tags)
{
  uint32_t offset = 12;

  while ( offset < file_size - 8 ) {
    char chunk_id[5];
    int chunk_size;

    // Verify we have at least 8 bytes
    if ( !_check_buf(infile, buf, 8, WAV_BLOCK_SIZE) ) {
      return;
    }

    strncpy( chunk_id, (char *)buffer_ptr(buf), 4 );
    chunk_id[4] = '\0';
    buffer_consume(buf, 4);

    chunk_size = buffer_get_int(buf);

    // Adjust for padding
    if ( chunk_size % 2 ) {
      chunk_size++;
    }

    offset += 8;

    DEBUG_TRACE("%s size %d\n", chunk_id, chunk_size);

    // Seek past SSND, everything else we parse
    // XXX: Are there other large chunks we should ignore?
    if ( !strcmp( chunk_id, "SSND" ) ) {
      my_hv_store( info, "audio_offset", newSVuv(offset) );
      my_hv_store( info, "audio_size", newSVuv(chunk_size) );

      // Seek past data if there are more chunks after it
      if ( file_size > offset + chunk_size ) {
        PerlIO_seek(infile, offset + chunk_size, SEEK_SET);
      }

      buffer_clear(buf);
    }
    else if ( !strcmp( chunk_id, "id3 " ) || !strcmp( chunk_id, "ID3 " ) || !strcmp( chunk_id, "ID32" ) ) {
      // Read header to verify version
      unsigned char *bptr = buffer_ptr(buf);

      if (
        (bptr[0] == 'I' && bptr[1] == 'D' && bptr[2] == '3') &&
        bptr[3] < 0xff && bptr[4] < 0xff &&
        bptr[6] < 0x80 && bptr[7] < 0x80 && bptr[8] < 0x80 && bptr[9] < 0x80
      ) {
        // Start parsing ID3 from offset
        parse_id3(infile, file, info, tags, offset, file_size);
      }

      // Seen ID3 chunks with the chunk size in little-endian instead of big-endian
      if (chunk_size < 0 || offset + chunk_size > file_size) {
        break;
      }

      // Seek past ID3 and clear buffer
      DEBUG_TRACE("Seeking past ID3 to %d\n", offset + chunk_size);
      PerlIO_seek(infile, offset + chunk_size, SEEK_SET);
      buffer_clear(buf);
    }
    else {
      // Make sure we have enough data
      if ( !_check_buf(infile, buf, chunk_size, WAV_BLOCK_SIZE) ) {
        return;
      }

      if ( !strcmp( chunk_id, "COMM" ) ) {
        _parse_aiff_comm(buf, chunk_size, info);
      }
      else if ( !strcmp( chunk_id, "PEAK" ) ) {
        _parse_wav_peak(buf, chunk_size, info, 1);
      }
      else {
        PerlIO_printf(PerlIO_stderr(), "Unhandled AIFF chunk %s size %d (skipped)\n", chunk_id, chunk_size);
        buffer_consume(buf, chunk_size);
      }
    }

    offset += chunk_size;
  }
}