/* * Receive interrupt. */ int berint(struct be_softc *sc) { struct qec_xd *xd = sc->sc_rb.rb_rxd; unsigned int bix, len; unsigned int nrbuf = sc->sc_rb.rb_nrbuf; bix = sc->sc_rb.rb_rdtail; /* * Process all buffers with valid data. */ for (;;) { len = xd[bix].xd_flags; if (len & QEC_XD_OWN) break; len &= QEC_XD_LENGTH; be_read(sc, bix, len); /* ... */ xd[(bix+nrbuf) % QEC_XD_RING_MAXSIZE].xd_flags = QEC_XD_OWN | (BE_PKT_BUF_SZ & QEC_XD_LENGTH); if (++bix == QEC_XD_RING_MAXSIZE) bix = 0; } sc->sc_rb.rb_rdtail = bix; return 1; }
/**
 * @brief Shared open logic: validate the raw header, parse the
 *        version-specific fields, check parent linkage, and finish opening.
 *
 * @param writeable true if the caller intends to write to the file
 * @return CHDERR_NONE on success, or the chd_error describing the failure
 */
chd_error chd_file::open_common(bool writeable)
{
	// wrap in try for proper error handling
	try
	{
		// reads are always permitted
		m_allow_reads = true;

		// pull in the raw header bytes
		UINT8 rawheader[MAX_HEADER_SIZE];
		file_read(0, rawheader, sizeof(rawheader));

		// the magic signature must be present
		if (memcmp(rawheader, "MComprHD", 8) != 0)
			throw CHDERR_INVALID_FILE;

		// only allow writes to the most recent version
		m_version = be_read(&rawheader[12], 4);
		if (writeable && m_version < HEADER_VERSION)
			throw CHDERR_UNSUPPORTED_VERSION;

		// dispatch to the version-specific header parser
		sha1_t parentsha1 = sha1_t::null;
		if (m_version == 3)
			parse_v3_header(rawheader, parentsha1);
		else if (m_version == 4)
			parse_v4_header(rawheader, parentsha1);
		else if (m_version == 5)
			parse_v5_header(rawheader, parentsha1);
		else
			throw CHDERR_UNSUPPORTED_VERSION;

		// the parser decides whether this format is writeable at all
		if (writeable && !m_allow_writes)
			throw CHDERR_FILE_NOT_WRITEABLE;

		// make sure we have a parent if we need one (and don't if we don't)
		if (parentsha1 != sha1_t::null)
		{
			if (m_parent == NULL)
				m_parent_missing = true;
			else if (m_parent->sha1() != parentsha1)
				throw CHDERR_INVALID_PARENT;
		}
		else if (m_parent != NULL)
			throw CHDERR_INVALID_PARAMETER;

		// finish opening the file
		create_open_common();
		return CHDERR_NONE;
	}

	// handle errors by closing ourself
	catch (chd_error &err)
	{
		close();
		return err;
	}
}
/*
 * Pump thread: copy everything arriving on the output pipe to stdout.
 */
void output_handle_th(void *p)
{
	char *buf = malloc(O_BUF);
	int nread, off = 0;

	if (!buf)
		return;

	/*
	 * NOTE(review): SO_NONBLOCK is not a portable setsockopt() option --
	 * blocking mode is normally toggled via fcntl(O_NONBLOCK) or
	 * ioctl(FIONBIO).  Confirm this call actually works on the target
	 * platform; on most systems it will fail silently.
	 */
	setsockopt(ohpipe[1], SOL_SOCKET, SO_NONBLOCK, &off, sizeof(off));

	/* Drain the read end of the pipe to stdout until EOF or error. */
	for (;;) {
		nread = be_read(ohpipe[0], buf, O_BUF);
		if (nread <= 0)
			break;
		write(1, buf, nread);
	}

	free(buf);
}
/**
 * @brief Search the metadata linked list for the metaindex'th entry
 *        matching metatag (CHDMETATAG_WILDCARD matches anything).
 *
 * @param metatag tag to match, or CHDMETATAG_WILDCARD
 * @param metaindex zero-based index among matching entries
 * @param metaentry receives the decoded header of the found entry
 * @param resume true to continue from the previous result in metaentry
 * @return true if a matching entry was found
 */
bool chd_file::metadata_find(chd_metadata_tag metatag, INT32 metaindex, metadata_entry &metaentry, bool resume)
{
	if (resume)
	{
		// continue from where the previous search left off
		metaentry.prev = metaentry.offset;
		metaentry.offset = metaentry.next;
	}
	else
	{
		// fresh search: start at the head of the metadata list
		metaentry.offset = m_metaoffset;
		metaentry.prev = 0;
	}

	// walk the singly-linked list of metadata headers (offset 0 terminates)
	for ( ; metaentry.offset != 0; metaentry.prev = metaentry.offset, metaentry.offset = metaentry.next)
	{
		// fetch the raw header at this link
		UINT8 raw_meta_header[METADATA_HEADER_SIZE];
		file_read(metaentry.offset, raw_meta_header, sizeof(raw_meta_header));

		// decode tag(4) + flags(1) + length(3) + next(8)
		metaentry.metatag = be_read(&raw_meta_header[0], 4);
		metaentry.flags = raw_meta_header[4];
		metaentry.length = be_read(&raw_meta_header[5], 3);
		metaentry.next = be_read(&raw_meta_header[8], 8);

		// count down matches until we reach the requested index
		// (metaindex-- only evaluates when the tag matches)
		if ((metatag == CHDMETATAG_WILDCARD || metaentry.metatag == metatag) && metaindex-- == 0)
			return true;
	}

	// exhausted the list without a hit
	return false;
}
/**
 * @brief Decode a V5 CHD header into member state.
 *
 * @param rawheader raw header bytes read from the start of the file
 * @param parentsha1 receives the parent SHA-1 stored in the header
 * @throws CHDERR_INVALID_FILE if the stored header length is not V5's
 */
void chd_file::parse_v5_header(UINT8 *rawheader, sha1_t &parentsha1)
{
	// the stored header length must match the V5 layout
	if (be_read(&rawheader[8], 4) != V5_HEADER_SIZE)
		throw CHDERR_INVALID_FILE;

	// codec list: up to four compression types, stored back to back
	m_compression[0] = be_read(&rawheader[16], 4);
	m_compression[1] = be_read(&rawheader[20], 4);
	m_compression[2] = be_read(&rawheader[24], 4);
	m_compression[3] = be_read(&rawheader[28], 4);

	// core geometry fields
	m_logicalbytes = be_read(&rawheader[32], 8);
	m_mapoffset = be_read(&rawheader[40], 8);
	m_metaoffset = be_read(&rawheader[48], 8);
	m_hunkbytes = be_read(&rawheader[56], 4);
	m_unitbytes = be_read(&rawheader[60], 4);

	// derived counts, rounding up the final partial hunk/unit
	m_hunkcount = (m_logicalbytes + m_hunkbytes - 1) / m_hunkbytes;
	m_unitcount = (m_logicalbytes + m_unitbytes - 1) / m_unitbytes;

	// only uncompressed V5 files may be written to
	m_allow_writes = !compressed();

	// fixed field positions within the V5 header
	m_mapoffset_offset = 40;
	m_metaoffset_offset = 48;
	m_sha1_offset = 84;
	m_rawsha1_offset = 64;
	m_parentsha1_offset = 104;

	// compressed maps use 12-byte entries; uncompressed maps use 4
	m_mapentrybytes = compressed() ? 12 : 4;

	// hand back the parent SHA-1 for linkage validation by the caller
	parentsha1 = be_read_sha1(&rawheader[m_parentsha1_offset]);
}
/**
 * @brief Decode a V4 CHD header into member state.
 *
 * @param rawheader raw header bytes read from the start of the file
 * @param parentsha1 receives the parent SHA-1 if the flags indicate one
 * @throws CHDERR_INVALID_FILE if the stored header length is not V4's
 * @throws CHDERR_UNKNOWN_COMPRESSION on an unrecognized compression code
 */
void chd_file::parse_v4_header(UINT8 *rawheader, sha1_t &parentsha1)
{
	// the stored header length must match the V4 layout
	if (be_read(&rawheader[8], 4) != V4_HEADER_SIZE)
		throw CHDERR_INVALID_FILE;

	// core geometry; the V4 map always starts right after the header
	m_logicalbytes = be_read(&rawheader[28], 8);
	m_mapoffset = 108;
	m_metaoffset = be_read(&rawheader[36], 8);
	m_hunkbytes = be_read(&rawheader[44], 4);
	m_hunkcount = be_read(&rawheader[24], 4);

	// flags: bit 0 = has parent, bit 1 = read-only
	UINT32 flags = be_read(&rawheader[16], 4);
	m_allow_writes = (flags & 2) == 0;

	// map the legacy compression code onto a modern codec
	UINT32 comptype = be_read(&rawheader[20], 4);
	if (comptype == 0)
		m_compression[0] = CHD_CODEC_NONE;
	else if (comptype == 1 || comptype == 2)
		m_compression[0] = CHD_CODEC_ZLIB;
	else if (comptype == 3)
		m_compression[0] = CHD_CODEC_AVHUFF;
	else
		throw CHDERR_UNKNOWN_COMPRESSION;
	m_compression[1] = m_compression[2] = m_compression[3] = CHD_CODEC_NONE;

	// fixed field positions within the V4 header
	m_mapoffset_offset = 0;
	m_metaoffset_offset = 36;
	m_sha1_offset = 48;
	m_rawsha1_offset = 88;
	m_parentsha1_offset = 68;

	// V4 map entries are always 16 bytes
	m_mapentrybytes = 16;

	// extract the parent SHA-1 only when the header says one exists
	if (flags & 1)
		parentsha1 = be_read_sha1(&rawheader[m_parentsha1_offset]);

	// V4 stores no unit size; guess it by snooping the metadata
	m_unitbytes = guess_unitbytes();
	m_unitcount = (m_logicalbytes + m_unitbytes - 1) / m_unitbytes;
}
/**
 * Reads a Big-Endian encoded uint32_t from a FILE.
 *
 * @param[out] to   pointer to target uint32_t
 * @param[in]  file file to read from
 *
 * @return true on success
 */
bool be_read_ui(uint32_t* to, FILE* file)
{
	return be_read(to, file);
}
/**
 * Reads a Big-Endian encoded uint16_t from a FILE.
 *
 * @param[out] to   pointer to target uint16_t
 * @param[in]  file file to read from
 *
 * @return true on success
 */
bool be_read_us(uint16_t* to, FILE* file)
{
	return be_read(to, file);
}
/**
 * @brief Inflate the compressed V5 hunk map into m_rawmap.
 *
 * Reads the 16-byte map header, Huffman+RLE decodes the per-hunk
 * compression types, then extracts each entry's length/offset/CRC from
 * the bitstream into fixed 12-byte raw map records, and finally checks
 * the whole decoded map against the stored CRC-16.
 *
 * @throws CHDERR_DECOMPRESSION_ERROR on a bad Huffman tree or CRC mismatch
 */
void chd_file::decompress_v5_map()
{
	// if no offset, we haven't written the map yet; flag all entries invalid
	if (m_mapoffset == 0)
	{
		memset(m_rawmap, 0xff, m_rawmap.count());
		return;
	}

	// read the 16-byte map header
	UINT8 rawbuf[16];
	file_read(m_mapoffset, rawbuf, sizeof(rawbuf));
	UINT32 mapbytes = be_read(&rawbuf[0], 4);    // size of the compressed map data
	UINT64 firstoffs = be_read(&rawbuf[4], 6);   // file offset of the first block
	UINT16 mapcrc = be_read(&rawbuf[10], 2);     // CRC-16 of the decoded raw map
	UINT8 lengthbits = rawbuf[12];               // bit width of length fields
	UINT8 selfbits = rawbuf[13];                 // bit width of self-reference fields
	UINT8 parentbits = rawbuf[14];               // bit width of parent-reference fields

	// now read the compressed map itself
	dynamic_buffer compressed(mapbytes);
	file_read(m_mapoffset + 16, compressed, mapbytes);
	bitstream_in bitbuf(compressed, compressed.count());

	// first decode the per-hunk compression types (Huffman coded with RLE)
	huffman_decoder<16, 8> decoder;
	huffman_error err = decoder.import_tree_rle(bitbuf);
	if (err != HUFFERR_NONE)
		throw CHDERR_DECOMPRESSION_ERROR;
	UINT8 lastcomp = 0;
	int repcount = 0;
	for (int hunknum = 0; hunknum < m_hunkcount; hunknum++)
	{
		UINT8 *rawmap = &m_rawmap[hunknum * 12];
		// inside a run: repeat the previous compression type
		if (repcount > 0)
			rawmap[0] = lastcomp, repcount--;
		else
		{
			UINT8 val = decoder.decode_one(bitbuf);
			// small RLE: run length 2..257
			if (val == COMPRESSION_RLE_SMALL)
				rawmap[0] = lastcomp, repcount = 2 + decoder.decode_one(bitbuf);
			// large RLE: run length built from two 4-bit-shifted codes
			else if (val == COMPRESSION_RLE_LARGE)
				rawmap[0] = lastcomp, repcount = 2 + 16 + (decoder.decode_one(bitbuf) << 4), repcount += decoder.decode_one(bitbuf);
			// literal compression type
			else
				rawmap[0] = lastcomp = val;
		}
	}

	// then iterate through the hunks and extract the needed data
	UINT64 curoffset = firstoffs;
	UINT32 last_self = 0;
	UINT64 last_parent = 0;
	for (int hunknum = 0; hunknum < m_hunkcount; hunknum++)
	{
		UINT8 *rawmap = &m_rawmap[hunknum * 12];
		UINT64 offset = curoffset;
		UINT32 length = 0;
		UINT16 crc = 0;
		switch (rawmap[0])
		{
			// base types: compressed blocks advance the running file offset
			case COMPRESSION_TYPE_0:
			case COMPRESSION_TYPE_1:
			case COMPRESSION_TYPE_2:
			case COMPRESSION_TYPE_3:
				curoffset += length = bitbuf.read(lengthbits);
				crc = bitbuf.read(16);
				break;

			// stored uncompressed: length is always a full hunk
			case COMPRESSION_NONE:
				curoffset += length = m_hunkbytes;
				crc = bitbuf.read(16);
				break;

			// explicit reference to an earlier hunk in this file
			case COMPRESSION_SELF:
				last_self = offset = bitbuf.read(selfbits);
				break;

			// explicit reference into the parent, in units
			case COMPRESSION_PARENT:
				offset = bitbuf.read(parentbits);
				last_parent = offset;
				break;

			// pseudo-types; convert into base types
			case COMPRESSION_SELF_1:
				last_self++;
				// fall through: same as SELF_0 after the bump
			case COMPRESSION_SELF_0:
				rawmap[0] = COMPRESSION_SELF;
				offset = last_self;
				break;

			// parent reference at the unit corresponding to this hunk
			case COMPRESSION_PARENT_SELF:
				rawmap[0] = COMPRESSION_PARENT;
				last_parent = offset = (UINT64(hunknum) * UINT64(m_hunkbytes)) / m_unitbytes;
				break;

			case COMPRESSION_PARENT_1:
				last_parent += m_hunkbytes / m_unitbytes;
				// fall through: same as PARENT_0 after the bump
			case COMPRESSION_PARENT_0:
				rawmap[0] = COMPRESSION_PARENT;
				offset = last_parent;
				break;
		}
		// store the decoded entry: type(1) + length(3) + offset(6) + crc(2)
		be_write(&rawmap[1], length, 3);
		be_write(&rawmap[4], offset, 6);
		be_write(&rawmap[10], crc, 2);
	}

	// verify the final CRC over the fully decoded raw map
	if (crc16_creator::simple(m_rawmap, m_hunkcount * 12) != mapcrc)
		throw CHDERR_DECOMPRESSION_ERROR;
}
/**
 * @brief Read and decode one hunk into the caller's buffer.
 *
 * Dispatches on the file version and the map entry type: decompresses,
 * copies uncompressed data, expands "mini" repeated patterns, or chases
 * self/parent references.
 *
 * @param hunknum index of the hunk to read; must be < m_hunkcount
 * @param buffer destination buffer, at least m_hunkbytes bytes
 * @return CHDERR_NONE on success, or a chd_error code describing the failure
 */
chd_error chd_file::read_hunk(UINT32 hunknum, void *buffer)
{
	// wrap this for clean reporting
	try
	{
		// punt if no file
		if (m_file == NULL)
			throw CHDERR_NOT_OPEN;

		// return an error if out of range
		if (hunknum >= m_hunkcount)
			throw CHDERR_HUNK_OUT_OF_RANGE;

		// get a pointer to the map entry
		UINT64 blockoffs;
		UINT32 blocklen;
		UINT32 blockcrc;
		UINT8 *rawmap;
		UINT8 *dest = reinterpret_cast<UINT8 *>(buffer);
		switch (m_version)
		{
			// v3/v4 map entries: 16 bytes = offset(8) + crc(4) + len/flags(4)
			case 3:
			case 4:
				rawmap = m_rawmap + 16 * hunknum;
				blockoffs = be_read(&rawmap[0], 8);
				blockcrc = be_read(&rawmap[8], 4);
				switch (rawmap[15] & V34_MAP_ENTRY_FLAG_TYPE_MASK)
				{
					case V34_MAP_ENTRY_TYPE_COMPRESSED:
						// length is 16 low bits plus one high byte
						blocklen = be_read(&rawmap[12], 2) + (rawmap[14] << 16);
						file_read(blockoffs, m_compressed, blocklen);
						m_decompressor[0]->decompress(m_compressed, blocklen, dest, m_hunkbytes);
						if (!(rawmap[15] & V34_MAP_ENTRY_FLAG_NO_CRC) && dest != NULL && crc32_creator::simple(dest, m_hunkbytes) != blockcrc)
							throw CHDERR_DECOMPRESSION_ERROR;
						return CHDERR_NONE;

					case V34_MAP_ENTRY_TYPE_UNCOMPRESSED:
						file_read(blockoffs, dest, m_hunkbytes);
						if (!(rawmap[15] & V34_MAP_ENTRY_FLAG_NO_CRC) && crc32_creator::simple(dest, m_hunkbytes) != blockcrc)
							throw CHDERR_DECOMPRESSION_ERROR;
						return CHDERR_NONE;

					case V34_MAP_ENTRY_TYPE_MINI:
						// "mini" entries replicate an 8-byte pattern across the hunk
						be_write(dest, blockoffs, 8);
						for (UINT32 bytes = 8; bytes < m_hunkbytes; bytes++)
							dest[bytes] = dest[bytes - 8];
						if (!(rawmap[15] & V34_MAP_ENTRY_FLAG_NO_CRC) && crc32_creator::simple(dest, m_hunkbytes) != blockcrc)
							throw CHDERR_DECOMPRESSION_ERROR;
						return CHDERR_NONE;

					case V34_MAP_ENTRY_TYPE_SELF_HUNK:
						// reference to another hunk within this same file
						return read_hunk(blockoffs, dest);

					case V34_MAP_ENTRY_TYPE_PARENT_HUNK:
						if (m_parent_missing)
							throw CHDERR_REQUIRES_PARENT;
						return m_parent->read_hunk(blockoffs, dest);
				}
				break;

			// v5 map entries
			case 5:
				rawmap = m_rawmap + m_mapentrybytes * hunknum;

				// uncompressed case: the 4-byte entry is a hunk-sized block index
				if (!compressed())
				{
					blockoffs = UINT64(be_read(rawmap, 4)) * UINT64(m_hunkbytes);
					if (blockoffs != 0)
						file_read(blockoffs, dest, m_hunkbytes);
					// zero entry means the data lives in the parent (or is all zero)
					else if (m_parent_missing)
						throw CHDERR_REQUIRES_PARENT;
					else if (m_parent != NULL)
						m_parent->read_hunk(hunknum, dest);
					else
						memset(dest, 0, m_hunkbytes);
					return CHDERR_NONE;
				}

				// compressed case: type(1) + length(3) + offset(6) + crc16(2)
				blocklen = be_read(&rawmap[1], 3);
				blockoffs = be_read(&rawmap[4], 6);
				blockcrc = be_read(&rawmap[10], 2);
				switch (rawmap[0])
				{
					case COMPRESSION_TYPE_0:
					case COMPRESSION_TYPE_1:
					case COMPRESSION_TYPE_2:
					case COMPRESSION_TYPE_3:
						file_read(blockoffs, m_compressed, blocklen);
						m_decompressor[rawmap[0]]->decompress(m_compressed, blocklen, dest, m_hunkbytes);
						// lossless codecs: verify the decompressed output
						if (!m_decompressor[rawmap[0]]->lossy() && dest != NULL && crc16_creator::simple(dest, m_hunkbytes) != blockcrc)
							throw CHDERR_DECOMPRESSION_ERROR;
						// lossy codecs: verify the compressed input instead
						if (m_decompressor[rawmap[0]]->lossy() && crc16_creator::simple(m_compressed, blocklen) != blockcrc)
							throw CHDERR_DECOMPRESSION_ERROR;
						return CHDERR_NONE;

					case COMPRESSION_NONE:
						file_read(blockoffs, dest, m_hunkbytes);
						if (crc16_creator::simple(dest, m_hunkbytes) != blockcrc)
							throw CHDERR_DECOMPRESSION_ERROR;
						return CHDERR_NONE;

					case COMPRESSION_SELF:
						return read_hunk(blockoffs, dest);

					case COMPRESSION_PARENT:
						if (m_parent_missing)
							throw CHDERR_REQUIRES_PARENT;
						// parent references are stored in parent units, not bytes
						return m_parent->read_bytes(UINT64(blockoffs) * UINT64(m_parent->unit_bytes()), dest, m_hunkbytes);
				}
				break;
		}

		// if we get here, something was wrong
		throw CHDERR_READ_ERROR;
	}

	// just return errors
	catch (chd_error &err)
	{
		return err;
	}
}
/**
 * @brief Report how a given hunk is stored: which codec and how many
 *        compressed bytes it occupies.
 *
 * @param hunknum index of the hunk to query
 * @param compressor receives the codec type (CHD_CODEC_*)
 * @param compbytes receives the stored size in bytes (0 for references)
 * @return CHDERR_NONE on success, CHDERR_HUNK_OUT_OF_RANGE if hunknum is invalid
 *
 * NOTE(review): on an unrecognized m_version or map entry type the
 * out-parameters are left untouched while CHDERR_NONE is still returned --
 * callers presumably never hit that path; confirm before relying on it.
 */
chd_error chd_file::hunk_info(UINT32 hunknum, chd_codec_type &compressor, UINT32 &compbytes)
{
	// error if invalid
	if (hunknum >= m_hunkcount)
		return CHDERR_HUNK_OUT_OF_RANGE;

	// get the map pointer
	UINT8 *rawmap;
	switch (m_version)
	{
		// v3/v4 map entries: 16 bytes each, type in the low bits of byte 15
		case 3:
		case 4:
			rawmap = m_rawmap + 16 * hunknum;
			switch (rawmap[15] & V34_MAP_ENTRY_FLAG_TYPE_MASK)
			{
				case V34_MAP_ENTRY_TYPE_COMPRESSED:
					compressor = CHD_CODEC_ZLIB;
					// length is 16 low bits plus one high byte
					compbytes = be_read(&rawmap[12], 2) + (rawmap[14] << 16);
					break;

				case V34_MAP_ENTRY_TYPE_UNCOMPRESSED:
					compressor = CHD_CODEC_NONE;
					compbytes = m_hunkbytes;
					break;

				case V34_MAP_ENTRY_TYPE_MINI:
					compressor = CHD_CODEC_MINI;
					compbytes = 0;
					break;

				case V34_MAP_ENTRY_TYPE_SELF_HUNK:
					compressor = CHD_CODEC_SELF;
					compbytes = 0;
					break;

				case V34_MAP_ENTRY_TYPE_PARENT_HUNK:
					compressor = CHD_CODEC_PARENT;
					compbytes = 0;
					break;
			}
			break;

		// v5 map entries
		case 5:
			rawmap = m_rawmap + m_mapentrybytes * hunknum;

			// uncompressed case: zero entry means the data comes from the parent
			if (!compressed())
			{
				if (be_read(&rawmap[0], 4) == 0)
				{
					compressor = CHD_CODEC_PARENT;
					compbytes = 0;
				}
				else
				{
					compressor = CHD_CODEC_NONE;
					compbytes = m_hunkbytes;
				}
				break;
			}

			// compressed case: type byte selects the codec slot
			switch (rawmap[0])
			{
				case COMPRESSION_TYPE_0:
				case COMPRESSION_TYPE_1:
				case COMPRESSION_TYPE_2:
				case COMPRESSION_TYPE_3:
					compressor = m_compression[rawmap[0]];
					compbytes = be_read(&rawmap[1], 3);
					break;

				case COMPRESSION_NONE:
					compressor = CHD_CODEC_NONE;
					compbytes = m_hunkbytes;
					break;

				case COMPRESSION_SELF:
					compressor = CHD_CODEC_SELF;
					compbytes = 0;
					break;

				case COMPRESSION_PARENT:
					compressor = CHD_CODEC_PARENT;
					compbytes = 0;
					break;
			}
			break;
	}
	return CHDERR_NONE;
}