Example 1
ImgColor::ImgColor(uint32_t id, const void* data, size_t size) : ResourceImpl{id}
{
    BinReader reader(data, size);

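    // Header: the resource's own id (must match the requested id), an unknown field,
    // the image dimensions and the pixel format.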
    uint32_t resourceId = reader.readInt();
    assert(resourceId == id);
    UNUSED(resourceId);

    uint32_t unk1 = reader.readInt();
    assert(unk1 <= 0xA);
    UNUSED(unk1);

    uint32_t width = reader.readInt();
    assert(width <= 4096);

    uint32_t height = reader.readInt();
    assert(height <= 4096);

    PixelFormat format = static_cast<PixelFormat>(reader.readInt());

    if(format == PixelFormat::kCustomRawJPEG)
    {
        throw runtime_error("JPEG textures not supported");
    }

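    // The stored pixel buffer must be exactly large enough for width * height texels
    // at the format's bit depth.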
    uint32_t pixelsSize = reader.readInt();
    assert(pixelsSize * 8 == width * height * bitsPerPixel(format));

    const uint8_t* pixels = reader.readRaw(pixelsSize);

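    // Paletted formats store indices only; the colour table is a separate resource
    // referenced by id and fetched from the resource cache.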
    if(isPaletted(format))
    {
        uint32_t paletteId = reader.readInt();
        palette = Core::get().resourceCache().get(paletteId);
    }

    assert(reader.remaining() == 0);

    image.init(format, width, height, pixels);

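    // Resolve the stored indices to actual colours once both the image and its palette are loaded.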
    if(palette)
    {
        image.applyPalette(palette->cast<Palette>());
    }
}
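For reference, the sequence of reads above implies the following on-disk layout for an ImgColor resource. This is only a sketch inferred from the constructor; the struct name and field comments are illustrative and not taken from the original source, and the trailing palette id is present only for paletted formats.

// Inferred layout (consumed field by field via BinReader):
struct ImgColorLayout
{
    uint32_t resourceId;   // must equal the id the resource was requested with
    uint32_t unk1;         // unknown; the constructor asserts unk1 <= 0xA
    uint32_t width;        // asserted <= 4096
    uint32_t height;       // asserted <= 4096
    uint32_t format;       // PixelFormat enum value; kCustomRawJPEG is rejected
    uint32_t pixelsSize;   // width * height * bitsPerPixel(format) / 8
    // uint8_t  pixels[pixelsSize];
    // uint32_t paletteId;  // only if isPaletted(format)
};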
Example 2
bool generatePreview(const QString& textureFilename, const QString& paletteFilename, const QString& previewFilename, const QString& codeUsageFilename) {
	char	magic[4];
	qint16	width, height;
	qint32	textureType;
	qint32	textureSize;
	quint8* data = NULL;

	const bool genPreview = !previewFilename.isEmpty();
	const bool genCodeUsage = !codeUsageFilename.isEmpty();

	if (textureFilename.isEmpty()) {
		qCritical() << "generatePreview requires a texture filename";
		return false;
	}

	if (!genPreview && !genCodeUsage) {
		qCritical() << "generatePreview requires either a preview filename or a code usage filename";
		return false;
	}

	// Open up an input stream to read the texture
	QFile in(textureFilename);
	if (!in.open(QIODevice::ReadOnly)) {
		qCritical() << "Failed to open" << textureFilename;
		return false;
	}
	QDataStream stream(&in);
	stream.setByteOrder(QDataStream::LittleEndian);

	// Read the header
	stream.readRawData(magic, 4);
	stream >> width;
	stream >> height;
	stream >> textureType;
	stream >> textureSize;

	// Verify the header
	if (memcmp(magic, TEXTURE_MAGIC, 4) != 0) {
		qCritical() << textureFilename << "is not a valid texture file";
		in.close();
		return false;
	}

	// Read the texture data and close the stream
	data = new quint8[textureSize];
	stream.readRawData((char*)data, textureSize);
	in.close();

	if (!genPreview && !(textureType & FLAG_COMPRESSED)) {
		qCritical() << "generatePreview was told to only generate code usage, but texture is not compressed";
		delete[] data;
		return false;
	}

	// The width of strided textures is stored in the stride setting, not in the
	// width field, so unpack it here if necessary.
	if (textureType & FLAG_STRIDED) {
		width = (textureType & 31) * 32;
	}

	const int pixelFormat = (textureType >> PIXELFORMAT_SHIFT) & PIXELFORMAT_MASK;
	QVector<QImage> decodedImages;
	QVector<QImage> codeUsageImages;

	/*qDebug() << "Loaded texture" << textureFilename;
	qDebug("Width        : %d", width);
	qDebug("Height       : %d", height);
	qDebug("TextureType  : %08x", textureType);
	qDebug("Pixel format : %d", pixelFormat);
	qDebug("Size (bytes) : %d", textureSize);*/

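	// Strided textures are stored linearly (not twiddled), one 16-bit texel per pixel.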
	if (textureType & FLAG_STRIDED) {
		QImage img(width, height, QImage::Format_ARGB32);
		img.fill(Qt::transparent);

		if (pixelFormat == PIXELFORMAT_YUV422) {
			for (int y=0; y<height; y++) {
				for (int x=0; x<width; x+=2) {
					const int i0 = (y * width + x + 0) * 2;
					const int i1 = (y * width + x + 1) * 2;
					const quint16 p0 = qFromLittleEndian<quint16>(&data[i0]);
					const quint16 p1 = qFromLittleEndian<quint16>(&data[i1]);
					QRgb rgb0, rgb1;
					YUV422toRGB(p0, p1, rgb0, rgb1);
					img.setPixel(x + 0, y, rgb0);
					img.setPixel(x + 1, y, rgb1);
				}
			}
		} else {
			for (int y=0; y<height; y++) {
				for (int x=0; x<width; x++) {
					const int index = (y * width + x) * 2;
					const quint16 pixel = qFromLittleEndian<quint16>(&data[index]);
					img.setPixel(x, y, to32BPP(pixel, pixelFormat));
				}
			}
		}

		decodedImages.push_back(img);
	} else if (is16BPP(textureType) && !(textureType & FLAG_COMPRESSED)) {
		int currentWidth = width;
		int currentHeight = height;
		int offset = 0;

		if (textureType & FLAG_MIPMAPPED) {
			currentWidth = 1;
			currentHeight = 1;
			offset = MIPMAP_OFFSET_16BPP;
		}

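		// Decode from the smallest mipmap level upwards. Each level is stored twiddled:
		// the Twiddler maps a linear texel index in the data to its (x, y) position in the image.
		// push_front keeps the largest level at the front of decodedImages.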
		while (currentWidth <= width && currentHeight <= height) {
			QImage img(currentWidth, currentHeight, QImage::Format_ARGB32);
			img.fill(Qt::transparent);
			const Twiddler twiddler(currentWidth, currentHeight);
			const int pixels = currentWidth * currentHeight;

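			// YUV422 shares chroma between neighbouring texels, so texels are converted in pairs;
			// the 1x1 level is the exception and is stored as a single RGB565 texel.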
			if (pixelFormat == PIXELFORMAT_YUV422) {
				if (pixels == 1) {
					// The 1x1 mipmap level for YUV textures is stored as RGB565
					const quint16 texel = qFromLittleEndian<quint16>(&data[offset]);
					img.setPixel(0, 0, to32BPP(texel, PIXELFORMAT_RGB565));
				} else {
					for (int i=0; i<pixels; i+=4) {
						quint16 texel[4];
						QRgb pixel[4];

						for (int j=0; j<4; j++)
							texel[j] = qFromLittleEndian<quint16>(&data[offset + (i+j)*2]);

						YUV422toRGB(texel[0], texel[2], pixel[0], pixel[2]);
						YUV422toRGB(texel[1], texel[3], pixel[1], pixel[3]);

						for (int j=0; j<4; j++) {
							const int twidx = twiddler.index(i+j);
							const int x = twidx % currentWidth;
							const int y = twidx / currentWidth;
							img.setPixel(x, y, pixel[j]);
						}
					}
				}
			} else {
				for (int i=0; i<pixels; i++) {
					const quint16 texel = qFromLittleEndian<quint16>(&data[offset + i*2]);
					const QRgb pixel = to32BPP(texel, pixelFormat);
					const int twidx = twiddler.index(i);
					const int x = twidx % currentWidth;
					const int y = twidx / currentWidth;
					img.setPixel(x, y, pixel);
				}
			}

			decodedImages.push_front(img);

			offset += (currentWidth * currentHeight * 2);
			currentWidth *= 2;
			currentHeight *= 2;
		}
	} else if (isPaletted(textureType) && !(textureType & FLAG_COMPRESSED)) {
		if (paletteFilename.isEmpty()) {
			qCritical() << "Paletted texture requires a palette filename";
			delete[] data;
			return false;
		}
		Palette palette;
		if (!palette.load(paletteFilename)) {
			qCritical() << "Failed to load palette" << paletteFilename;
			delete[] data;
			return false;
		}

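		// 4bpp textures pack two palette indices per byte (low nibble first); 8bpp stores one index per byte.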
		if (isFormat(textureType, PIXELFORMAT_PAL4BPP)) {
			int currentWidth = width;
			int currentHeight = height;
			int offset = 0;

			if (textureType & FLAG_MIPMAPPED) {
				currentWidth = 1;
				currentHeight = 1;
				offset = MIPMAP_OFFSET_4BPP;
			}

			while (currentWidth <= width && currentHeight <= height) {
				QImage img(currentWidth, currentHeight, QImage::Format_ARGB32);
				img.fill(Qt::transparent);
				const Twiddler twiddler(currentWidth, currentHeight);
				const int pixels = (currentWidth * currentHeight) / 2;

				if (currentWidth == 1 && currentHeight == 1) {
					img.setPixel(0, 0, palette.colorAt(data[offset] & 0xf));
					offset++;
				} else {
					for (int i=0; i<pixels; i++) {
						const QRgb pixel0 = palette.colorAt((data[offset + i] >> 0) & 0xf);
						const QRgb pixel1 = palette.colorAt((data[offset + i] >> 4) & 0xf);
						const int twidx0 = twiddler.index(i * 2 + 0);
						const int twidx1 = twiddler.index(i * 2 + 1);
						const int x0 = twidx0 % currentWidth;
						const int y0 = twidx0 / currentWidth;
						img.setPixel(x0, y0, pixel0);
						const int x1 = twidx1 % currentWidth;
						const int y1 = twidx1 / currentWidth;
						img.setPixel(x1, y1, pixel1);
					}

					offset += (currentWidth * currentHeight) / 2;
				}

				decodedImages.push_front(img);

				currentWidth *= 2;
				currentHeight *= 2;
			}

		} else if (isFormat(textureType, PIXELFORMAT_PAL8BPP)) {
			int currentWidth = width;
			int currentHeight = height;
			int offset = 0;

			if (textureType & FLAG_MIPMAPPED) {
				currentWidth = 1;
				currentHeight = 1;
				offset = MIPMAP_OFFSET_8BPP;
			}

			while (currentWidth <= width && currentHeight <= height) {
				QImage img(currentWidth, currentHeight, QImage::Format_ARGB32);
				img.fill(Qt::transparent);
				const Twiddler twiddler(currentWidth, currentHeight);
				const int pixels = currentWidth * currentHeight;

				for (int i=0; i<pixels; i++) {
					const QRgb pixel = palette.colorAt(data[offset + i]);
					const int twidx = twiddler.index(i);
					const int x = twidx % currentWidth;
					const int y = twidx / currentWidth;
					img.setPixel(x, y, pixel);
				}

				decodedImages.push_front(img);
				offset += (currentWidth * currentHeight);

				currentWidth *= 2;
				currentHeight *= 2;
			}
		}
	} else if (is16BPP(textureType) && (textureType & FLAG_COMPRESSED)) {