Example #1
0
SIZE_T USoundWave::GetResourceSize(EResourceSizeMode::Type Mode)
{
	SIZE_T CalculatedResourceSize = 0;

	if (DecompressionType == DTYPE_Native)
	{
		// If the sound has been decompressed natively, account for the decompressed PCM data here; the compressed data is added below for non-streaming sounds.
		CalculatedResourceSize += RawPCMDataSize;
	}
	else if (DecompressionType == DTYPE_RealTime)
	{
		if (CachedRealtimeFirstBuffer)
		{
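			// Size of the first decoded PCM buffer cached to start real-time playback immediately.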
			CalculatedResourceSize += MONO_PCM_BUFFER_SIZE * NumChannels;
		}
	}

	if (GEngine && GEngine->GetMainAudioDevice())
	{
		// Don't add compressed data to size of streaming sounds
		if (!FPlatformProperties::SupportsAudioStreaming() || !IsStreaming())
		{
			CalculatedResourceSize += GetCompressedDataSize(GEngine->GetMainAudioDevice()->GetRuntimeFormat(this));
		}
	}

	return CalculatedResourceSize;
}
Example #2
SkData* CompressBitmapToFormat(const SkPixmap& pixmap, Format format) {
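    // A negative size indicates the pixmap can't be compressed to the requested format.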
    int compressedDataSize = GetCompressedDataSize(format, pixmap.width(), pixmap.height());
    if (compressedDataSize < 0) {
        return NULL;
    }

    const uint8_t* src = reinterpret_cast<const uint8_t*>(pixmap.addr());
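    // Allocate an uninitialized SkData just large enough for the compressed output.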
    SkData* dst = SkData::NewUninitialized(compressedDataSize);

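    // Compress directly into the SkData's writable storage; release it on failure.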
    if (!CompressBufferToFormat(reinterpret_cast<uint8_t*>(dst->writable_data()), src,
                                pixmap.colorType(), pixmap.width(), pixmap.height(),
                                pixmap.rowBytes(), format)) {
        dst->unref();
        dst = NULL;
    }
    return dst;
}
Example #3
0
SkData *CompressBitmapToFormat(const SkBitmap &bitmap, Format format) {
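    // RAII lock that keeps the bitmap's pixels addressable for the rest of the function.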
    SkAutoLockPixels alp(bitmap);

    int compressedDataSize = GetCompressedDataSize(format, bitmap.width(), bitmap.height());
    if (compressedDataSize < 0) {
        return NULL;
    }

    const uint8_t* src = reinterpret_cast<const uint8_t*>(bitmap.getPixels());
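    // Allocate a scratch buffer sized for the compressed output.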
    uint8_t* dst = reinterpret_cast<uint8_t*>(sk_malloc_throw(compressedDataSize));

    if (CompressBufferToFormat(dst, src, bitmap.colorType(), bitmap.width(), bitmap.height(),
                               bitmap.rowBytes(), format)) {
        return SkData::NewFromMalloc(dst, compressedDataSize);
    }

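    // Compression failed: free the scratch buffer and signal failure with NULL.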
    sk_free(dst);
    return NULL;
}
Example #4
0
SIZE_T USoundWave::GetResourceSize(EResourceSizeMode::Type Mode)
{
	if (!GEngine)
	{
		return 0;
	}

	SIZE_T CalculatedResourceSize = 0;

	if (FAudioDevice* LocalAudioDevice = GEngine->GetMainAudioDevice())
	{
		if (LocalAudioDevice->HasCompressedAudioInfoClass(this) && DecompressionType == DTYPE_Native)
		{
			// In non-editor builds ensure that the "native" sound wave has unloaded its compressed asset at this point.
			// DTYPE_Native assets fully decompress themselves on load and are supposed to unload the compressed asset when it finishes.
			// However, in the editor, it's possible for an asset to be DTYPE_Native but not referenced by the currently loaded level, and thus not
			// actually loaded (and fully decompressed) before its ResourceSize is queried.
			if (!GIsEditor)
			{
				ensureMsgf(ResourceSize == 0, TEXT("ResourceSize for DTYPE_Native USoundWave '%s' was not 0 (%d)."), *GetName(), ResourceSize);
			}
			CalculatedResourceSize = RawPCMDataSize;
		}
		else
		{
			if (DecompressionType == DTYPE_RealTime && CachedRealtimeFirstBuffer)
			{
				CalculatedResourceSize = MONO_PCM_BUFFER_SIZE * NumChannels;
			}
			
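			// Don't add compressed data to the size of streaming sounds; it stays resident only for non-streaming playback.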
			if (!FPlatformProperties::SupportsAudioStreaming() || !IsStreaming())
			{
				CalculatedResourceSize += GetCompressedDataSize(LocalAudioDevice->GetRuntimeFormat(this));
			}
		}
	}

	return CalculatedResourceSize;
}
Example #5
0
int32 USoundWave::GetResourceSizeForFormat(FName Format)
{
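	// The per-format footprint is just the compressed payload stored for that format.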
	return GetCompressedDataSize(Format);
}