Example #1
void TSorterListItem::Init()
{
	// Set DataType based on entry_ref
	BFile theFile; 
	if ( theFile.SetTo(&m_EntryRef, B_READ_WRITE) == B_OK )
	{		
		// Create node info from the file
		BNodeInfo nodeInfo(&theFile);
		if (nodeInfo.InitCheck() == B_NO_ERROR)
		{
			if (IsAudio(nodeInfo))
				m_DataType = kAudioType;
			else if (IsImage(nodeInfo))
				m_DataType = kPictureType;
			else if (IsText(nodeInfo))
				m_DataType = kTextType;
			else if (IsVideo(nodeInfo))
				m_DataType = kVideoType;
			else
				m_DataType = kUnknownType;
		}
		else
		{
			m_DataType = kUnknownType;	
		}	
		
		theFile.Unset();
	}
}
//-----------------------------------------------------------------------------
// Purpose: Draws one frame of the sprite at the given position and size,
//			optionally restricted to a sub-rectangle of the frame
// Input  : nRenderMode - render mode used to select the sprite material
//			frame - frame index to draw
//			x, y - screen position of the upper-left corner
//			iWidth, iHeight - size to draw at
//			*prcSubRect - optional sub-rectangle of the frame to draw
//-----------------------------------------------------------------------------
void CEngineSprite::DrawFrameOfSize( RenderMode_t nRenderMode, int frame, int x, int y, int iWidth, int iHeight, const wrect_t *prcSubRect )
{
	// FIXME: If we ever call this with AVIs, need to have it call GetTexCoordRange and make that work
	Assert( !IsVideo() );
	float fLeft = 0;
	float fRight = 1;
	float fTop = 0;
	float fBottom = 1;

	if ( prcSubRect )
	{
		AdjustSubRect( this, frame, &fLeft, &fRight, &fTop, &fBottom, &iWidth, &iHeight, prcSubRect );
	}

	if ( giScissorTest && !Scissor( x, y, iWidth, iHeight, fLeft, fTop, fRight, fBottom ) )
		return;

	SetFrame( nRenderMode, frame );

	CMatRenderContextPtr pRenderContext( materials );
	IMesh* pMesh = pRenderContext->GetDynamicMesh( true, NULL, NULL, GetMaterial( nRenderMode ) );

	CMeshBuilder meshBuilder;
	meshBuilder.Begin( pMesh, MATERIAL_QUADS, 1 );

	float color[3];
	GetHUDSpriteColor( color );
	
	meshBuilder.Color3fv( color );
	meshBuilder.TexCoord2f( 0, fLeft, fTop );
	meshBuilder.Position3f( x, y, 0.0f );
	meshBuilder.AdvanceVertex();

	meshBuilder.Color3fv( color );
	meshBuilder.TexCoord2f( 0, fRight, fTop );
	meshBuilder.Position3f( x + iWidth, y, 0.0f );
	meshBuilder.AdvanceVertex();

	meshBuilder.Color3fv( color );
	meshBuilder.TexCoord2f( 0, fRight, fBottom );
	meshBuilder.Position3f( x + iWidth, y + iHeight, 0.0f );
	meshBuilder.AdvanceVertex();

	meshBuilder.Color3fv( color );
	meshBuilder.TexCoord2f( 0, fLeft, fBottom );
	meshBuilder.Position3f( x, y + iHeight, 0.0f );
	meshBuilder.AdvanceVertex();

	meshBuilder.End();
	pMesh->Draw();
}
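//-----------------------------------------------------------------------------
// Usage sketch (not from the original source): drawing the top-left quarter of
// frame 0 at screen position (32, 32). The pSprite pointer and the 64x64 draw
// size are assumptions for illustration only.
//-----------------------------------------------------------------------------
void ExampleDrawQuarterFrame( CEngineSprite *pSprite )
{
	// Sub-rectangle covering the top-left quarter of the frame
	wrect_t rc;
	rc.left   = 0;
	rc.top    = 0;
	rc.right  = 32;
	rc.bottom = 32;

	// kRenderNormal is the standard opaque render mode
	pSprite->DrawFrameOfSize( kRenderNormal, 0, 32, 32, 64, 64, &rc );
}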
//-----------------------------------------------------------------------------
// Returns the texture coordinate range used to draw the sprite
//-----------------------------------------------------------------------------
void CEngineSprite::GetTexCoordRange( float *pMinU, float *pMinV, float *pMaxU, float *pMaxV )
{
	*pMaxU = 1.0f; 
	*pMaxV = 1.0f;
	if ( IsVideo() )
	{
		m_VideoMaterial->GetVideoTexCoordRange( pMaxU, pMaxV );
	}
	
	float flOOWidth = ( m_width != 0 ) ? 1.0f / m_width : 1.0f;
	float flOOHeight = ( m_height!= 0 ) ? 1.0f / m_height : 1.0f;

	*pMinU = 0.5f * flOOWidth; 
	*pMinV = 0.5f * flOOHeight;
	*pMaxU = (*pMaxU) - (*pMinU);
	*pMaxV = (*pMaxV) - (*pMinV);
}
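//-----------------------------------------------------------------------------
// Usage sketch (not from the original source): fetching the texture coordinate
// range before building a quad. For a 64x64 sprite the half-texel inset above
// works out to minU = 0.5/64 = 0.0078125 and maxU = 1.0 - 0.0078125 = 0.9921875.
//-----------------------------------------------------------------------------
void ExampleTexCoordRange( CEngineSprite *pSprite )
{
	float flMinU, flMinV, flMaxU, flMaxV;
	pSprite->GetTexCoordRange( &flMinU, &flMinV, &flMaxU, &flMaxV );

	// A full-sprite quad would then use (flMinU, flMinV) and (flMaxU, flMaxV)
	// instead of the raw 0..1 range, which keeps bilinear filtering from
	// bleeding in neighboring texels at the edges.
}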
void CEngineSprite::SetFrame( RenderMode_t nRenderMode, int nFrame )
{
	if ( IsVideo() )
	{
		m_VideoMaterial->SetFrame( nFrame );
		return;
	}

	IMaterial *pMaterial = m_material[nRenderMode];
	if ( !pMaterial )
		return;

	IMaterialVar* pFrameVar = pMaterial->FindVarFast( "$frame", &frameCache );
	if ( pFrameVar )
	{
		pFrameVar->SetIntValue( nFrame );
	}
}
IMaterial *CEngineSprite::GetMaterial( RenderMode_t nRenderMode, int nFrame ) 
{
	if ( nRenderMode == kRenderNone || nRenderMode == kRenderEnvironmental )
		return NULL;

	if ( IsVideo() )
	{
		m_VideoMaterial->SetFrame( nFrame );
	}

	IMaterial *pMaterial = m_material[nRenderMode];
	if ( !pMaterial )
		return NULL;

	IMaterialVar* pFrameVar = pMaterial->FindVarFast( "$frame", &frameCache );
	if ( pFrameVar )
	{
		pFrameVar->SetIntValue( nFrame );
	}

	return pMaterial;
} 
Example #6
TMovieCue::TMovieCue(entry_ref &theRef, int16 id,  TCueChannel *parent, BRect bounds, uint32 startTime) : 
	TVisualCue(id, parent, bounds, startTime, "PictureCue")
{	
	// Init member variables 
	m_Editor		= NULL;
	m_File			= NULL;
	
	//
	// Attempt to load data file
	//
	
	BNode theNode(&theRef);
	if (theNode.InitCheck() != B_OK)
		return;
		
	BNodeInfo nodeInfo(&theNode);
	if (nodeInfo.InitCheck() != B_OK)
		return;

	// First, make sure we have a valid ref
	if ( IsVideo(nodeInfo) )
	{
		// 	Create a BMessage that includes the entry_ref to send to our open routine
		BMessage *theMessage = new BMessage(B_REFS_RECEIVED);
		theMessage->AddRef("refs", &theRef);
		
		bool retVal = LoadMovieFile(theMessage);
		
		// We are successful. Init the cue
		if (retVal)
		{
			Init();
		}
		// If we have an error, ask the user to locate a new data file
		else
		{
			ShowPanel();
		}
	}		
}
//-------------------------------------------------------------------------------------
// Converts to a premultiplied alpha version of the texture
//-------------------------------------------------------------------------------------
_Use_decl_annotations_
HRESULT PremultiplyAlpha( const Image& srcImage, ScratchImage& image )
{
    if ( !srcImage.pixels )
        return E_POINTER;

    if ( IsCompressed(srcImage.format)
         || IsVideo(srcImage.format)
         || IsTypeless(srcImage.format)
         || !HasAlpha(srcImage.format) )
        return HRESULT_FROM_WIN32( ERROR_NOT_SUPPORTED );

#ifdef _M_X64
    if ( (srcImage.width > 0xFFFFFFFF) || (srcImage.height > 0xFFFFFFFF) )
        return E_INVALIDARG;
#endif

    HRESULT hr = image.Initialize2D( srcImage.format, srcImage.width, srcImage.height, 1, 1 );
    if ( FAILED(hr) )
        return hr;
   
    const Image *rimage = image.GetImage( 0, 0, 0 );
    if ( !rimage )
    {
        image.Release();
        return E_POINTER;
    }

    hr = _PremultiplyAlpha( srcImage, *rimage );
    if ( FAILED(hr) )
    {
        image.Release();
        return hr;
    }

    return S_OK;
}
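//-------------------------------------------------------------------------------------
// Usage sketch (not part of the library code above): premultiplying a single image
// decoded from a WIC-supported file. LoadFromWICFile and the helper name are
// assumptions; the only call taken from the code above is PremultiplyAlpha.
//-------------------------------------------------------------------------------------
HRESULT PremultiplyFromFile( const wchar_t* szFile, ScratchImage& premultiplied )
{
    TexMetadata info;
    ScratchImage loaded;
    HRESULT hr = LoadFromWICFile( szFile, WIC_FLAGS_NONE, &info, loaded );
    if ( FAILED(hr) )
        return hr;

    const Image* img = loaded.GetImage( 0, 0, 0 );
    if ( !img )
        return E_POINTER;

    // Hand the decoded top-level image to the routine above
    return PremultiplyAlpha( *img, premultiplied );
}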
bool TRefFilter::Filter(const entry_ref *theRef, BNode *theNode, struct stat_beos *theStat, const char *mimetype)
{
	//	Create BEntry and traverse to get source ref
	BEntry entry(theRef, true);
	if (entry.InitCheck() != B_OK)
	{
		ERROR("TRefFilter::Filter() - Error creating BEntry -\n");
		return false;
	}
		
	//	Create a node from ref
	BNode localNode(theRef);
	if (localNode.InitCheck() != B_OK)
	{
		ERROR("TRefFilter::Filter() - Error creating BNode -\n");
		return false;
	}
		
	//	Get node info
	BNodeInfo nodeInfo(&localNode);	 
	if (nodeInfo.InitCheck() != B_OK)
	{
		ERROR("TRefFilter::Filter() - Error getting BNodeInfo -\n");
		return false;
	}
	
	//	Get stat info
	struct stat st;
	if (entry.GetStat(&st) != B_OK)
	{
		ERROR("TRefFilter::Filter() - Error getting stat info -\n");
		return false;	
	}
	
	switch(m_FilterType)
	{
		case kAudioFilter:
		{
			// Allow directories
			if (S_ISDIR(st.st_mode)) 
				return true;
					
			// Allow audio
			if (IsAudio(nodeInfo))
				return true;
		}
		break;
	
		case kAudioAiffFilter:
		{
			// Allow directories
			if (S_ISDIR(st.st_mode)) 
				return true;
					
			// Allow audio
			if (IsAudioAiff(nodeInfo))
				return true;
		}
		break;
			
		case kImageFilter:
			{
				// Allow directories
				if (S_ISDIR(st.st_mode)) 
					return true;
					
				// Allow images
				if (IsImage(nodeInfo))
					return true;							
			}
			break;
			
		case kTextFilter:
			{
				// Allow directories
				if (S_ISDIR(st.st_mode)) 
					return true;
					
				// Allow text
				if (IsText(nodeInfo))
					return true;
			}
			break;
			
		case kVideoFilter:
			{
				// Allow directories
				if (S_ISDIR(st.st_mode)) 
					return true;
					
				// Allow video
				if (IsVideo(nodeInfo))
					return true;
			}
			break;
			
		case kCueSheetFilter:
			{
				// Allow directories
				if (S_ISDIR(st.st_mode)) 
					return true;
					
				// Allow CueSheets
				if (IsCueSheet(nodeInfo))
					return true;
			}
			break;
		
		case kDirectoryFilter:
		{
			// Allow directories
			if (S_ISDIR(st.st_mode)) 
				return true;					
		}
		break;
		
		default:
			return true;
	}	
	
	// Fail if we get here
	return false;
}
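//	Usage sketch (not from the original project): installing the filter on an open
//	panel so only directories and video files appear. The TRefFilter constructor
//	taking a filter type is an assumption based on the m_FilterType member above.
void ShowVideoOpenPanel(BMessenger target)
{
	TRefFilter *filter = new TRefFilter(kVideoFilter);
	BFilePanel *panel  = new BFilePanel(B_OPEN_PANEL, &target);

	panel->SetRefFilter(filter);
	panel->Show();
}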
Example #9
HRESULT 
TrackWriter::Close(Atom* patm)
{
    smart_ptr<Atom> ptrak = patm->CreateAtom('trak');

    // track header tkhd
    smart_ptr<Atom> ptkhd = ptrak->CreateAtom('tkhd');
    BYTE b[24*4];
    ZeroMemory(b, (24*4));

    // duration in movie timescale
    LONGLONG scaledur = Duration() * m_pMovie->MovieScale() / UNITS;
    int cHdr = 6 * 4;
    if (scaledur > 0x7fffffff)
    {
        // use 64-bit version (64-bit create/modify times and duration)
        cHdr = 9*4;
        b[0] = 1;
        WriteLong(ID(), b+(5*4));
        WriteI64(scaledur, b+(7*4));
    }
    else
    {
        WriteLong(ID(), b+(3*4));     // 1-base track id
        WriteLong(long(scaledur), b+(5*4));
        cHdr = 6*4;
    }
    b[3] = 7;   // enabled, in movie and in preview

    if (IsAudio())
    {
        b[cHdr + 12] = 0x01;
    }
    b[cHdr + 17] = 1;
    b[cHdr + 33] = 1;
    b[cHdr + 48] = 0x40;
    if (IsVideo())
    {
		WriteShort(m_pType->Width(), &b[cHdr + 52]);
		WriteShort(m_pType->Height(), &b[cHdr + 56]);
    }

    ptkhd->Append(b, cHdr + 60);
    ptkhd->Close();

    // track ref tref
    Handler()->WriteTREF(ptrak);

    // edts -- used for first-sample offset
    // -- note, this is in movie timescale, not track
    m_Durations.WriteEDTS(ptrak, m_pMovie->MovieScale());

    smart_ptr<Atom> pmdia = ptrak->CreateAtom('mdia');

    // Media Header mdhd
    smart_ptr<Atom> pmdhd = pmdia->CreateAtom('mdhd');
    ZeroMemory(b, 9*4);
    
    // duration now in track timescale
    scaledur = m_Durations.Duration() * m_Durations.Scale() / UNITS;
    if (scaledur > 0x7fffffff)
    {
        b[0] = 1;       // 64-bit
        WriteLong(m_Durations.Scale(), b+20);
        WriteI64(scaledur, b+24);         
        cHdr = 8*4;
    }
    else
    {
        WriteLong(m_Durations.Scale(), b+12);
        WriteLong(long(scaledur), b+16);         
        cHdr = 5*4;
    }
    // language code 'eng' packed as a 0 pad bit plus three 5-bit letters offset from 0x60 (0x05, 0x0e, 0x07)
    b[cHdr] = 0x15;
    b[cHdr+1] = 0xc7;
    pmdhd->Append(b, cHdr + 4);
    pmdhd->Close();

    // handler id hdlr
    smart_ptr<Atom> phdlr = pmdia->CreateAtom('hdlr');
    ZeroMemory(b, 25);
	WriteLong(Handler()->DataType(), b+4);
    WriteLong(Handler()->Handler(), b+8);
    phdlr->Append(b, 25);
    phdlr->Close();
    
    smart_ptr<Atom> pminf = pmdia->CreateAtom('minf');

    // media information header vmhd/smhd
    ZeroMemory(b, sizeof(b));
    if (IsVideo())
    {
        smart_ptr<Atom> pvmhd = pminf->CreateAtom('vmhd');
        b[3] = 1;
        pvmhd->Append(b, 12);
        pvmhd->Close();
    } else if (IsAudio())
    {
        smart_ptr<Atom> psmhd = pminf->CreateAtom('smhd');
        psmhd->Append(b, 8);
        psmhd->Close();
    } else {
        smart_ptr<Atom> pnmhd = pminf->CreateAtom('nmhd');
        pnmhd->Append(b, 4);
        pnmhd->Close();
    }

    // dinf/dref -- data reference
    smart_ptr<Atom> pdinf = pminf->CreateAtom('dinf');
    smart_ptr<Atom> pdref = pdinf->CreateAtom('dref');
    WriteLong(0, b);        // ver/flags
    WriteLong(1, b+4);      // entries
    pdref->Append(b, 8);
    smart_ptr<Atom> purl = pdref->CreateAtom('url ');
    // self-contained flag set, and no string required
    // -- all data is in this file
    b[3] = 1;
    purl->Append(b, 4);
    purl->Close();
    pdref->Close();
    pdinf->Close();

    smart_ptr<Atom> pstbl = pminf->CreateAtom('stbl');

    // Sample description
    // -- contains one descriptor atom mp4v/mp4a/... for each data reference.
    smart_ptr<Atom> pstsd = pstbl->CreateAtom('stsd');
    WriteLong(0, b);    // ver/flags
    WriteLong(1, b+4);    // count of entries
    pstsd->Append(b, 8);
    Handler()->WriteDescriptor(pstsd, ID(), 1, m_Durations.Scale());   // dataref = 1
    pstsd->Close();

    HRESULT hr = m_Durations.WriteTable(pstbl);
    if (SUCCEEDED(hr))
    {
        hr = m_Syncs.Write(pstbl);
    }
    if (SUCCEEDED(hr))
    {
        hr = m_SC.Write(pstbl);
    }
    if (SUCCEEDED(hr))
    {
        hr = m_Sizes.Write(pstbl);
    }
    if (SUCCEEDED(hr))
    {
        hr = m_CO.Write(pstbl);
    }
    pstbl->Close();
    pminf->Close();
    pmdia->Close();
    ptrak->Close();

    return hr;
}
nsresult
RtspMediaResource::OnConnected(uint8_t aTrackIdx,
                               nsIStreamingProtocolMetaData *meta)
{
  if (mIsConnected) {
    for (uint32_t i = 0 ; i < mTrackBuffer.Length(); ++i) {
      mTrackBuffer[i]->Start();
    }
    return NS_OK;
  }

  uint8_t tracks;
  mMediaStreamController->GetTotalTracks(&tracks);

  // If the RTSP video feature is disabled by preference and the stream contains
  // video, give up here.
  if (!IsVideoEnabled() && IsVideo(tracks, meta)) {
    // Give up, report error to media element.
    nsCOMPtr<nsIRunnable> event =
      NS_NewRunnableMethod(mDecoder, &MediaDecoder::DecodeError);
    NS_DispatchToMainThread(event);
    return NS_ERROR_FAILURE;
  }
  uint64_t duration = 0;
  for (int i = 0; i < tracks; ++i) {
    nsCString rtspTrackId("RtspTrack");
    rtspTrackId.AppendInt(i);
    nsCOMPtr<nsIStreamingProtocolMetaData> trackMeta;
    mMediaStreamController->GetTrackMetaData(i, getter_AddRefs(trackMeta));
    MOZ_ASSERT(trackMeta);
    trackMeta->GetDuration(&duration);

    // Heuristic to estimate the slot size:
    // for a video track, use width*height; for an audio track w*h is 0, so
    // BUFFER_SLOT_DEFAULT_SIZE applies. Finally, clamp the result to
    // [BUFFER_SLOT_DEFAULT_SIZE, BUFFER_SLOT_MAX_SIZE].
    uint32_t w, h;
    uint32_t slotSize;
    trackMeta->GetWidth(&w);
    trackMeta->GetHeight(&h);
    slotSize = clamped((int32_t)(w * h), BUFFER_SLOT_DEFAULT_SIZE,
                       BUFFER_SLOT_MAX_SIZE);
    mTrackBuffer.AppendElement(new RtspTrackBuffer(rtspTrackId.get(),
                                                   i, slotSize));
    mTrackBuffer[i]->Start();
  }

  if (!mDecoder) {
    return NS_ERROR_FAILURE;
  }

  // A duration of 0 implies a live stream.
  if (duration) {
    // Not live stream.
    mRealTime = false;
    mDecoder->SetInfinite(false);
    mDecoder->SetDuration(duration);
  } else {
    // Live stream.
    // Check the preference "media.realtime_decoder.enabled".
    if (!Preferences::GetBool("media.realtime_decoder.enabled", false)) {
      // Give up, report error to media element.
      nsCOMPtr<nsIRunnable> event =
        NS_NewRunnableMethod(mDecoder, &MediaDecoder::DecodeError);
      NS_DispatchToMainThread(event);
      return NS_ERROR_FAILURE;
    } else {
      mRealTime = true;
      bool seekable = false;
      mDecoder->SetInfinite(true);
      mDecoder->SetMediaSeekable(seekable);
    }
  }
  // Fires an initial progress event and sets up the stall counter so stall events
  // fire if no download occurs within the required time frame.
  mDecoder->Progress(false);

  MediaDecoderOwner* owner = mDecoder->GetMediaOwner();
  NS_ENSURE_TRUE(owner, NS_ERROR_FAILURE);
  dom::HTMLMediaElement* element = owner->GetMediaElement();
  NS_ENSURE_TRUE(element, NS_ERROR_FAILURE);

  element->FinishDecoderSetup(mDecoder, this);
  mIsConnected = true;

  return NS_OK;
}
Example #11
static VideoItem ToVITransform(const Gtk::TreeRow& row) 
{
    return IsVideo( MediaStore::Get(row) );
}
Example #12
OP_STATUS ContentDetector::DetectContentType()
{
	m_official_content_type = (URLContentType)m_url->GetAttribute(URL::KContentType, TRUE);

	UINT32 port = m_url->GetAttribute(URL::KResolvedPort);
	URLType url_type =  static_cast<URLType>(m_url->GetAttribute(URL::KType));

	if (((url_type == URL_HTTP && port != 80) || (url_type == URL_HTTPS && port != 443)) && !m_url->GetAttribute(URL::KHTTP_10_or_more))
	{
		// Do not sniff content-type for HTTP 0.9 servers on ports other than 80 for HTTP or 443 for HTTPS (CORE-35973).
		if (m_official_content_type == URL_UNDETERMINED_CONTENT)
			RETURN_IF_ERROR(m_url->SetAttribute(URL::KContentType, URL_TEXT_CONTENT));

		return OpStatus::OK;
	}

	RETURN_IF_ERROR(m_url->GetAttribute(URL::KOriginalMIME_Type, m_official_mime_type));
	int mime_length = m_official_mime_type.Length();

	URLContentType sniffed_content_type = m_official_content_type;
	OpStringC8 sniffed_mime_type;

	OpAutoPtr<URL_DataDescriptor> desc(m_url->GetDescriptor(NULL, URL::KFollowRedirect, TRUE));
	if (!desc.get())
	{
		m_octets = NULL;
		m_length = 0;
	}
	else
	{
		BOOL more;
		RETURN_IF_LEAVE(m_length = desc->RetrieveDataL(more));
		m_octets = desc->GetBuffer();
	}

#ifdef NEED_URL_EXTERNAL_GET_MIME_FROM_SAMPLE
	if (CheckExternally())
		return OpStatus::OK;
#endif

	if (m_url->GetAttribute(URL::KMultimedia))
	{
		RETURN_IF_ERROR(IsVideo(sniffed_content_type, sniffed_mime_type));
	}
	else if (m_official_mime_type.CompareI("text/plain") == 0)
	{
		RETURN_IF_ERROR(IsTextOrBinary(sniffed_content_type, sniffed_mime_type));
	}
	else if (m_official_mime_type.IsEmpty() ||
			 m_official_mime_type.CompareI("unknown/unknown") == 0 ||
			 m_official_mime_type.CompareI("application/unknown") == 0 ||
			 m_official_mime_type.CompareI("*/*") == 0)
	{
		RETURN_IF_ERROR(IsUnknownType(sniffed_content_type, sniffed_mime_type));
	}
	else if (m_official_mime_type.FindI("+xml") == mime_length - 4 ||
			 m_official_mime_type.CompareI("text/xml") == 0	||
			 m_official_mime_type.CompareI("application/xml") == 0)
	{
		return OpStatus::OK;
	}
	else if (m_official_mime_type.CompareI("image/", 6) == 0)
	{
		RETURN_IF_ERROR(IsImage(sniffed_content_type, sniffed_mime_type));
	}

	if (m_dont_leave_undetermined && sniffed_content_type == URL_UNDETERMINED_CONTENT)
	{
		sniffed_content_type = URL_UNKNOWN_CONTENT;
		m_url->SetAttribute(URL::KUntrustedContent, TRUE);
		sniffed_mime_type = "application/octet-stream";
	}

	if (sniffed_content_type != m_official_content_type)
	{
		/* CORE-39801: If we sniffed a container format (such as ZIP or GZIP),
		 * check if the URL string suggests a file extension of a known format
		 * using that format as its container. If so, allow it through if we
		 * originally did not get a valid Content-Type. */
		OpString fileext;
		TRAPD(err, m_url->GetAttributeL(URL::KUniNameFileExt_L, fileext, URL::KFollowRedirect));
		if (OpStatus::IsSuccess(err) && !fileext.IsEmpty())
		{
			Viewer *viewer = NULL;
			OP_STATUS rc = g_viewers->FindViewerByExtension(fileext, viewer);
			if (OpStatus::IsSuccess(rc) && viewer && viewer->AllowedContainer()
				&& 0 == op_strcmp(viewer->AllowedContainer(), sniffed_mime_type.CStr()))
			{
				sniffed_content_type = viewer->GetContentType();
				sniffed_mime_type = viewer->GetContentTypeString8();
			}
			else if (OpStatus::IsMemoryError(rc))
			{
				/* Propagate out-of-memory errors only; any other error just
				 * means we didn't find a Viewer object. */
				return rc;
			}
		}

		RETURN_IF_ERROR(m_url->SetAttribute(URL::KContentType, sniffed_content_type));
		RETURN_IF_ERROR(m_url->SetAttribute(URL::KUntrustedContent, TRUE));
		RETURN_IF_ERROR(m_url->SetAttribute(URL::KMIME_Type, sniffed_mime_type));
	}

	return OpStatus::OK;
}
Example #13
//-------------------------------------------------------------------------------------
// Converts to a premultiplied alpha version of the texture (complex)
//-------------------------------------------------------------------------------------
_Use_decl_annotations_
HRESULT PremultiplyAlpha( const Image* srcImages, size_t nimages, const TexMetadata& metadata, ScratchImage& result )
{
    if ( !srcImages || !nimages )
        return E_INVALIDARG;

    if ( IsCompressed(metadata.format)
         || IsVideo(metadata.format)
         || IsTypeless(metadata.format)
         || !HasAlpha(metadata.format) )
        return HRESULT_FROM_WIN32( ERROR_NOT_SUPPORTED );

#ifdef _M_X64
    if ( (metadata.width > 0xFFFFFFFF) || (metadata.height > 0xFFFFFFFF) )
        return E_INVALIDARG;
#endif

    if ( metadata.IsPMAlpha() )
    {
        // Already premultiplied
        return E_FAIL;
    }

    TexMetadata mdata2 = metadata;
    mdata2.SetAlphaMode(TEX_ALPHA_MODE_PREMULTIPLIED);
    HRESULT hr = result.Initialize( mdata2 );
    if ( FAILED(hr) )
        return hr;

    if ( nimages != result.GetImageCount() )
    {
        result.Release();
        return E_FAIL;
    }

    const Image* dest = result.GetImages();
    if ( !dest )
    {
        result.Release();
        return E_POINTER;
    }

    for( size_t index=0; index < nimages; ++index )
    {
        const Image& src = srcImages[ index ];
        if ( src.format != metadata.format )
        {
            result.Release();
            return E_FAIL;
        }

#ifdef _M_X64
        if ( (src.width > 0xFFFFFFFF) || (src.height > 0xFFFFFFFF) )
        {
            result.Release();
            return E_FAIL;
        }
#endif
        const Image& dst = dest[ index ];
        assert( dst.format == metadata.format );

        if ( src.width != dst.width || src.height != dst.height )
        {
            result.Release();
            return E_FAIL;
        }

        hr = _PremultiplyAlpha( src, dst );
        if ( FAILED(hr) )
        {
            result.Release();
            return hr;
        }
    }

    return S_OK;
}
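//-------------------------------------------------------------------------------------
// Usage sketch (not part of the library code above): premultiplying every image in a
// DDS file, mip levels and array slices included. LoadFromDDSFile and the helper name
// are assumptions; the PremultiplyAlpha overload called is the one defined above.
//-------------------------------------------------------------------------------------
HRESULT PremultiplyDDS( const wchar_t* szFile, ScratchImage& premultiplied )
{
    TexMetadata info;
    ScratchImage loaded;
    HRESULT hr = LoadFromDDSFile( szFile, DDS_FLAGS_NONE, &info, loaded );
    if ( FAILED(hr) )
        return hr;

    // Run the entire image set (all mips and array slices) through the overload above
    return PremultiplyAlpha( loaded.GetImages(), loaded.GetImageCount(),
                             loaded.GetMetadata(), premultiplied );
}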