Example #1
static	DWORD	widOpenHelper(WAVEMAPDATA* wim, UINT idx,
			      LPWAVEOPENDESC lpDesc, LPWAVEFORMATEX lpwfx,
			      DWORD dwFlags)
{
    DWORD	ret;

    TRACE("(%p %04x %p %p %08x)\n", wim, idx, lpDesc, lpwfx, dwFlags);

    /* source is always PCM, so the formulas below apply */
    lpwfx->nBlockAlign = (lpwfx->nChannels * lpwfx->wBitsPerSample) / 8;
    lpwfx->nAvgBytesPerSec = lpwfx->nSamplesPerSec * lpwfx->nBlockAlign;
    if (dwFlags & WAVE_FORMAT_QUERY) {
	ret = acmStreamOpen(NULL, 0, lpwfx, lpDesc->lpFormat, NULL, 0L, 0L, ACM_STREAMOPENF_QUERY);
    } else {
	ret = acmStreamOpen(&wim->hAcmStream, 0, lpwfx, lpDesc->lpFormat, NULL, 0L, 0L, 0L);
    }
    if (ret == MMSYSERR_NOERROR) {
        ret = waveInOpen(&wim->u.in.hInnerWave, idx, lpwfx,
                         (DWORD_PTR)widCallback, (DWORD_PTR)wim,
                         (dwFlags & ~CALLBACK_TYPEMASK) | CALLBACK_FUNCTION);
	if (ret != MMSYSERR_NOERROR && !(dwFlags & WAVE_FORMAT_QUERY)) {
	    acmStreamClose(wim->hAcmStream, 0);
	    wim->hAcmStream = 0;
	}
    }
    TRACE("ret = %08x\n", ret);
    return ret;
}
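The example above only creates a real conversion stream when the caller is not probing: with WAVE_FORMAT_QUERY set it passes ACM_STREAMOPENF_QUERY, in which case no handle is returned and there is nothing to close afterwards. A minimal standalone sketch of that probe pattern (the helper name is made up; the formats come from the caller):

#include <windows.h>
#include <mmreg.h>
#include <msacm.h>

/* Sketch: ask ACM whether a conversion path between two formats exists.
 * With ACM_STREAMOPENF_QUERY the handle pointer may be NULL and no stream
 * is actually created, so a successful query leaves nothing to clean up. */
static BOOL CanConvert(LPWAVEFORMATEX src, LPWAVEFORMATEX dst)
{
    return acmStreamOpen(NULL, NULL, src, dst, NULL, 0, 0,
                         ACM_STREAMOPENF_QUERY) == MMSYSERR_NOERROR;
}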
Example #2
void CSoundStream::Play	( BOOL loop, int cnt )
{
	VERIFY(Sound);

	if (isPause) { Pause(); return; }
	if (dwStatus & DSBSTATUS_PLAYING) return;
    dwDecPos		= 0;
	isPresentData	= true;
//----------------
	if (hAcmStream){
		CHK_DX(acmStreamClose(hAcmStream,0));
	}
	CHK_DX(acmStreamOpen(&hAcmStream,0,psrc,pwfx,0,NULL,0,0));
	CHK_DX(acmStreamSize(hAcmStream,dwDestBufSize,LPDWORD(&dwSrcBufSize),ACM_STREAMSIZEF_DESTINATION));
	// alloc source data buffer
	VERIFY(dwSrcBufSize);
	xr_free(WaveSource);
	WaveSource = (unsigned char *)xr_malloc(dwSrcBufSize);

	// seek to data start
	hf->seek	(DataPos);
	writepos	= 0;
	Decompress	(WaveDest);
	writepos	=stream.cbDstLengthUsed;
//-------
	iLoopCountRested= cnt;
	bMustLoop		= loop;
	bMustPlay		= true;
}
Example #3
LRESULT CAcm::Open(WAVEFORMATEX *pWFormat,LPVOID lpSrcBuf,DWORD dwSrcLength){
	Close();

	//	Copy the arguments before we forget :p
	m_lpSrcWFormat	= pWFormat;
	m_lpSrcBuf		= lpSrcBuf;
	m_dwSrcLength	= dwSrcLength;

	//	Initialize the structures
	ZERO(m_destWFormat);
	ZERO(m_acmheader);
	m_hAcm	= NULL;
	m_dwDestLength = 0;

	m_destWFormat.wFormatTag = WAVE_FORMAT_PCM;	//	We want this to end up as PCM!
	if (acmFormatSuggest(NULL,pWFormat,&m_destWFormat,sizeof(WAVEFORMATEX),ACM_FORMATSUGGESTF_WFORMATTAG)!=0){
		return 1;	//	ACM probably isn't available?
	}
	if (acmStreamOpen(&m_hAcm,NULL,pWFormat,&m_destWFormat,NULL,NULL,NULL,ACM_STREAMOPENF_NONREALTIME)!=0){
		return 2;	//	Something wrong with ACM?
	}
	if (acmStreamSize(m_hAcm,dwSrcLength,&m_dwDestLength,ACM_STREAMSIZEF_SOURCE)!=0){
		return 3;	//	You have to wonder why (lol)
	}

	if (m_dwDestLength == 0) return 4;	//	But why?!

	m_bOpen = true;	//	Open succeeded; on to the real work :p
	return 0;
}
Example #4
static HRESULT AVIFILE_OpenCompressor(IAVIStreamImpl *This)
{
  HRESULT hr;

  /* pre-conditions */
  assert(This != NULL);
  assert(This->pStream != NULL);

  if (This->has != NULL)
    return AVIERR_OK;

  if (This->lpInFormat == NULL) {
    /* decode or encode the data from pStream */
    hr = AVIStreamFormatSize(This->pStream, This->sInfo.dwStart, &This->cbInFormat);
    if (FAILED(hr))
      return hr;
    This->lpInFormat = HeapAlloc(GetProcessHeap(), 0, This->cbInFormat);
    if (This->lpInFormat == NULL)
      return AVIERR_MEMORY;

    hr = IAVIStream_ReadFormat(This->pStream, This->sInfo.dwStart,
			       This->lpInFormat, &This->cbInFormat);
    if (FAILED(hr))
      return hr;

    if (This->lpOutFormat == NULL) {
      /* we must decode to default format */
      This->cbOutFormat = sizeof(PCMWAVEFORMAT);
      This->lpOutFormat = HeapAlloc(GetProcessHeap(), 0, This->cbOutFormat);
      if (This->lpOutFormat == NULL)
	return AVIERR_MEMORY;

      This->lpOutFormat->wFormatTag = WAVE_FORMAT_PCM;
      if (acmFormatSuggest(NULL, This->lpInFormat, This->lpOutFormat,
			   This->cbOutFormat, ACM_FORMATSUGGESTF_WFORMATTAG) != S_OK)
	return AVIERR_NOCOMPRESSOR;
    }
  } else if (This->lpOutFormat == NULL)
    return AVIERR_ERROR; /* To what should I encode? */

  if (acmStreamOpen(&This->has, NULL, This->lpInFormat, This->lpOutFormat,
		    NULL, 0, 0, ACM_STREAMOPENF_NONREALTIME) != S_OK)
    return AVIERR_NOCOMPRESSOR;

  /* update AVISTREAMINFO structure */
  This->sInfo.dwSampleSize = This->lpOutFormat->nBlockAlign;
  This->sInfo.dwScale      = This->lpOutFormat->nBlockAlign;
  This->sInfo.dwRate       = This->lpOutFormat->nAvgBytesPerSec;
  This->sInfo.dwQuality    = (DWORD)ICQUALITY_DEFAULT;
  SetRectEmpty(&This->sInfo.rcFrame);

  /* convert positions and sizes to output format */
  CONVERT_STREAM_to_THIS(&This->sInfo.dwStart);
  CONVERT_STREAM_to_THIS(&This->sInfo.dwLength);
  CONVERT_STREAM_to_THIS(&This->sInfo.dwSuggestedBufferSize);

  return AVIERR_OK;
}
Example #5
static HRESULT ACMWrapper_ConnectInput(TransformFilterImpl* pTransformFilter, const AM_MEDIA_TYPE * pmt)
{
    ACMWrapperImpl* This = (ACMWrapperImpl*)pTransformFilter;
    MMRESULT res;

    TRACE("(%p)->(%p)\n", This, pmt);

    if ((IsEqualIID(&pmt->majortype, &MEDIATYPE_Audio)) &&
        (!memcmp(((char*)&pmt->subtype)+4, ((char*)&MEDIATYPE_Audio)+4, sizeof(GUID)-4)) && /* Check root (GUID w/o FOURCC) */
        (IsEqualIID(&pmt->formattype, &FORMAT_WaveFormatEx)))
    {
        HACMSTREAM drv;
        AM_MEDIA_TYPE* outpmt = &((OutputPin*)This->tf.ppPins[1])->pin.mtCurrent;
        This->pWfIn = (LPWAVEFORMATEX)pmt->pbFormat;

	/* HACK */
	/* TRACE("ALIGN = %d\n", pACMWrapper->pWfIn->nBlockAlign); */
	/* pACMWrapper->pWfIn->nBlockAlign = 1; */

	/* Set output audio data to PCM */
        CopyMediaType(outpmt, pmt);
        outpmt->subtype.Data1 = WAVE_FORMAT_PCM;
	This->pWfOut = (WAVEFORMATEX*)outpmt->pbFormat;
	This->pWfOut->wFormatTag = WAVE_FORMAT_PCM;
	This->pWfOut->wBitsPerSample = 16;
	This->pWfOut->nBlockAlign = 4;
	This->pWfOut->cbSize = 0;
	This->pWfOut->nAvgBytesPerSec = This->pWfOut->nChannels * This->pWfOut->nSamplesPerSec
						* (This->pWfOut->wBitsPerSample/8);

        if (!(res = acmStreamOpen(&drv, NULL, This->pWfIn, This->pWfOut, NULL, 0, 0, 0)))
        {
            This->has = drv;

	    if ((res = acmStreamSize(drv, OUTPUT_BUFFER_SIZE, &This->max_size, ACM_STREAMSIZEF_DESTINATION))) {
		ERR("Cannot retrieve input buffer size error %d!\n", res);
		This->max_size = INPUT_BUFFER_SIZE;
	    }

	    TRACE("input buffer size %ld\n", This->max_size);

            /* Update buffer size of media samples in output */
            ((OutputPin*)This->tf.ppPins[1])->allocProps.cbBuffer = OUTPUT_BUFFER_SIZE;
	    
            TRACE("Connection accepted\n");
            return S_OK;
        }
	else
	    FIXME("acmStreamOpen returned %d\n", res);
        FreeMediaType(outpmt);
        TRACE("Unable to find a suitable ACM decompressor\n");
    }

    TRACE("Connection refused\n");
    return S_FALSE;
}
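Note the direction of the size query above: with ACM_STREAMSIZEF_DESTINATION the call hands ACM the size of the output buffer and receives the largest input buffer that can be converted into it, which is why the messages talk about the input buffer size. A hedged sketch of both query directions (the helper and its printf output are illustrative only):

#include <stdio.h>
#include <windows.h>
#include <mmreg.h>
#include <msacm.h>

/* Sketch: print the two buffer-size relationships acmStreamSize can answer
 * for an already-open conversion stream. */
static void PrintBufferSizes(HACMSTREAM has, DWORD srcLen, DWORD dstLen)
{
    DWORD dstNeeded = 0, srcFits = 0;

    /* Given srcLen input bytes, how large can the converted output be? */
    if (acmStreamSize(has, srcLen, &dstNeeded, ACM_STREAMSIZEF_SOURCE) == MMSYSERR_NOERROR)
        printf("%lu source bytes -> up to %lu destination bytes\n",
               (unsigned long)srcLen, (unsigned long)dstNeeded);

    /* Given a dstLen-byte output buffer, how much input fits in one convert? */
    if (acmStreamSize(has, dstLen, &srcFits, ACM_STREAMSIZEF_DESTINATION) == MMSYSERR_NOERROR)
        printf("a %lu-byte destination accepts up to %lu source bytes\n",
               (unsigned long)dstLen, (unsigned long)srcFits);
}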
Example #6
static HRESULT WINAPI ACMWrapper_SetMediaType(TransformFilter *tf, PIN_DIRECTION dir, const AM_MEDIA_TYPE * pmt)
{
    ACMWrapperImpl* This = impl_from_TransformFilter(tf);
    MMRESULT res;

    TRACE("(%p)->(%i %p)\n", This, dir, pmt);

    if (dir != PINDIR_INPUT)
        return S_OK;

    /* Check root (GUID w/o FOURCC) */
    if ((IsEqualIID(&pmt->majortype, &MEDIATYPE_Audio)) &&
        (!memcmp(((const char *)&pmt->subtype)+4, ((const char *)&MEDIATYPE_Audio)+4, sizeof(GUID)-4)) &&
        (IsEqualIID(&pmt->formattype, &FORMAT_WaveFormatEx)))
    {
        HACMSTREAM drv;
        WAVEFORMATEX *wfx = (WAVEFORMATEX*)pmt->pbFormat;
        AM_MEDIA_TYPE* outpmt = &This->tf.pmt;

        if (!wfx || wfx->wFormatTag == WAVE_FORMAT_PCM || wfx->wFormatTag == WAVE_FORMAT_EXTENSIBLE)
            return VFW_E_TYPE_NOT_ACCEPTED;
        FreeMediaType(outpmt);

        This->pWfIn = (LPWAVEFORMATEX)pmt->pbFormat;

	/* HACK */
	/* TRACE("ALIGN = %d\n", pACMWrapper->pWfIn->nBlockAlign); */
	/* pACMWrapper->pWfIn->nBlockAlign = 1; */

	/* Set output audio data to PCM */
        CopyMediaType(outpmt, pmt);
        outpmt->subtype.Data1 = WAVE_FORMAT_PCM;
	This->pWfOut = (WAVEFORMATEX*)outpmt->pbFormat;
	This->pWfOut->wFormatTag = WAVE_FORMAT_PCM;
	This->pWfOut->wBitsPerSample = 16;
	This->pWfOut->nBlockAlign = This->pWfOut->wBitsPerSample * This->pWfOut->nChannels / 8;
	This->pWfOut->cbSize = 0;
	This->pWfOut->nAvgBytesPerSec = This->pWfOut->nChannels * This->pWfOut->nSamplesPerSec
						* (This->pWfOut->wBitsPerSample/8);

        if (!(res = acmStreamOpen(&drv, NULL, This->pWfIn, This->pWfOut, NULL, 0, 0, 0)))
        {
            This->has = drv;

            TRACE("Connection accepted\n");
            return S_OK;
        }
	else
	    FIXME("acmStreamOpen returned %d\n", res);
        FreeMediaType(outpmt);
        TRACE("Unable to find a suitable ACM decompressor\n");
    }

    TRACE("Connection refused\n");
    return VFW_E_TYPE_NOT_ACCEPTED;
}
Example #7
int ToADPCM(short *Source, short *Dest, int Size)
{
    int Src_size;
    int Dest_Size;

    Wave_Format.wFormatTag = WAVE_FORMAT_PCM;
    Wave_Format.nChannels = 1;
    Wave_Format.cbSize = 0;
    Wave_Format.wBitsPerSample = 16;
    Wave_Format.nSamplesPerSec = 44100;
    Wave_Format.nBlockAlign = Wave_Format.nChannels * Wave_Format.wBitsPerSample / 8;
    Wave_Format.nAvgBytesPerSec = Wave_Format.nSamplesPerSec * Wave_Format.nBlockAlign;

    ADPCM_Format.wfx.wFormatTag = WAVE_FORMAT_IMA_ADPCM;
    acmFormatSuggest(NULL, (LPWAVEFORMATEX) &Wave_Format, (LPWAVEFORMATEX) &ADPCM_Format, sizeof(ADPCM_Format), ACM_FORMATSUGGESTF_WFORMATTAG);
    acmStreamOpen(&Pack_Stream, NULL, (LPWAVEFORMATEX) &Wave_Format, (LPWAVEFORMATEX) &ADPCM_Format, NULL, 0, 0, ACM_STREAMOPENF_NONREALTIME);

    Src_size = Size;
    unsigned long rawbufsize = 0;
    acmStreamSize(Pack_Stream, Src_size, &rawbufsize, ACM_STREAMSIZEF_SOURCE);
    Uint8 *Pack_Buf = (Uint8 *) malloc(Src_size + 8);
    memset(Pack_Buf, 0, Src_size + 8);
    Uint8 *rawbuf = (Uint8 *) malloc(rawbufsize + 8);
    memset(rawbuf, 0, rawbufsize + 8);

    ACMSTREAMHEADER Pack_Stream_Head;
    ZeroMemory(&Pack_Stream_Head, sizeof(ACMSTREAMHEADER));
    Pack_Stream_Head.cbStruct = sizeof(ACMSTREAMHEADER);
    Pack_Stream_Head.pbSrc = (Uint8 *) Pack_Buf;
    Pack_Stream_Head.cbSrcLength = Src_size;
    Pack_Stream_Head.pbDst = rawbuf;
    Pack_Stream_Head.cbDstLength = rawbufsize;
    acmStreamPrepareHeader(Pack_Stream, &Pack_Stream_Head, 0);

    memcpy(Pack_Buf, Source, Src_size);

    acmStreamConvert(Pack_Stream, &Pack_Stream_Head, 0);
    Dest_Size = Pack_Stream_Head.cbDstLengthUsed;
    if(Dest_Size < Src_size)
    {
        memcpy(Dest, rawbuf, Dest_Size);
    }
    else
    {
        Dest_Size = 0;
    }

    acmStreamUnprepareHeader(Pack_Stream, &Pack_Stream_Head, 0);
    if(rawbuf) free(rawbuf);
    if(Pack_Buf) free(Pack_Buf);
    acmStreamClose(Pack_Stream, 0);

    return(Dest_Size);
}
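Like several listings here, the function above ignores the MMRESULT codes returned by acmFormatSuggest, acmStreamOpen and the conversion calls. A hedged sketch of the same open / size / prepare / convert / unprepare / close pipeline with every step checked, written for the decode direction (PCM output) so a bare WAVEFORMATEX is large enough for the suggested format; the helper name and ownership convention are assumptions:

#include <windows.h>
#include <mmreg.h>
#include <msacm.h>
#include <stdlib.h>

/* Sketch: decode one compressed buffer to PCM, checking every return code.
 * Returns the number of PCM bytes produced (0 on failure); on success the
 * caller owns *pcm and releases it with free(). */
static DWORD DecodeToPcm(LPWAVEFORMATEX srcFmt, BYTE *src, DWORD srcLen, BYTE **pcm)
{
    WAVEFORMATEX dst;
    HACMSTREAM has = NULL;
    ACMSTREAMHEADER hdr;
    DWORD pcmLen = 0, used = 0;

    *pcm = NULL;

    /* PCM needs no extra format bytes, so sizeof(WAVEFORMATEX) is enough here. */
    ZeroMemory(&dst, sizeof(dst));
    dst.wFormatTag = WAVE_FORMAT_PCM;
    if (acmFormatSuggest(NULL, srcFmt, &dst, sizeof(dst),
                         ACM_FORMATSUGGESTF_WFORMATTAG) != MMSYSERR_NOERROR)
        return 0;

    if (acmStreamOpen(&has, NULL, srcFmt, &dst, NULL, 0, 0,
                      ACM_STREAMOPENF_NONREALTIME) != MMSYSERR_NOERROR)
        return 0;

    /* Worst-case output size for srcLen bytes of input. */
    if (acmStreamSize(has, srcLen, &pcmLen, ACM_STREAMSIZEF_SOURCE) != MMSYSERR_NOERROR || !pcmLen)
        goto done;

    *pcm = (BYTE *)malloc(pcmLen);
    if (!*pcm)
        goto done;

    ZeroMemory(&hdr, sizeof(hdr));
    hdr.cbStruct    = sizeof(hdr);
    hdr.pbSrc       = src;
    hdr.cbSrcLength = srcLen;
    hdr.pbDst       = *pcm;
    hdr.cbDstLength = pcmLen;
    if (acmStreamPrepareHeader(has, &hdr, 0) != MMSYSERR_NOERROR)
        goto done;

    if (acmStreamConvert(has, &hdr, ACM_STREAMCONVERTF_END) == MMSYSERR_NOERROR)
        used = hdr.cbDstLengthUsed;
    acmStreamUnprepareHeader(has, &hdr, 0);

done:
    acmStreamClose(has, 0);
    if (!used && *pcm) { free(*pcm); *pcm = NULL; }
    return used;
}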
Example #8
static HRESULT ACMWrapper_ConnectInput(InputPin *pin, const AM_MEDIA_TYPE * pmt)
{
    ACMWrapperImpl* This = (ACMWrapperImpl *)pin->pin.pinInfo.pFilter;
    MMRESULT res;

    TRACE("(%p)->(%p)\n", This, pmt);

    /* Check root (GUID w/o FOURCC) */
    if ((IsEqualIID(&pmt->majortype, &MEDIATYPE_Audio)) &&
        (!memcmp(((const char *)&pmt->subtype)+4, ((const char *)&MEDIATYPE_Audio)+4, sizeof(GUID)-4)) &&
        (IsEqualIID(&pmt->formattype, &FORMAT_WaveFormatEx)))
    {
        HACMSTREAM drv;
        AM_MEDIA_TYPE* outpmt = &This->tf.pmt;
        FreeMediaType(outpmt);

        This->pWfIn = (LPWAVEFORMATEX)pmt->pbFormat;

	/* HACK */
	/* TRACE("ALIGN = %d\n", pACMWrapper->pWfIn->nBlockAlign); */
	/* pACMWrapper->pWfIn->nBlockAlign = 1; */

	/* Set output audio data to PCM */
        CopyMediaType(outpmt, pmt);
        outpmt->subtype.Data1 = WAVE_FORMAT_PCM;
	This->pWfOut = (WAVEFORMATEX*)outpmt->pbFormat;
	This->pWfOut->wFormatTag = WAVE_FORMAT_PCM;
	This->pWfOut->wBitsPerSample = 16;
	This->pWfOut->nBlockAlign = This->pWfOut->wBitsPerSample * This->pWfOut->nChannels / 8;
	This->pWfOut->cbSize = 0;
	This->pWfOut->nAvgBytesPerSec = This->pWfOut->nChannels * This->pWfOut->nSamplesPerSec
						* (This->pWfOut->wBitsPerSample/8);

        if (!(res = acmStreamOpen(&drv, NULL, This->pWfIn, This->pWfOut, NULL, 0, 0, 0)))
        {
            This->has = drv;

            /* Update buffer size of media samples in output */
            ((OutputPin*)This->tf.ppPins[1])->allocProps.cbBuffer = This->pWfOut->nAvgBytesPerSec / 2;
            TRACE("Connection accepted\n");
            return S_OK;
        }
	else
	    FIXME("acmStreamOpen returned %d\n", res);
        FreeMediaType(outpmt);
        TRACE("Unable to find a suitable ACM decompressor\n");
    }

    TRACE("Connection refused\n");
    return VFW_E_TYPE_NOT_ACCEPTED;
}
Example #9
static gboolean
acmmp3dec_setup (ACMMP3Dec * dec)
{
  MMRESULT res;
  int destBufferSize;

  acmmp3dec_set_input_format (dec);
  acmmp3dec_set_output_format (dec);

  res =
      acmStreamOpen (&dec->stream, NULL, (LPWAVEFORMATEX) & dec->infmt,
      &dec->outfmt, 0, 0, 0, 0);
  if (res) {
    GST_WARNING_OBJECT (dec, "Failed to open ACM stream: %d", res);
    return FALSE;
  }

  dec->header.cbStruct = sizeof (ACMSTREAMHEADER);
  dec->header.fdwStatus = 0;
  dec->header.dwUser = 0;

  dec->header.pbSrc = (BYTE *) g_malloc (ACM_BUFFER_SIZE);
  dec->header.cbSrcLength = ACM_BUFFER_SIZE;
  dec->header.cbSrcLengthUsed = 0;
  dec->header.dwSrcUser = 0;

  /* Ask what buffer size we need to use for our output */
  acmStreamSize (dec->stream, ACM_BUFFER_SIZE,
      (LPDWORD) & destBufferSize, ACM_STREAMSIZEF_SOURCE);

  dec->header.pbDst = (BYTE *) g_malloc (destBufferSize);
  dec->header.cbDstLength = destBufferSize;
  dec->header.cbDstLengthUsed = 0;
  dec->header.dwDstUser = 0;

  res = acmStreamPrepareHeader (dec->stream, &dec->header, 0);
  if (res) {
    GST_WARNING_OBJECT (dec, "Failed to prepare ACM stream: %x", res);
    return FALSE;
  }

  dec->output_caps = acmmp3dec_caps_from_format (&dec->outfmt);
  if (dec->output_caps) {
    gst_pad_set_caps (dec->srcpad, dec->output_caps);
  }

  dec->timestamp = GST_CLOCK_TIME_NONE;
  dec->is_setup = TRUE;
  return TRUE;
}
Example #10
void CACMStream::Open()
{
	m_mmr = acmStreamOpen(&m_strm, 
						  NULL, 
						  m_srcFmt,
						  m_dstFmt,
						  NULL,
						  0,
						  0,
						  ACM_STREAMOPENF_NONREALTIME);
	
	if (m_mmr)
		raiseError(m_mmr, "Error opening ACM stream");

}
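The wrapper above only covers opening; closing is left to the caller. A sketch of the matching teardown, written as a free-standing helper rather than the class method (which is not shown in the source):

#include <windows.h>
#include <mmreg.h>
#include <msacm.h>

/* Sketch: close an ACM stream and forget the handle only on success.
 * acmStreamClose fails with ACMERR_BUSY while prepared headers or pending
 * conversions are still outstanding, so the handle must stay valid then. */
static MMRESULT CloseAcmStream(HACMSTREAM *phas)
{
    MMRESULT mmr = MMSYSERR_NOERROR;

    if (phas && *phas)
    {
        mmr = acmStreamClose(*phas, 0);
        if (mmr == MMSYSERR_NOERROR)
            *phas = NULL;
    }
    return mmr;
}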
Example #11
int
acm_cv_create (const converter_fmt_t *cfmt, u_char **state, uint32_t *state_len)
{
        LPHACMSTREAM lpa;
        WAVEFORMATEX wfxSrc, wfxDst;

        lpa        = (LPHACMSTREAM)xmalloc(sizeof(HACMSTREAM));

        acm_conv_init_fmt(&wfxSrc, cfmt->src_channels, cfmt->src_freq);
        acm_conv_init_fmt(&wfxDst, cfmt->dst_channels,   cfmt->dst_freq);

        if (acmStreamOpen(lpa, hDrv, &wfxSrc, &wfxDst, NULL, 0L, 0L, 0L)) {
                xfree(lpa);
                return FALSE;
        }

        *state     = (u_char *)lpa;
        *state_len = sizeof(HACMSTREAM);

        return TRUE;
}
Example #12
static HRESULT WINAPI ACMWrapper_CompleteConnect(TransformFilter *tf, PIN_DIRECTION dir, IPin *pin)
{
    ACMWrapperImpl* This = (ACMWrapperImpl *)tf;
    MMRESULT res;
    HACMSTREAM drv;

    TRACE("(%p)\n", This);

    if (dir != PINDIR_INPUT)
        return S_OK;

    if (!(res = acmStreamOpen(&drv, NULL, This->pWfIn, This->pWfOut, NULL, 0, 0, 0)))
    {
        This->has = drv;

        TRACE("Connection accepted\n");
        return S_OK;
    }

    FIXME("acmStreamOpen returned %d\n", res);
    TRACE("Unable to find a suitable ACM decompressor\n");
    return VFW_E_TYPE_NOT_ACCEPTED;
}
Example #13
BOOL ClSoundDS::CreateStreamBuffer(int handle,int arcFileNum,int playNum)
{
	int				size,stream,dataTopOffset,nextStream = -1;
	HRESULT			hr;
	RiffHead		riffHead;
	FmtHead			fmtHead;
	PCMWAVEFORMAT	pcmFormat;
	BOOL			bOGG = FALSE;

	ReleaseSoundBuffer(handle);	
	stream = lpReadFile->StreamOpenFileNum(arcFileNum,playNum,size);
	if(pack_bgmfile!=arcFileNum && pack_voice!=arcFileNum){
		lpReadFile->StreamReadFile(arcFileNum,stream,(char *)&riffHead,sizeof(RiffHead));
		if(strncmp(riffHead.riff,"RIFF",4) || strncmp(riffHead.type,"WAVE",4)){
			lpReadFile->StreamCloseFile(arcFileNum,stream);
			return FALSE;
		}
		lpReadFile->StreamReadFile(arcFileNum,stream,(char *)&fmtHead,sizeof(FmtHead));
		lpReadFile->StreamReadFile(arcFileNum,stream,(char *)&pcmFormat,sizeof(PCMWAVEFORMAT));
	}else{
		char chkFmt[4];
		bOGG = TRUE;
		char *fname = lpReadFile->GetFileName(arcFileNum,playNum);
		lpReadFile->StreamReadFile(arcFileNum,stream,(char *)&chkFmt,4);
		lpReadFile->StreamSeekFile(arcFileNum,stream,0,FILE_BEGIN);
		if(0!=strncmp(chkFmt,"OggS",4))return FALSE;
		dataTopOffset = 0;
		if(NULL==lpDSBufferTop){
			lpDSBufferTop = lpDSBufferTail = new ClSoundBuffer;
		}else{
			lpDSBufferTail->lpNext = new ClSoundBuffer;
			lpDSBufferTail->lpNext->lpPrev = lpDSBufferTail;
			lpDSBufferTail = lpDSBufferTail->lpNext;
		}
		lpDSBufferTail->dataTopOffset = dataTopOffset;
		lpDSBufferTail->o_dat = new OggDec;
		if(_strnicmp(&fname[strlen(fname)-6],"_A.",3) == 0){
			char workBuf[32];
			strcpy(workBuf,fname);
			workBuf[strlen(fname)-5] = 'B';
			nextStream = lpReadFile->SearchFile(arcFileNum,workBuf);
		}
		WAVEFORMATEX	dstWF;
		char			workBuf[4096];
		lpReadFile->StreamReadFile(arcFileNum,stream,workBuf,4096);
		lpDSBufferTail->o_dat->GetWaveformat(&dstWF,workBuf);

		DSBUFFERDESC	dsbdesc;
		ZeroMemory(&dsbdesc, sizeof(DSBUFFERDESC));
		dsbdesc.dwSize = sizeof(DSBUFFERDESC);
		dsbdesc.dwFlags = DSBCAPS_STATIC | DSBCAPS_CTRLVOLUME | DSBCAPS_CTRLPOSITIONNOTIFY | DSBCAPS_LOCSOFTWARE | DSBCAPS_GLOBALFOCUS;
		dsbdesc.dwBufferBytes = dstWF.nAvgBytesPerSec *2;
		dsbdesc.lpwfxFormat = &dstWF;
		hr = lpDSound->CreateSoundBuffer(&dsbdesc, &lpTmpBuffer, NULL);
		lpTmpBuffer->QueryInterface(IID_IDirectSoundBuffer8,(LPVOID *)&lpDSBufferTail->lpDSBuffer);
		RELEASE(lpTmpBuffer);
		lpDSBufferTail->streamBlockSize = dstWF.nAvgBytesPerSec;
	}
	if(bOGG){
		pcmFormat.wf.wFormatTag = WAVE_FORMAT_OGG;
	}else if(pcmFormat.wf.wFormatTag != WAVE_FORMAT_PCM){
		WAVEFORMATEX	dstWF;
		WAVEFORMATEX	*pwfxInfo;
		WORD			cbExtraAlloc;   
		MMRESULT		mmResult;
		lpReadFile->StreamReadFile(arcFileNum,stream,(char *)&cbExtraAlloc,sizeof(WORD));
		pwfxInfo = (WAVEFORMATEX *)cl_malloc(sizeof(WAVEFORMATEX) +cbExtraAlloc);
		CopyMemory(pwfxInfo,&pcmFormat,sizeof(PCMWAVEFORMAT));
		pwfxInfo->cbSize = cbExtraAlloc;
		lpReadFile->StreamReadFile(arcFileNum,stream,(char *)pwfxInfo +sizeof(WAVEFORMATEX),cbExtraAlloc);
		lpReadFile->StreamSeekFile(arcFileNum,stream,sizeof(RiffHead),FILE_BEGIN);
		lpReadFile->StreamReadFile(arcFileNum,stream,(char *)&fmtHead,sizeof(FmtHead));
		dataTopOffset = sizeof(RiffHead) +sizeof(FmtHead);
		while(0!=strncmp(fmtHead.fmt,"data",4)){
			lpReadFile->StreamSeekFile(arcFileNum,stream,fmtHead.size,FILE_CURRENT);
			dataTopOffset += fmtHead.size;
			lpReadFile->StreamReadFile(arcFileNum,stream,(char *)&fmtHead,sizeof(FmtHead));
			dataTopOffset += sizeof(FmtHead);
		}
		ZeroMemory(&dstWF,sizeof(dstWF));
		dstWF.wFormatTag = WAVE_FORMAT_PCM;
		mmResult = acmFormatSuggest(
			NULL,
			pwfxInfo,
			&dstWF,
			sizeof(dstWF),
			ACM_FORMATSUGGESTF_WFORMATTAG);
		if(mmResult != 0){
			myOutputDebugString("Audio compression is not available\n");
			return FALSE;
		}
		if(NULL==lpDSBufferTop){
			lpDSBufferTop = lpDSBufferTail = new ClSoundBuffer;
		}else{
			lpDSBufferTail->lpNext = new ClSoundBuffer;
			lpDSBufferTail->lpNext->lpPrev = lpDSBufferTail;
			lpDSBufferTail = lpDSBufferTail->lpNext;
		}
		lpDSBufferTail->dataTopOffset = dataTopOffset;
		acmStreamOpen(&lpDSBufferTail->hAcm, NULL,pwfxInfo,&dstWF, NULL, 0L, 0L, ACM_STREAMOPENF_NONREALTIME);
		size = pwfxInfo->nAvgBytesPerSec;
		if(size % pwfxInfo->nBlockAlign){
			size = (size/pwfxInfo->nBlockAlign +1)*pwfxInfo->nBlockAlign;
		}
		acmStreamSize(lpDSBufferTail->hAcm,size, &lpDSBufferTail->acmDst.dwStreamSize, ACM_STREAMSIZEF_SOURCE);
		acmStreamSize(lpDSBufferTail->hAcm, lpDSBufferTail->acmDst.dwStreamSize, &lpDSBufferTail->acmSrc.dwStreamSize, ACM_STREAMSIZEF_DESTINATION);
		acmStreamSize(lpDSBufferTail->hAcm, lpDSBufferTail->acmSrc.dwStreamSize, &lpDSBufferTail->acmDst.dwStreamSize, ACM_STREAMSIZEF_SOURCE);
		cl_free(pwfxInfo);
		if(fmtHead.size < lpDSBufferTail->acmSrc.dwStreamSize*2){
			acmStreamClose(lpDSBufferTail->hAcm, 0);
			lpDSBufferTail->hAcm = NULL;
			lpReadFile->StreamCloseFile(arcFileNum,stream);
			if(lpDSBufferTop == lpDSBufferTail){
				delete lpDSBufferTail;
				lpDSBufferTop = lpDSBufferTail = NULL;
			}else{
				lpDSBufferTail = lpDSBufferTail->lpPrev;
				delete(lpDSBufferTail->lpNext);
				lpDSBufferTail->lpNext = NULL;
			}
			return -1;
		}
		lpDSBufferTail->acmSrc.lpStream = (LPBYTE)cl_malloc(lpDSBufferTail->acmSrc.dwStreamSize);
		lpDSBufferTail->acmDst.lpStream = (LPBYTE)cl_malloc(lpDSBufferTail->acmDst.dwStreamSize);
		ZeroMemory(&lpDSBufferTail->ash,sizeof(lpDSBufferTail->ash));
		lpDSBufferTail->ash.cbStruct		= sizeof(lpDSBufferTail->ash);
		lpDSBufferTail->ash.pbSrc			= lpDSBufferTail->acmSrc.lpStream;
		lpDSBufferTail->ash.cbSrcLength		= lpDSBufferTail->acmSrc.dwStreamSize;
		lpDSBufferTail->ash.dwSrcUser		= lpDSBufferTail->acmSrc.dwStreamSize;
		lpDSBufferTail->ash.pbDst			= lpDSBufferTail->acmDst.lpStream;
		lpDSBufferTail->ash.cbDstLength		= lpDSBufferTail->acmDst.dwStreamSize;
		lpDSBufferTail->ash.dwDstUser		= lpDSBufferTail->acmDst.dwStreamSize;
		acmStreamPrepareHeader(lpDSBufferTail->hAcm,&lpDSBufferTail->ash,0);
		DSBUFFERDESC	dsbdesc;
		ZeroMemory(&dsbdesc, sizeof(DSBUFFERDESC));
		dsbdesc.dwSize = sizeof(DSBUFFERDESC);
		dsbdesc.dwFlags = DSBCAPS_STATIC | DSBCAPS_CTRLVOLUME | DSBCAPS_CTRLPOSITIONNOTIFY | DSBCAPS_LOCSOFTWARE | DSBCAPS_GLOBALFOCUS;
		dsbdesc.dwBufferBytes = lpDSBufferTail->acmDst.dwStreamSize *2;
		dsbdesc.lpwfxFormat = &dstWF;
		hr = lpDSound->CreateSoundBuffer(&dsbdesc, &lpTmpBuffer, NULL);
		lpTmpBuffer->QueryInterface(IID_IDirectSoundBuffer8,(LPVOID *)&lpDSBufferTail->lpDSBuffer);
		RELEASE(lpTmpBuffer);
		lpDSBufferTail->streamBlockSize = lpDSBufferTail->acmDst.dwStreamSize;
	}else{
		dataTopOffset = sizeof(RiffHead) +sizeof(FmtHead) +fmtHead.size;
		lpReadFile->StreamSeekFile(arcFileNum,stream,fmtHead.size -sizeof(PCMWAVEFORMAT),FILE_CURRENT);
		lpReadFile->StreamReadFile(arcFileNum,stream,(char *)&fmtHead,sizeof(FmtHead));
		dataTopOffset += sizeof(FmtHead);
		while(0!=strncmp(fmtHead.fmt,"data",4)){
			lpReadFile->StreamSeekFile(arcFileNum,stream,fmtHead.size,FILE_CURRENT);
			dataTopOffset += fmtHead.size;
			lpReadFile->StreamReadFile(arcFileNum,stream,(char *)&fmtHead,sizeof(FmtHead));
			dataTopOffset += sizeof(FmtHead);
		}
		if(fmtHead.size < pcmFormat.wf.nAvgBytesPerSec*2){
			lpReadFile->StreamCloseFile(arcFileNum,stream);
			return -1;
		}
		if(NULL==lpDSBufferTop){
			lpDSBufferTop = lpDSBufferTail = new ClSoundBuffer;
		}else{
			lpDSBufferTail->lpNext = new ClSoundBuffer;
			lpDSBufferTail->lpNext->lpPrev = lpDSBufferTail;
			lpDSBufferTail = lpDSBufferTail->lpNext;
		}
		lpDSBufferTail->dataTopOffset = dataTopOffset;
		DSBUFFERDESC	dsbdesc;
		WAVEFORMATEX	audioFmt;
		ZeroMemory(&dsbdesc, sizeof(DSBUFFERDESC));
		dsbdesc.dwSize = sizeof(DSBUFFERDESC);
		dsbdesc.dwFlags = DSBCAPS_STATIC | DSBCAPS_CTRLVOLUME | DSBCAPS_CTRLPOSITIONNOTIFY | DSBCAPS_LOCSOFTWARE | DSBCAPS_GLOBALFOCUS;
		CopyMemory(&audioFmt,&pcmFormat,sizeof(WAVEFORMAT));
		audioFmt.wBitsPerSample = pcmFormat.wBitsPerSample;
		audioFmt.cbSize = 0;
		dsbdesc.dwBufferBytes = audioFmt.nAvgBytesPerSec *2;
		dsbdesc.lpwfxFormat = &audioFmt;
		hr = lpDSound->CreateSoundBuffer(&dsbdesc, &lpTmpBuffer, NULL);
		lpTmpBuffer->QueryInterface(IID_IDirectSoundBuffer8,(LPVOID *)&lpDSBufferTail->lpDSBuffer);
		RELEASE(lpTmpBuffer);
		lpDSBufferTail->streamBlockSize = audioFmt.nAvgBytesPerSec;
	}
	lpDSBufferTail->arcFileNum = arcFileNum;
	lpDSBufferTail->musicNum = lpDSBufferTail->orgMusicNum = playNum;
	lpDSBufferTail->streamNum = stream;
	lpDSBufferTail->nextStream = nextStream;
	lpDSBufferTail->handle = handle;
	lpDSBufferTail->bufType = stream_sound;
	lpDSBufferTail->readFile = lpReadFile;
	lpDSBufferTail->wFormatTag = pcmFormat.wf.wFormatTag;
	return TRUE;
} // ClSoundDS::CreateStreamBuffer
Example #14
static void test_prepareheader(void)
{
    HACMSTREAM has;
    ADPCMWAVEFORMAT *src;
    WAVEFORMATEX dst;
    MMRESULT mr;
    ACMSTREAMHEADER hdr;
    BYTE buf[sizeof(WAVEFORMATEX) + 32], pcm[2048], input[512];
    ADPCMCOEFSET *coef;

    src = (ADPCMWAVEFORMAT*)buf;
    coef = src->aCoef;
    src->wfx.cbSize = 32;
    src->wfx.wFormatTag = WAVE_FORMAT_ADPCM;
    src->wfx.nSamplesPerSec = 22050;
    src->wfx.wBitsPerSample = 4;
    src->wfx.nChannels = 1;
    src->wfx.nBlockAlign = 512;
    src->wfx.nAvgBytesPerSec = 11025;
    src->wSamplesPerBlock = 0x3f4;
    src->wNumCoef = 7;
    coef[0].iCoef1 = 0x0100;
    coef[0].iCoef2 = 0x0000;
    coef[1].iCoef1 = 0x0200;
    coef[1].iCoef2 = 0xff00;
    coef[2].iCoef1 = 0x0000;
    coef[2].iCoef2 = 0x0000;
    coef[3].iCoef1 = 0x00c0;
    coef[3].iCoef2 = 0x0040;
    coef[4].iCoef1 = 0x00f0;
    coef[4].iCoef2 = 0x0000;
    coef[5].iCoef1 = 0x01cc;
    coef[5].iCoef2 = 0xff30;
    coef[6].iCoef1 = 0x0188;
    coef[6].iCoef2 = 0xff18;

    dst.cbSize = 0;
    dst.wFormatTag = WAVE_FORMAT_PCM;
    dst.nSamplesPerSec = 22050;
    dst.wBitsPerSample = 8;
    dst.nChannels = 1;
    dst.nBlockAlign = dst.wBitsPerSample * dst.nChannels / 8;
    dst.nAvgBytesPerSec = dst.nSamplesPerSec * dst.nBlockAlign;

    mr = acmStreamOpen(&has, NULL, (WAVEFORMATEX*)src, &dst, NULL, 0, 0, 0);
    ok(mr == MMSYSERR_NOERROR, "open failed: 0x%x\n", mr);

    memset(input, 0, sizeof(input));
    memset(&hdr, 0, sizeof(hdr));
    hdr.cbStruct = sizeof(hdr);
    hdr.pbSrc = input;
    hdr.cbSrcLength = sizeof(input);
    hdr.pbDst = pcm;
    hdr.cbDstLength = sizeof(pcm);

    mr = acmStreamPrepareHeader(has, &hdr, 0);
    ok(mr == MMSYSERR_NOERROR, "prepare failed: 0x%x\n", mr);
    ok(hdr.fdwStatus == ACMSTREAMHEADER_STATUSF_PREPARED, "header wasn't prepared: 0x%x\n", hdr.fdwStatus);

    mr = acmStreamUnprepareHeader(has, &hdr, 0);
    ok(mr == MMSYSERR_NOERROR, "unprepare failed: 0x%x\n", mr);
    ok(hdr.fdwStatus == 0, "header wasn't unprepared: 0x%x\n", hdr.fdwStatus);

    memset(&hdr, 0, sizeof(hdr));
    hdr.cbStruct = sizeof(hdr);
    hdr.pbSrc = input;
    hdr.cbSrcLength = sizeof(input);
    hdr.pbDst = pcm;
    hdr.cbDstLength = sizeof(pcm);
    hdr.fdwStatus = ACMSTREAMHEADER_STATUSF_DONE;

    mr = acmStreamPrepareHeader(has, &hdr, 0);
    ok(mr == MMSYSERR_NOERROR, "prepare failed: 0x%x\n", mr);
    ok(hdr.fdwStatus == (ACMSTREAMHEADER_STATUSF_PREPARED | ACMSTREAMHEADER_STATUSF_DONE), "header wasn't prepared: 0x%x\n", hdr.fdwStatus);

    hdr.cbSrcLengthUsed = 12345;
    hdr.cbDstLengthUsed = 12345;
    hdr.fdwStatus &= ~ACMSTREAMHEADER_STATUSF_DONE;
    mr = acmStreamConvert(has, &hdr, ACM_STREAMCONVERTF_BLOCKALIGN);
    ok(mr == MMSYSERR_NOERROR, "convert failed: 0x%x\n", mr);
    ok(hdr.fdwStatus & ACMSTREAMHEADER_STATUSF_DONE, "conversion was not done: 0x%x\n", hdr.fdwStatus);
    ok(hdr.cbSrcLengthUsed == hdr.cbSrcLength, "expected %d, got %d\n", hdr.cbSrcLength, hdr.cbSrcLengthUsed);
    todo_wine
    ok(hdr.cbDstLengthUsed == 1010, "expected 1010, got %d\n", hdr.cbDstLengthUsed);

    mr = acmStreamUnprepareHeader(has, &hdr, 0);
    ok(mr == MMSYSERR_NOERROR, "unprepare failed: 0x%x\n", mr);
    ok(hdr.fdwStatus == ACMSTREAMHEADER_STATUSF_DONE, "header wasn't unprepared: 0x%x\n", hdr.fdwStatus);

    /* The 2 next tests are related to Lost Horizon (bug 24723) */
    memset(&hdr, 0, sizeof(hdr));
    hdr.cbStruct = sizeof(hdr);
    hdr.pbSrc = input;
    hdr.cbSrcLength = sizeof(input);
    hdr.pbDst = pcm;
    hdr.cbDstLength = -4;

    mr = acmStreamPrepareHeader(has, &hdr, 0);
    if (sizeof(void *) == 4) /* 64 bit fails on this test */
    {
        ok(mr == MMSYSERR_NOERROR, "prepare failed: 0x%x\n", mr);
        ok(hdr.fdwStatus == ACMSTREAMHEADER_STATUSF_PREPARED, "header wasn't prepared: 0x%x\n", hdr.fdwStatus);

        hdr.cbSrcLengthUsed = 12345;
        hdr.cbDstLengthUsed = 12345;
        hdr.fdwStatus &= ~ACMSTREAMHEADER_STATUSF_DONE;
        mr = acmStreamConvert(has, &hdr, ACM_STREAMCONVERTF_BLOCKALIGN);
        ok(mr == MMSYSERR_NOERROR, "convert failed: 0x%x\n", mr);
        ok(hdr.fdwStatus & ACMSTREAMHEADER_STATUSF_DONE, "conversion was not done: 0x%x\n", hdr.fdwStatus);
        ok(hdr.cbSrcLengthUsed == hdr.cbSrcLength, "expected %d, got %d\n", hdr.cbSrcLength, hdr.cbSrcLengthUsed);
        todo_wine
        ok(hdr.cbDstLengthUsed == 1010, "expected 1010, got %d\n", hdr.cbDstLengthUsed);

        mr = acmStreamUnprepareHeader(has, &hdr, 0);
        ok(mr == MMSYSERR_NOERROR, "unprepare failed: 0x%x\n", mr);
        ok(hdr.fdwStatus == ACMSTREAMHEADER_STATUSF_DONE, "header wasn't unprepared: 0x%x\n", hdr.fdwStatus);
    }
    else
        ok(mr == MMSYSERR_INVALPARAM, "expected 11, got %d\n", mr);

    memset(&hdr, 0, sizeof(hdr));
    hdr.cbStruct = sizeof(hdr);
    hdr.pbSrc = input;
    hdr.cbSrcLength = 24;
    hdr.pbDst = pcm;
    hdr.cbDstLength = -4;
    mr = acmStreamPrepareHeader(has, &hdr, 0);
    todo_wine {
        ok(mr == ACMERR_NOTPOSSIBLE, "expected 0x200, got 0x%x\n", mr);
        ok(hdr.fdwStatus == 0, "expected 0, got 0x%x\n", hdr.fdwStatus);

        hdr.cbSrcLengthUsed = 12345;
        hdr.cbDstLengthUsed = 12345;
        mr = acmStreamConvert(has, &hdr, ACM_STREAMCONVERTF_BLOCKALIGN);
        ok(mr == ACMERR_UNPREPARED, "expected 0x202, got 0x%x\n", mr);
        ok(hdr.cbSrcLengthUsed == 12345, "expected 12345, got %d\n", hdr.cbSrcLengthUsed);
        ok(hdr.cbDstLengthUsed == 12345, "expected 12345, got %d\n", hdr.cbDstLengthUsed);

        mr = acmStreamUnprepareHeader(has, &hdr, 0);
        ok(mr == ACMERR_UNPREPARED, "expected 0x202, got 0x%x\n", mr);
    }
    /* Less output space than required */
    memset(&hdr, 0, sizeof(hdr));
    hdr.cbStruct = sizeof(hdr);
    hdr.pbSrc = input;
    hdr.cbSrcLength = sizeof(input);
    hdr.pbDst = pcm;
    hdr.cbDstLength = 32;

    mr = acmStreamPrepareHeader(has, &hdr, 0);
    ok(mr == MMSYSERR_NOERROR, "prepare failed: 0x%x\n", mr);
    ok(hdr.fdwStatus == ACMSTREAMHEADER_STATUSF_PREPARED, "header wasn't prepared: 0x%x\n", hdr.fdwStatus);

    hdr.cbSrcLengthUsed = 12345;
    hdr.cbDstLengthUsed = 12345;
    hdr.fdwStatus &= ~ACMSTREAMHEADER_STATUSF_DONE;
    mr = acmStreamConvert(has, &hdr, ACM_STREAMCONVERTF_BLOCKALIGN);
    ok(mr == MMSYSERR_NOERROR, "convert failed: 0x%x\n", mr);
    ok(hdr.fdwStatus & ACMSTREAMHEADER_STATUSF_DONE, "conversion was not done: 0x%x\n", hdr.fdwStatus);
    todo_wine
    ok(hdr.cbSrcLengthUsed == hdr.cbSrcLength, "expected %d, got %d\n", hdr.cbSrcLength, hdr.cbSrcLengthUsed);
    todo_wine
    ok(hdr.cbDstLengthUsed == hdr.cbDstLength, "expected %d, got %d\n", hdr.cbDstLength, hdr.cbDstLengthUsed);

    mr = acmStreamUnprepareHeader(has, &hdr, 0);
    ok(mr == MMSYSERR_NOERROR, "unprepare failed: 0x%x\n", mr);
    ok(hdr.fdwStatus == ACMSTREAMHEADER_STATUSF_DONE, "header wasn't unprepared: 0x%x\n", hdr.fdwStatus);

    mr = acmStreamClose(has, 0);
    ok(mr == MMSYSERR_NOERROR, "close failed: 0x%x\n", mr);
}
Example #15
static int preinit(sh_audio_t *sh_audio)
{
    HRESULT ret;
    WAVEFORMATEX *in_fmt = sh_audio->wf;
    DWORD srcsize = 0;
    acm_context_t *priv;

    priv = malloc(sizeof(acm_context_t));
    if (!priv)
	return 0;
    sh_audio->context = priv;

    mp_msg(MSGT_WIN32, MSGL_V, "======= Win32 (ACM) AUDIO Codec init =======\n");

//    priv->handle = NULL;

    priv->o_wf = malloc(sizeof(*priv->o_wf));
    if (!priv->o_wf)
    {
	mp_msg(MSGT_DECAUDIO,MSGL_ERR,MSGTR_ACMiniterror);
	return 0;
    }

    priv->o_wf->nChannels = in_fmt->nChannels;
    priv->o_wf->nSamplesPerSec = in_fmt->nSamplesPerSec;
    priv->o_wf->nAvgBytesPerSec = 2*in_fmt->nSamplesPerSec*in_fmt->nChannels;
    priv->o_wf->wFormatTag = WAVE_FORMAT_PCM;
    priv->o_wf->nBlockAlign = 2*in_fmt->nChannels;
    priv->o_wf->wBitsPerSample = 16;
//    priv->o_wf->wBitsPerSample = inf_fmt->wBitsPerSample;
    priv->o_wf->cbSize = 0;

    if ( mp_msg_test(MSGT_DECAUDIO,MSGL_V) )
    {
	mp_msg(MSGT_DECAUDIO, MSGL_V, "Input format:\n");
	print_wave_header(in_fmt, MSGL_V);
	mp_msg(MSGT_DECAUDIO, MSGL_V, "Output format:\n");
	print_wave_header(priv->o_wf, MSGL_V);
    }

    MSACM_RegisterDriver((const char *)sh_audio->codec->dll, in_fmt->wFormatTag, 0);
    ret = acmStreamOpen(&priv->handle, (HACMDRIVER)NULL, in_fmt,
			priv->o_wf, NULL, 0, 0, 0);
    if (ret)
    {
	if (ret == ACMERR_NOTPOSSIBLE)
	    mp_msg(MSGT_WIN32, MSGL_ERR, "ACM_Decoder: Unappropriate audio format\n");
	else
	    mp_msg(MSGT_WIN32, MSGL_ERR, "ACM_Decoder: acmStreamOpen error: %d\n",
		(int)ret);
	mp_msg(MSGT_DECAUDIO,MSGL_ERR,MSGTR_ACMiniterror);
	return 0;
    }
    mp_msg(MSGT_WIN32, MSGL_V, "Audio codec opened OK! ;-)\n");

    acmStreamSize(priv->handle, in_fmt->nBlockAlign, &srcsize, ACM_STREAMSIZEF_SOURCE);
    //if ( mp_msg_test(MSGT_DECAUDIO,MSGL_V) ) printf("Audio ACM output buffer min. size: %ld (reported by codec)\n", srcsize);
    srcsize *= 2;
    //if (srcsize < MAX_OUTBURST) srcsize = MAX_OUTBURST;
    if (!srcsize)
    {
	mp_msg(MSGT_WIN32, MSGL_WARN, "Warning! ACM codec reports srcsize=0\n");
	srcsize = 16384;
    }
    // limit srcsize to 4-16kb
    //while(srcsize && srcsize<4096) srcsize*=2;
    //while(srcsize>16384) srcsize/=2;
    sh_audio->audio_out_minsize=srcsize; // audio output min. size
    mp_msg(MSGT_WIN32,MSGL_V,"Audio ACM output buffer min. size: %ld\n",srcsize);

    acmStreamSize(priv->handle, srcsize, &srcsize, ACM_STREAMSIZEF_DESTINATION);
//    if(srcsize<in_fmt->nBlockAlign) srcsize=in_fmt->nBlockAlign;

    if (!srcsize)
    {
	mp_msg(MSGT_WIN32, MSGL_WARN, "Warning! ACM codec reports srcsize=0\n");
	srcsize = 2*in_fmt->nBlockAlign;
    }

    mp_msg(MSGT_WIN32,MSGL_V,"Audio ACM input buffer min. size: %ld\n",srcsize);

    sh_audio->audio_in_minsize=2*srcsize; // audio input min. size

    sh_audio->i_bps=sh_audio->wf->nAvgBytesPerSec;
    sh_audio->channels=priv->o_wf->nChannels;
    sh_audio->samplerate=priv->o_wf->nSamplesPerSec;
    sh_audio->samplesize=2;

    mp_msg(MSGT_DECVIDEO,MSGL_V,"INFO: Win32/ACM audio codec init OK!\n");
    return 1;
}
Example #16
void ConvertNode(HWND hwnd, AFile* node, const char* fname, WORD tag)
{
    FSHandle* file;
    HANDLE wavfile;
    RIFFHeader riffhdr;
	ChunkHeader chunkhdr;
    DWORD riffsize,factsize,datasize,written,rate,buffpos,pcmsize,dstsize,sizeToFill,sizeFilled;
	DWORD pos_riffsize,pos_factsize,pos_datasize;
    WORD  channels,bits;
    char  str[MAX_PATH+100],*pcmBuffer=NULL,*dstBuffer=NULL;
	LPWAVEFORMATEX pwfex;
	ACMFORMATTAGDETAILS aftd={0};
	MMRESULT mmr;
	WAVEFORMATEX wfexPCM;
	HACMSTREAM hACMStream;
	ACMSTREAMHEADER acmshdr;

    if ((file=FSOpenForPlayback(hwnd,node,&rate,&channels,&bits))==NULL)
		return;
	wfexPCM.wFormatTag=WAVE_FORMAT_PCM;
	wfexPCM.nChannels=channels;
	wfexPCM.nSamplesPerSec=rate;
	wfexPCM.wBitsPerSample=bits;
	wfexPCM.cbSize=0;
	wfexPCM.nBlockAlign=channels*(bits/8);
	wfexPCM.nAvgBytesPerSec=rate*wfexPCM.nBlockAlign;
	switch (tag)
	{
		case WAVE_FORMAT_PCM:
			pwfex=NULL; // ???
			hACMStream=NULL;
			dstBuffer=NULL; // ???
			pcmBuffer=(char*)GlobalAlloc(GPTR,BUFFERSIZE);
			break;
		default:
			aftd.cbStruct=sizeof(aftd);
			aftd.dwFormatTag=tag;
			mmr=acmFormatTagDetails(NULL,&aftd,ACM_FORMATTAGDETAILSF_LARGESTSIZE);
			if (mmr!=MMSYSERR_NOERROR)
			{
				AFPLUGIN(node)->ShutdownPlayback(file);
				FSCloseFile(file);
				wsprintf(str,"Failed to get details for wave format tag: 0x%X",tag);
				ReportMMError(hwnd,mmr,str);
				return;
			}
			pwfex=(LPWAVEFORMATEX)LocalAlloc(LPTR,aftd.cbFormatSize);
			pwfex->wFormatTag=tag;
			mmr=acmFormatSuggest(NULL,&wfexPCM,pwfex,aftd.cbFormatSize,ACM_FORMATSUGGESTF_WFORMATTAG);
			if (mmr!=MMSYSERR_NOERROR)
			{
				LocalFree(pwfex);
				AFPLUGIN(node)->ShutdownPlayback(file);
				FSCloseFile(file);
				wsprintf(str,"No format suggested for wave format tag: 0x%X",tag);
				ReportMMError(hwnd,mmr,str);
				return;
			}
			mmr=acmStreamOpen(&hACMStream,NULL,&wfexPCM,pwfex,NULL,0,0,ACM_STREAMOPENF_NONREALTIME);
			if (mmr!=MMSYSERR_NOERROR)
			{
				LocalFree(pwfex);
				AFPLUGIN(node)->ShutdownPlayback(file);
				FSCloseFile(file);
				wsprintf(str,"Failed to open conversion stream for wave format tag: 0x%X",tag);
				ReportMMError(hwnd,mmr,str);
				return;
			}
			if (acmStreamSize(hACMStream,BUFFERSIZE,&dstsize,ACM_STREAMSIZEF_SOURCE)!=MMSYSERR_NOERROR)
				dstsize=BUFFERSIZE;
			pcmBuffer=(char*)GlobalAlloc(GPTR,BUFFERSIZE);
			dstBuffer=(char*)GlobalAlloc(GPTR,dstsize);
			memset(&acmshdr,0x00,sizeof(acmshdr)); // ???
			acmshdr.cbStruct=sizeof(ACMSTREAMHEADER);
			acmshdr.fdwStatus=0;
			acmshdr.pbSrc=pcmBuffer;
			acmshdr.cbSrcLength=BUFFERSIZE;
			acmshdr.cbSrcLengthUsed=0;
			acmshdr.pbDst=dstBuffer;
			acmshdr.cbDstLength=dstsize;
			acmshdr.cbDstLengthUsed=0;
			mmr=acmStreamPrepareHeader(hACMStream,&acmshdr,0L);
			if (mmr!=MMSYSERR_NOERROR)
			{
				GlobalFree(dstBuffer);
				GlobalFree(pcmBuffer);
				acmStreamClose(hACMStream,0);
				LocalFree(pwfex);
				AFPLUGIN(node)->ShutdownPlayback(file);
				FSCloseFile(file);
				ReportMMError(hwnd,mmr,"Failed to prepare conversion stream header.");
				return;
			}
			acmshdr.cbSrcLength=0;
	}
	if (!EnsureDirPresence(fname))
	{
		if (hACMStream!=NULL)
		{
			acmStreamUnprepareHeader(hACMStream,&acmshdr,0L);
			acmStreamClose(hACMStream,0);
		}
		GlobalFree(dstBuffer);
		GlobalFree(pcmBuffer);
		LocalFree(pwfex);
		AFPLUGIN(node)->ShutdownPlayback(file);
		FSCloseFile(file);
		return;
	}
    wavfile=CreateFile(fname,
					   GENERIC_WRITE,
					   FILE_SHARE_READ,
					   NULL,
					   CREATE_ALWAYS,
					   FILE_ATTRIBUTE_NORMAL,
					   NULL
					  );
    if (wavfile==INVALID_HANDLE_VALUE)
    {
		if (hACMStream!=NULL)
		{
			acmStreamUnprepareHeader(hACMStream,&acmshdr,0L);
			acmStreamClose(hACMStream,0);
		}
		GlobalFree(dstBuffer);
		GlobalFree(pcmBuffer);
		LocalFree(pwfex);
		AFPLUGIN(node)->ShutdownPlayback(file);
		FSCloseFile(file);
		wsprintf(str,"Cannot open WAV file: %s",fname);
		ReportError(hwnd,str,NULL);
		return;
    }
    ShowProgressHeaderMsg(fname);
	datasize=0;
	factsize=0;
    ShowProgressStateMsg("Writing RIFF header...");
    SetFilePointer(wavfile,0,NULL,FILE_BEGIN);
    lstrcpy(riffhdr.riffid,IDSTR_RIFF);
    lstrcpy(riffhdr.rifftype,IDSTR_WAVE);
	riffhdr.riffsize=0;
	WriteFile(wavfile,&riffhdr,sizeof(RIFFHeader),&written,NULL);
	pos_riffsize=SetFilePointer(wavfile,0,NULL,FILE_CURRENT)-sizeof(riffhdr.rifftype)-sizeof(riffhdr.riffsize);
	CorrectOddPos(wavfile);
	ShowProgressStateMsg("Writing fmt chunk...");
    lstrcpy(chunkhdr.id,IDSTR_fmt);
	switch (tag)
	{
		case WAVE_FORMAT_PCM:
			chunkhdr.size=sizeof(wfexPCM);
			WriteFile(wavfile,&chunkhdr,sizeof(chunkhdr),&written,NULL);
			WriteFile(wavfile,&wfexPCM,chunkhdr.size,&written,NULL);
			CorrectOddPos(wavfile);
			break;
		default:
			chunkhdr.size=aftd.cbFormatSize;
			WriteFile(wavfile,&chunkhdr,sizeof(chunkhdr),&written,NULL);
			WriteFile(wavfile,pwfex,chunkhdr.size,&written,NULL);
			CorrectOddPos(wavfile);
			lstrcpy(chunkhdr.id,IDSTR_fact);
			chunkhdr.size=sizeof(factsize);
			WriteFile(wavfile,&chunkhdr,sizeof(chunkhdr),&written,NULL);
			pos_factsize=SetFilePointer(wavfile,0,NULL,FILE_CURRENT);
			WriteFile(wavfile,&factsize,sizeof(factsize),&written,NULL);
			CorrectOddPos(wavfile);
	}
    lstrcpy(chunkhdr.id,IDSTR_data);
	chunkhdr.size=datasize;
	WriteFile(wavfile,&chunkhdr,sizeof(chunkhdr),&written,NULL);
	pos_datasize=SetFilePointer(wavfile,0,NULL,FILE_CURRENT)-sizeof(datasize);
    while (1)
    {
		if (IsCancelled())
			break;
		switch (tag)
		{
			case WAVE_FORMAT_PCM:
				sizeToFill=BUFFERSIZE;
				sizeFilled=0;
				break;
			default:
				if (acmshdr.cbSrcLengthUsed!=0L)
				{
					memmove(pcmBuffer,pcmBuffer+acmshdr.cbSrcLengthUsed,acmshdr.cbSrcLength-acmshdr.cbSrcLengthUsed);
					acmshdr.cbSrcLength-=acmshdr.cbSrcLengthUsed;
				}
				if (acmshdr.cbSrcLength<BUFFERSIZE)
					sizeToFill=BUFFERSIZE-acmshdr.cbSrcLength;
				sizeFilled=acmshdr.cbSrcLength;
		}
		wsprintf(str,"Converting %s data block to PCM...",file->node->afID);
		ShowProgressStateMsg(str);
		pcmsize=0;
		if (sizeToFill>0)
			pcmsize=AFPLUGIN(node)->FillPCMBuffer(file,pcmBuffer+sizeFilled,sizeToFill,&buffpos);
		if (tag==WAVE_FORMAT_PCM)
		{
			if (pcmsize==0L)
				break;
			ShowProgressStateMsg("Writing WAV data block...");
			WriteFile(wavfile,pcmBuffer,pcmsize,&written,NULL);
			if (written!=pcmsize)
			{
				ReportError(hwnd,"Failure writing WAV file.",NULL);
				SetCancelFlag();
				break;
			}
			datasize+=written;
		}
		else
		{
			acmshdr.cbSrcLength+=pcmsize;
			if (acmshdr.cbSrcLength==0L)
				break;
			acmshdr.fdwStatus^=ACMSTREAMHEADER_STATUSF_DONE;
			acmshdr.cbSrcLengthUsed=0;
			acmshdr.cbDstLength=dstsize;
			acmshdr.cbDstLengthUsed=0;
			wsprintf(str,"Compressing PCM data block...");
			ShowProgressStateMsg(str);
			mmr=acmStreamConvert(hACMStream,&acmshdr,ACM_STREAMCONVERTF_BLOCKALIGN);
			if (mmr!=MMSYSERR_NOERROR)
			{
				CloseHandle(wavfile);
				DeleteFile(fname);
				acmStreamUnprepareHeader(hACMStream,&acmshdr,0L);
				acmStreamClose(hACMStream,0);
				GlobalFree(dstBuffer);
				GlobalFree(pcmBuffer);
				LocalFree(pwfex);
				AFPLUGIN(node)->ShutdownPlayback(file);
				FSCloseFile(file);
				ReportMMError(hwnd,mmr,"Error during compression.");
				return;
			}
			if (acmshdr.cbSrcLengthUsed==0L)
			{
				acmshdr.fdwStatus^=ACMSTREAMHEADER_STATUSF_DONE;
				acmStreamConvert(hACMStream,&acmshdr,0L);
			}
			factsize+=acmshdr.cbSrcLengthUsed/wfexPCM.nBlockAlign;
			ShowProgressStateMsg("Writing WAV data block...");
			WriteFile(wavfile,dstBuffer,acmshdr.cbDstLengthUsed,&written,NULL);
			if (written!=acmshdr.cbDstLengthUsed)
			{
				ReportError(hwnd,"Failure writing WAV file.",NULL);
				SetCancelFlag();
				break;
			}
			datasize+=written;
		}
		ShowProgress(FSGetFilePointer(file),FSGetFileSize(file)); // ???
    }
    if (IsCancelled())
    {
		ShowProgressStateMsg("Deleting WAV file...");
		CloseHandle(wavfile);
		DeleteFile(fname);
    }
    else
    {
		CorrectOddPos(wavfile);
		ShowProgressStateMsg("Rewriting WAV header...");
		riffsize=GetFileSize(wavfile,NULL)-8;
		SetFilePointer(wavfile,pos_riffsize,NULL,FILE_BEGIN);
		WriteFile(wavfile,&riffsize,sizeof(riffsize),&written,NULL);
		if (tag!=WAVE_FORMAT_PCM)
		{
			SetFilePointer(wavfile,pos_factsize,NULL,FILE_BEGIN);
			WriteFile(wavfile,&factsize,sizeof(factsize),&written,NULL);
		}
		SetFilePointer(wavfile,pos_datasize,NULL,FILE_BEGIN);
		WriteFile(wavfile,&datasize,sizeof(datasize),&written,NULL);
		CloseHandle(wavfile);
    }
	if (hACMStream!=NULL)
	{
		ShowProgressStateMsg("Closing conversion stream...");
		acmStreamUnprepareHeader(hACMStream,&acmshdr,0L);
		acmStreamClose(hACMStream,0);
	}
	ShowProgressStateMsg("Freeing conversion buffers...");
	GlobalFree(dstBuffer);
	GlobalFree(pcmBuffer);
	LocalFree(pwfex);
	wsprintf(str,"Shutting down %s decoder...",file->node->afID);
    ShowProgressStateMsg(str);
    AFPLUGIN(node)->ShutdownPlayback(file);
	FSCloseFile(file);
}
Example #17
/// <summary>
/// <c>wACMStreamOpen</c> 
/// </summary>
/// <remarks>
/// </remarks>
/// <param name="phas"></param>
/// <param name="had"></param>
/// <param name="pwfxSrc"></param>
/// <param name="pwfxDst"></param>
/// <param name="pwfltr"></param>
/// <param name="dwCallback"></param>
/// <param name="dwInstance"></param>
/// <param name="fdwOpen"></param>
/// <returns>HRESULT __stdcall</returns>
HRESULT __stdcall 
wACMStreamOpen(PHACMSTREAM phas, HACMDRIVER had, const PWAVEFORMATEX pwfxSrc, 
			   const PWAVEFORMATEX pwfxDst, PWAVEFILTER pwfltr, ULONG_PTR dwCallback, 
			   ULONG_PTR dwInstance, ULONG fdwOpen)
{
	PSTR		pszwfDst;
	PSTR		pszwfSrc;
	PSTR		pszErrorMessage;
	MMRESULT	hResult;
	char		BUffer2[128];
	char		Buffer[128];

	pszwfDst = GetWaveFormat(Buffer, pwfxDst);
	pszwfSrc = GetWaveFormat(BUffer2, pwfxSrc);
	InternalFunctionSpew(
		"GameOS_DirectSound",
		"acmStreamOpen(0x%x, 0x%x, %s, %s, 0x%x,0x%x, 0x%x, 0x%x)",
		phas,
		had,
		pszwfSrc,
		pszwfDst,
		pwfltr,
		dwCallback,
		dwInstance,
		fdwOpen);

	hResult = acmStreamOpen(phas, had, pwfxSrc, pwfxDst, pwfltr, dwCallback, dwInstance, 1u);
	if (MMFAILED(hResult))
	{
		pszwfDst = GetWaveFormat(Buffer, pwfxDst);
		pszwfSrc = GetWaveFormat(BUffer2, pwfxSrc);
		pszErrorMessage = ErrorNumberToMessage(hResult);
		if ( InternalFunctionPause(
			"FAILED (0x%x - %s) - acmStreamOpen(0x%x, 0x%x, %s, %s, 0x%x,0x%x, 0x%x, 0x%x)",
			hResult,
			pszErrorMessage,
			phas,
			had,
			pszwfSrc,
			pszwfDst,
			pwfltr,
			dwCallback,
			dwInstance,
			fdwOpen) )
			ENTER_DEBUGGER;
	}
	hResult = acmStreamOpen(phas, had, pwfxSrc, pwfxDst, pwfltr, dwCallback, dwInstance, fdwOpen);
	if (MMFAILED(hResult))
	{
		pszwfDst = GetWaveFormat(Buffer, pwfxDst);
		pszwfSrc = GetWaveFormat(BUffer2, pwfxSrc);
		pszErrorMessage = ErrorNumberToMessage(hResult);
		if ( InternalFunctionPause(
			"FAILED (0x%x - %s) - acmStreamOpen(0x%x, 0x%x, %s, %s, 0x%x,0x%x, 0x%x, 0x%x)",
			hResult,
			pszErrorMessage,
			phas,
			had,
			pszwfSrc,
			pszwfDst,
			pwfltr,
			dwCallback,
			dwInstance,
			fdwOpen) )
			ENTER_DEBUGGER;
	}

	return hResult;
}
Example #18
int ToMP3(short *Source, short *Dest, int Size, int BitRate)
{
    int Src_size;
    int Dest_Size;

    Wave_Format.wFormatTag = WAVE_FORMAT_PCM;
    Wave_Format.nChannels = 1;
    Wave_Format.cbSize = 0;
    Wave_Format.wBitsPerSample = 16;
    Wave_Format.nSamplesPerSec = 44100;
    Wave_Format.nBlockAlign = Wave_Format.nChannels * Wave_Format.wBitsPerSample / 8;
    Wave_Format.nAvgBytesPerSec = Wave_Format.nSamplesPerSec * Wave_Format.nBlockAlign;

    MP3_Format.wfx.wFormatTag = WAVE_FORMAT_MPEGLAYER3;
    MP3_Format.wfx.cbSize = MPEGLAYER3_WFX_EXTRA_BYTES;
    MP3_Format.wfx.nChannels = 1;
    MP3_Format.wfx.nSamplesPerSec = 44100;
    MP3_Format.wfx.nAvgBytesPerSec = BitRate * (1000 / 8);
    MP3_Format.wfx.wBitsPerSample = 0;
    MP3_Format.wfx.nBlockAlign = 1;
    MP3_Format.wID = MPEGLAYER3_ID_MPEG;
    MP3_Format.fdwFlags = MPEGLAYER3_FLAG_PADDING_OFF;
    MP3_Format.nBlockSize = 0;
    MP3_Format.nFramesPerBlock = 0;
    MP3_Format.nCodecDelay = 0;

    acmStreamOpen(&Pack_Stream, NULL, (LPWAVEFORMATEX) &Wave_Format, (LPWAVEFORMATEX) &MP3_Format, NULL, 0, 0, 0);

    Src_size = Size;
    unsigned long rawbufsize = 0;
    acmStreamSize(Pack_Stream, Src_size, &rawbufsize, ACM_STREAMSIZEF_SOURCE);
    rawbufsize += MP3_FRAMES_LAG * 2;
    Uint8 *Pack_Buf = (Uint8 *) malloc(Src_size + (MP3_FRAMES_LAG * 4) + 8);
    memset(Pack_Buf, 0, Src_size + (MP3_FRAMES_LAG * 4) + 8);
    Uint8 *rawbuf = (Uint8 *) malloc(rawbufsize + (MP3_FRAMES_LAG * 4) + 8);
    memset(rawbuf, 0, rawbufsize + (MP3_FRAMES_LAG * 4) + 8);

    ACMSTREAMHEADER Pack_Stream_Head;
    ZeroMemory(&Pack_Stream_Head, sizeof(ACMSTREAMHEADER));
    Pack_Stream_Head.cbStruct = sizeof(ACMSTREAMHEADER);
    Pack_Stream_Head.pbSrc = (Uint8 *) Pack_Buf;
    Pack_Stream_Head.cbSrcLength = Src_size + (MP3_FRAMES_LAG * 2);
    Pack_Stream_Head.pbDst = rawbuf;
    Pack_Stream_Head.cbDstLength = rawbufsize;
    acmStreamPrepareHeader(Pack_Stream, &Pack_Stream_Head, 0);

    memcpy(Pack_Buf, Source, Src_size);

    acmStreamConvert(Pack_Stream, &Pack_Stream_Head, 0);
    Dest_Size = Pack_Stream_Head.cbDstLengthUsed;
    if(Dest_Size < Src_size)
    {
        memcpy(Dest, rawbuf, Dest_Size);
    }
    else
    {
        Dest_Size = 0;
    }

    acmStreamUnprepareHeader(Pack_Stream, &Pack_Stream_Head, 0);
    acmStreamClose(Pack_Stream, 0);
    if(rawbuf) free(rawbuf);
    if(Pack_Buf) free(Pack_Buf);

    return(Dest_Size);
}
Example #19
int ToAT3(short *Source, short *Dest, int Size, int BitRate)
{
    int Src_size;
    int Dest_Size;
    int i;

    Wave_Format.wFormatTag = WAVE_FORMAT_PCM;
    Wave_Format.nChannels = 2;
    Wave_Format.cbSize = 0;
    Wave_Format.wBitsPerSample = 16;
    Wave_Format.nSamplesPerSec = 44100;
    Wave_Format.nBlockAlign = Wave_Format.nChannels * Wave_Format.wBitsPerSample / 8;
    Wave_Format.nAvgBytesPerSec = Wave_Format.nSamplesPerSec * Wave_Format.nBlockAlign;

    At3_Format.wfx.wFormatTag = 0x270;
    At3_Format.wfx.nChannels = 2;
    At3_Format.wfx.nSamplesPerSec = 44100;
    At3_Format.wfx.nAvgBytesPerSec = BitRate * 125;
    switch(BitRate)
    {
        case 66:
            At3_Format.wfx.nBlockAlign = 192;
            break;

        case 105:
            At3_Format.wfx.nBlockAlign = 304;
            break;

        case 132:
            At3_Format.wfx.nBlockAlign = 384;
            break;
    }
    At3_Format.wfx.wBitsPerSample = 0;
    At3_Format.wfx.cbSize = 0xe;
    At3_Format.wRevision = 1;
    At3_Format.nSamplesPerBlock = 0x800;
    At3_Format.abReserved[2] = 1;
    At3_Format.abReserved[4] = 1;
    At3_Format.abReserved[6] = 1;
    acmStreamOpen(&Pack_Stream, NULL, (LPWAVEFORMATEX) &Wave_Format, (LPWAVEFORMATEX) &At3_Format, NULL, 0, 0, 0);

    short *dwSource = (short *) malloc(Size * 2 + 8);
    memset(dwSource, 0, Size * 2 + 8);
    for(i = 0; i < Size / 2; i++)
    {
        dwSource[(i * 2)] = Source[i];
        dwSource[(i * 2) + 1] = 0;
    }

    Source = dwSource;
    Size *= 2;

    Src_size = Size;
    unsigned long rawbufsize = 0;
    acmStreamSize(Pack_Stream, Src_size, &rawbufsize, ACM_STREAMSIZEF_SOURCE);
    Uint8 *Pack_Buf = (Uint8 *) malloc(Src_size + 8);
    memset(Pack_Buf, 0, Src_size + 8);
    Uint8 *rawbuf = (Uint8 *) malloc(rawbufsize + 8);
    memset(rawbuf, 0, rawbufsize + 8);

    ACMSTREAMHEADER Pack_Stream_Head;
    ZeroMemory(&Pack_Stream_Head, sizeof(ACMSTREAMHEADER));
    Pack_Stream_Head.cbStruct = sizeof(ACMSTREAMHEADER);
    Pack_Stream_Head.pbSrc = (Uint8 *) Pack_Buf;
    Pack_Stream_Head.cbSrcLength = Src_size;
    Pack_Stream_Head.pbDst = rawbuf;
    Pack_Stream_Head.cbDstLength = rawbufsize;
    acmStreamPrepareHeader(Pack_Stream, &Pack_Stream_Head, 0);

    memcpy(Pack_Buf, Source, Src_size);

    acmStreamConvert(Pack_Stream, &Pack_Stream_Head, 0);
    Dest_Size = Pack_Stream_Head.cbDstLengthUsed;
    if(Dest_Size < Src_size)
    {
        memcpy(Dest, rawbuf, Dest_Size);
    }
    else
    {
        Dest_Size = 0;
    }

    acmStreamUnprepareHeader(Pack_Stream, &Pack_Stream_Head, 0);
    if(rawbuf) free(rawbuf);
    if(Pack_Buf) free(Pack_Buf);
    if(dwSource) free(dwSource);
    acmStreamClose(Pack_Stream, 0);

    return(Dest_Size);
}
Example #20
void
audio_resampler_acm::open( void )
{


    MMRESULT err;


    //
    // Opens ACM stream
    //

    err = acmStreamOpen( &acm_stream, 0, &wformat_src, &wformat_dst, 
                    0, 0, 0, ACM_STREAMOPENF_NONREALTIME );


    if ( err != MMSYSERR_NOERROR )
    {
        //TODO: throw error
        MessageBox( 0, _T("acmOpen error: %i"), _T("ERROR"), MB_ICONERROR );

    }



    //
    // Calculate the source buffer length
    //

    src_buflen = ( unsigned int )
        (( float )audfmt_in.byte_rate() * ( float )buf_secs );


    
    
    
    
    //
    // Calculate the destination buffer length
    // with the help of the ACM APIs
    //

    err = acmStreamSize( acm_stream, 
        src_buflen, &dst_buflen, ACM_STREAMSIZEF_SOURCE );
    

    if ( err != MMSYSERR_NOERROR )
    {
        //TODO: throw error
        MessageBox( 0, _T("acmStreamSize error"), _T("ERROR"), MB_ICONERROR );


    }



    //
    // Initialize ACMSTREAMHEADER structure,
    // and alloc memory for source and destination
    // buffers.
    //

    acm_header.fdwStatus = 0;
    acm_header.dwUser = 0;

    
    acm_header.pbSrc = ( LPBYTE ) new BYTE [ src_buflen ];
    acm_header.cbSrcLength = src_buflen;
    acm_header.cbSrcLengthUsed = 0;
    acm_header.dwSrcUser = src_buflen;


    acm_header.pbDst = ( LPBYTE ) new BYTE [ dst_buflen ];
    acm_header.cbDstLength = dst_buflen;
    acm_header.cbDstLengthUsed = 0;
    acm_header.dwDstUser = dst_buflen;




    //
    // Hand the initialized ACMSTREAMHEADER to the driver.
    //

    err = acmStreamPrepareHeader( acm_stream, &acm_header, 0L );

    if ( err != MMSYSERR_NOERROR )
    {
        //TODO: throw error
        MessageBox( 0, _T("acmStreamPrepareHeader error"), _T("ERROR"), MB_ICONERROR );


    }




    //
    // ACM stream successfully opened.
    //

    stream_opened = true;

}
Example #21
BOOL CPlayer::WavScanFile()
{
	DWORD dwSamples = 0;
	DWORD dwBuf = 0, dwRead = 0;
	m_Reader.SetPointer(0, FILE_BEGIN);
	WAVEFORMATEX *pwfxSrc, wfxDst;
	pwfxSrc = NULL;
	
	// "RIFF"
	if (!m_Reader.Read((LPBYTE)&dwBuf, sizeof(dwBuf), &dwRead) ||
		dwBuf != MAKEFOURCC('R', 'I', 'F', 'F'))
		return FALSE;

	// "WAVE"
	m_Reader.SetPointer(4, FILE_CURRENT);	
	if (!m_Reader.Read((LPBYTE)&dwBuf, sizeof(dwBuf), &dwRead) ||
		dwBuf != MAKEFOURCC('W', 'A', 'V', 'E'))
		goto fail;

	// "fmt "
	while (TRUE) {
		if (!m_Reader.Read((LPBYTE)&dwBuf, sizeof(dwBuf), &dwRead))
			goto fail;

		if (dwBuf == MAKEFOURCC('f', 'm', 't', ' '))
			break;

		if (!m_Reader.Read((LPBYTE)&dwBuf, sizeof(dwBuf), &dwRead))
			goto fail;

		if (m_Reader.SetPointer(dwBuf, FILE_CURRENT) == MAXLONGLONG)
			goto fail;
	}

	if (!m_Reader.Read((LPBYTE)&dwBuf, sizeof(dwBuf), &dwRead) || dwRead != sizeof(DWORD))
		goto fail;

	pwfxSrc = (WAVEFORMATEX*)new BYTE[dwBuf];
	if (!m_Reader.Read((LPBYTE)pwfxSrc, dwBuf, &dwRead) || dwRead != dwBuf)
		goto fail;

	if (pwfxSrc->nBlockAlign > WAV_FILE_BUFF_LEN)
		goto fail;

#if 1
	memset(&wfxDst, 0, sizeof(wfxDst));
	wfxDst.wFormatTag = WAVE_FORMAT_PCM;
	if (acmFormatSuggest(NULL, pwfxSrc, &wfxDst,
		sizeof(WAVEFORMATEX), ACM_FORMATSUGGESTF_WFORMATTAG) != 0)
		goto fail;
#endif

	// "fact" or "data"
	m_Reader.SetPointer(dwBuf - dwRead, FILE_CURRENT);

	if (!m_Reader.Read((LPBYTE)&dwBuf, sizeof(dwBuf), &dwRead))
		goto fail;

	if (dwBuf == MAKEFOURCC('f', 'a', 'c', 't')) {
		if (!m_Reader.Read((LPBYTE)&dwBuf, sizeof(dwBuf), &dwRead) ||
			dwBuf != 4)
			goto fail;
		
		if (!m_Reader.Read((LPBYTE)&dwBuf, sizeof(dwBuf), &dwRead) ||
			dwRead != sizeof(DWORD))
			goto fail;

		dwSamples = dwBuf;

		if (!m_Reader.Read((LPBYTE)&dwBuf, sizeof(dwBuf), &dwRead) ||
			dwRead != sizeof(DWORD))
			goto fail;
	}

	// "data"
	//if (dwBuf != MAKEFOURCC('d', 'a', 't', 'a'))
	//	goto fail;
retry:
	while (dwBuf != MAKEFOURCC('d', 'a', 't', 'a')) {
		BYTE b;
		if (!m_Reader.Read(&b, sizeof(b), &dwRead) || !dwRead)
			goto fail;
		LPBYTE pb = (LPBYTE)&dwBuf;
		pb[0] = pb[1];
		pb[1] = pb[2];
		pb[2] = pb[3];
		pb[3] = b;
	}

	if (!m_Reader.Read((LPBYTE)&dwBuf, sizeof(dwBuf), &dwRead) || !dwRead)
		goto fail;

	if (dwBuf == 0)
		goto retry;

	// Prepare the data
	m_dwDataSize = dwBuf;
	m_llDataOffset = m_Reader.SetPointer(0, FILE_CURRENT);
	if (m_dwDataSize > m_Reader.GetSize() - m_llDataOffset) {
		m_dwDataSize = (DWORD)(m_Reader.GetSize() - m_llDataOffset);
	}
	if (pwfxSrc->wFormatTag == WAVE_FORMAT_PCM) {
		dwSamples = m_dwDataSize / (pwfxSrc->nChannels * pwfxSrc->wBitsPerSample / 8);
		wfxDst = *pwfxSrc;
	}
	else if (!dwSamples) {
		dwSamples = m_dwDataSize / pwfxSrc->nAvgBytesPerSec * pwfxSrc->nSamplesPerSec;
	}

	memset(&m_Info, 0, sizeof(m_Info));
	m_Info.nChannels = wfxDst.nChannels;
	m_Info.nSamplingRate = wfxDst.nSamplesPerSec;
	m_Info.nBitRate = pwfxSrc->nAvgBytesPerSec * 8 / 1000;
	m_nDuration = dwSamples;

#if 1
	// Open the ACM conversion stream
	if (acmStreamOpen(&m_hAcm, NULL, pwfxSrc, &wfxDst, 
						NULL, 0, 0, ACM_STREAMOPENF_NONREALTIME))
		goto fail;
#endif

	m_pwfxSrc = pwfxSrc;
	m_pwfxDst = new WAVEFORMATEX;
	*m_pwfxDst = wfxDst;
	m_dwCurrentSize = 0;

	return TRUE;
fail:
	m_Reader.Close();
	if (pwfxSrc) delete[] (LPBYTE)pwfxSrc;
	return FALSE;
}
Example #22
	bool setupAudio()
	{
		int ret;


		//read audio stream info; specifically, we need the encoded chunksize
		AVISTREAMINFO audioStreamInfo;
		AVIStreamInfo(audioStream,&audioStreamInfo,sizeof(AVISTREAMINFO));
		audioChunkSize = audioStreamInfo.dwSuggestedBufferSize;
		audioSampleCount = audioStreamInfo.dwLength;

		audioEncoded = new char[audioChunkSize];

		//read the audio streamformat info
		LONG formatSize;
		AVIStreamReadFormat(audioStream,AVIStreamStart(audioStream),0,&formatSize);
		char *format = (char *)malloc(formatSize);
		AVIStreamReadFormat(audioStream,AVIStreamStart(audioStream),format,&formatSize);
		WAVEFORMATEX *wfxEncoded = (WAVEFORMATEX *)format;

		//construct a descriptor for the format we desire to get out of the decoder
		//note that we have to use the same samplerate as the encoded format indicates
		//since acm can't change the samplerate in one fell swoop
		wfxDecoded.cbSize = sizeof(WAVEFORMATEX);
		wfxDecoded.nChannels = wfxEncoded->nChannels;
		wfxDecoded.wFormatTag = WAVE_FORMAT_PCM;
		wfxDecoded.nSamplesPerSec = wfxEncoded->nSamplesPerSec;
		wfxDecoded.wBitsPerSample = 16;
		wfxDecoded.nBlockAlign = wfxDecoded.wBitsPerSample/8 * wfxEncoded->nChannels;
		wfxDecoded.nAvgBytesPerSec = wfxDecoded.nBlockAlign * wfxDecoded.nSamplesPerSec;

		sampleSize = wfxDecoded.nBlockAlign;

		//try to get a converter from the encoded data to the decoded data
		ret = acmStreamOpen(&acmStream,0,wfxEncoded,&wfxDecoded,0,0,0,0);

		//now we're done with wfxEncoded
		free(format);

		if(ret)
		{
			delete[] audioEncoded;
			AVIStreamClose(audioStream);
			return false;
		}


		//decide on a playback buffer size
		//make each buffer 1/2 sec long
		playBufferSamples = wfxDecoded.nSamplesPerSec / 2;
		playBufferSize = playBufferSamples * sampleSize;


		//hurry and try to create the output stream.
		//if we can't do that, then everything that follows is pointless.
		int mode = 0;
		if(wfxDecoded.wBitsPerSample == 8)
			mode |= FSOUND_8BITS;
		else if(wfxDecoded.wBitsPerSample == 16)
			mode |= FSOUND_16BITS;
		if(wfxDecoded.nChannels == 1)
			mode |= FSOUND_MONO;
		else
			mode |= FSOUND_STEREO;

		#ifdef SND_USE_FMOD
		fmod_stream = FSOUND_Stream_Create(win_movie_fmod_streamCallback,playBufferSize,mode,wfxDecoded.nSamplesPerSec,(int)this);
		if(!fmod_stream)
		{
			acmStreamClose(acmStream,0);
			delete[] audioEncoded;
			AVIStreamClose(audioStream);
			err("Error creating fmod stream for movieplayback.  Please report this case so we can improve the robustness of the movie player!");
			return false;
		}
		#endif


		//find out how large a decode buffer we need for the encode buffer chunksize
		acmStreamSize(acmStream,audioChunkSize,&decodeBufferSize,ACM_STREAMSIZEF_SOURCE);
		decodeBufferSamples = decodeBufferSize / sampleSize;


		//allocate the decode buffer
		audioDecoded = new char[decodeBufferSize];

		//prep the decode operation
		audioStreamHeader.cbStruct = sizeof(ACMSTREAMHEADER);
		audioStreamHeader.fdwStatus = 0;
		audioStreamHeader.pbSrc = (LPBYTE)audioEncoded;
		audioStreamHeader.cbSrcLength = audioChunkSize;
		audioStreamHeader.pbDst = (LPBYTE)audioDecoded;
		audioStreamHeader.cbDstLength = decodeBufferSize;
		ret = acmStreamPrepareHeader(acmStream,&audioStreamHeader,0);
		if(ret)
		{
			delete[] audioDecoded;
			acmStreamClose(acmStream,0);
			delete[] audioEncoded;
			AVIStreamClose(audioStream);
			return false;
		}

		#ifdef SND_USE_FMOD
		//finally we're ready to start the audio stream
		FSOUND_Stream_Play(FSOUND_FREE,fmod_stream);
		#endif

		return true;
	}
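When only a yes/no answer is needed, acmStreamOpen can also be called with ACM_STREAMOPENF_QUERY, which asks whether the conversion is supported without creating a stream. This could be used to bail out before allocating buffers; the short sketch below is not part of the example above and the name is mine.

#include <windows.h>
#include <mmsystem.h>
#include <msacm.h>

// Probe whether ACM can perform a conversion at all. With ACM_STREAMOPENF_QUERY
// no stream handle is created (the handle parameter is passed as NULL), so there
// is nothing to close afterwards.
static bool CanConvert(LPWAVEFORMATEX src, LPWAVEFORMATEX dst)
{
    return acmStreamOpen(NULL, NULL, src, dst, NULL, 0, 0,
                         ACM_STREAMOPENF_QUERY) == MMSYSERR_NOERROR;
}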
Ejemplo n.º 23
0
BOOL ClSoundDS::AddSoundBuffer(int handle,char *fileBuf,BOOL b3DSound)
{
	HRESULT			hr;
	RiffHead		riffHead;
	FmtHead			fmtHead;
	PCMWAVEFORMAT	pcmFormat;

	LPBYTE			lpBlockAdd1, lpBlockAdd2;
	DWORD			blockSize1, blockSize2;

	CopyMemory(&riffHead,fileBuf,sizeof(RiffHead));
	fileBuf += sizeof(RiffHead);
	if(strncmp(riffHead.riff,"RIFF",4) || strncmp(riffHead.type,"WAVE",4)){
		return FALSE;
	}
	if(NULL==lpDSBufferTop){
		lpDSBufferTop = lpDSBufferTail = new ClSoundBuffer;
	}else{
		lpDSBufferTail->lpNext = new ClSoundBuffer;
		lpDSBufferTail->lpNext->lpPrev = lpDSBufferTail;
		lpDSBufferTail = lpDSBufferTail->lpNext;
	}
	lpDSBufferTail->bufType = memory_sound;
	CopyMemory(&fmtHead,fileBuf,sizeof(FmtHead));
	fileBuf += sizeof(FmtHead);
	CopyMemory(&pcmFormat,fileBuf,sizeof(PCMWAVEFORMAT));
	if(pcmFormat.wf.wFormatTag == WAVE_FORMAT_PCM){
		fileBuf += fmtHead.size;
		CopyMemory(&fmtHead,fileBuf,sizeof(FmtHead));
		fileBuf += sizeof(FmtHead);
		while(0!=strncmp(fmtHead.fmt,"data",4)){
			fileBuf += fmtHead.size;
			CopyMemory(&fmtHead,fileBuf,sizeof(FmtHead));
			fileBuf += sizeof(FmtHead);
		}
		DSBUFFERDESC	dsbdesc;
		WAVEFORMATEX	audioFmt;
		ZeroMemory(&dsbdesc, sizeof(DSBUFFERDESC));
		dsbdesc.dwSize = sizeof(DSBUFFERDESC);
		dsbdesc.dwFlags = DSBCAPS_STATIC | DSBCAPS_CTRLVOLUME | DSBCAPS_CTRLPOSITIONNOTIFY | DSBCAPS_LOCSOFTWARE | DSBCAPS_GLOBALFOCUS;
		if(b3DSound) dsbdesc.dwFlags |= (DSBCAPS_CTRL3D | DSBCAPS_MUTE3DATMAXDISTANCE);
		dsbdesc.dwBufferBytes = fmtHead.size;
		CopyMemory(&audioFmt,&pcmFormat,sizeof(WAVEFORMAT));
		audioFmt.wBitsPerSample = pcmFormat.wBitsPerSample;
		audioFmt.cbSize = 0;
		dsbdesc.lpwfxFormat = &audioFmt;
		hr = lpDSound->CreateSoundBuffer(&dsbdesc, &lpTmpBuffer, NULL);
		lpTmpBuffer->QueryInterface(IID_IDirectSoundBuffer8,(LPVOID *)&lpDSBufferTail->lpDSBuffer);
		RELEASE(lpTmpBuffer);
		hr = lpDSBufferTail->lpDSBuffer->Lock(0,fmtHead.size, (LPVOID*)&lpBlockAdd1, &blockSize1,(LPVOID*)&lpBlockAdd2, &blockSize2, 0);
		if(DS_OK==hr){
			CopyMemory(lpBlockAdd1,fileBuf,blockSize1);
			if(fmtHead.size>blockSize1){
				CopyMemory(lpBlockAdd2,fileBuf+blockSize1,blockSize2);
			}
			lpDSBufferTail->lpDSBuffer->Unlock(lpBlockAdd1, blockSize1, lpBlockAdd2, blockSize2);
		}
		lpDSBufferTail->handle = handle;
	}else{
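		// compressed (non-PCM) format: rebuild the full WAVEFORMATEX including its
		// cbSize extra bytes, let ACM suggest a PCM target, decode the whole "data"
		// chunk in one pass, then copy the PCM result into the DirectSound buffer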
		WAVEFORMATEX	dstWF;
		WAVEFORMATEX	*pwfxInfo;
		WORD			cbExtraAlloc;
		MMRESULT		mmResult;
		fileBuf += sizeof(PCMWAVEFORMAT);
		CopyMemory(&cbExtraAlloc,fileBuf,sizeof(WORD));
		fileBuf += sizeof(WORD);
		pwfxInfo = (WAVEFORMATEX *)cl_malloc(sizeof(WAVEFORMATEX) +cbExtraAlloc);
		CopyMemory(pwfxInfo,&pcmFormat,sizeof(PCMWAVEFORMAT));
		pwfxInfo->cbSize = cbExtraAlloc;
		CopyMemory((char *)pwfxInfo +sizeof(WAVEFORMATEX),fileBuf,cbExtraAlloc);
		fileBuf += (fmtHead.size -sizeof(WAVEFORMATEX));
		CopyMemory(&fmtHead,fileBuf,sizeof(FmtHead));
		fileBuf += sizeof(FmtHead);
		while(0!=strncmp(fmtHead.fmt,"data",4)){
			fileBuf += fmtHead.size;
			CopyMemory(&fmtHead,fileBuf,sizeof(FmtHead));
			fileBuf += sizeof(FmtHead);
		}
		ZeroMemory(&dstWF,sizeof(dstWF));
		dstWF.wFormatTag = WAVE_FORMAT_PCM;
		mmResult = acmFormatSuggest(
			NULL,
			pwfxInfo,
			&dstWF,
			sizeof(dstWF),
			ACM_FORMATSUGGESTF_WFORMATTAG);
		if(mmResult != 0){
			myOutputDebugString("unsupported audio compression format\n");
			return FALSE;
		}
		HACMSTREAM				hAcm = NULL;
		ACMSTREAMHEADER			ash;
		AcmBuffer				acmDst;
		AcmBuffer				acmSrc;

		acmStreamOpen(&hAcm, NULL,pwfxInfo,&dstWF, NULL, 0L, 0L, ACM_STREAMOPENF_NONREALTIME);
		acmSrc.dwStreamSize = fmtHead.size;
		acmStreamSize(hAcm, acmSrc.dwStreamSize, &acmDst.dwStreamSize, ACM_STREAMSIZEF_SOURCE);
		cl_free(pwfxInfo);
		acmSrc.lpStream = (LPBYTE)cl_malloc(acmSrc.dwStreamSize);
		acmDst.lpStream = (LPBYTE)cl_malloc(acmDst.dwStreamSize);
		ZeroMemory(&ash,sizeof(ash));
		ash.cbStruct		= sizeof(ash);
		ash.pbSrc			= acmSrc.lpStream;
		ash.cbSrcLength		= acmSrc.dwStreamSize;
		ash.dwSrcUser		= acmSrc.dwStreamSize;
		ash.pbDst			= acmDst.lpStream;
		ash.cbDstLength		= acmDst.dwStreamSize;
		ash.dwDstUser		= acmDst.dwStreamSize;
		acmStreamPrepareHeader(hAcm,&ash,0);
		CopyMemory(acmSrc.lpStream,fileBuf,acmSrc.dwStreamSize);
		MMRESULT nError = acmStreamConvert(hAcm,&ash, ACM_STREAMCONVERTF_BLOCKALIGN);
		DSBUFFERDESC	dsbdesc;
		ZeroMemory(&dsbdesc, sizeof(DSBUFFERDESC));
		dsbdesc.dwSize = sizeof(DSBUFFERDESC);
		dsbdesc.dwFlags = DSBCAPS_STATIC | DSBCAPS_CTRLVOLUME | DSBCAPS_CTRLPOSITIONNOTIFY | DSBCAPS_LOCSOFTWARE | DSBCAPS_GLOBALFOCUS;
		if(b3DSound) dsbdesc.dwFlags |= (DSBCAPS_CTRL3D | DSBCAPS_MUTE3DATMAXDISTANCE);
		dsbdesc.dwBufferBytes = ash.cbDstLengthUsed;
		dsbdesc.lpwfxFormat = &dstWF;
		hr = lpDSound->CreateSoundBuffer(&dsbdesc, &lpTmpBuffer, NULL);
		lpTmpBuffer->QueryInterface(IID_IDirectSoundBuffer8,(LPVOID *)&lpDSBufferTail->lpDSBuffer);
		RELEASE(lpTmpBuffer);
		hr = lpDSBufferTail->lpDSBuffer->Lock(0,ash.cbDstLengthUsed, (LPVOID*)&lpBlockAdd1, &blockSize1,(LPVOID*)&lpBlockAdd2, &blockSize2, 0);
		if(DS_OK==hr){
			if(ash.cbDstLengthUsed < blockSize1){
				CopyMemory(lpBlockAdd1, acmDst.lpStream, ash.cbDstLengthUsed);
				ZeroMemory(lpBlockAdd1 +ash.cbDstLengthUsed, blockSize1 -ash.cbDstLengthUsed);
				if(blockSize2)ZeroMemory(lpBlockAdd2,blockSize2);
			}else{
				CopyMemory(lpBlockAdd1, acmDst.lpStream, blockSize1);
				if(blockSize2){
					CopyMemory(lpBlockAdd2, acmDst.lpStream+blockSize1, ash.cbDstLengthUsed -blockSize1);
				}
			}
			lpDSBufferTail->lpDSBuffer->Unlock(lpBlockAdd1, blockSize1, lpBlockAdd2, blockSize2);
		}
		if(hAcm){
			acmStreamUnprepareHeader(hAcm,&ash,0);
			acmStreamClose(hAcm,0);
			cl_free(acmSrc.lpStream);
			cl_free(acmDst.lpStream);
		}
		lpDSBufferTail->handle = handle;
	}
	return TRUE;
} // ClSoundDS::AddSoundBuffer
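Both branches above end with the same Lock/copy/Unlock sequence on the DirectSound buffer. The helper below is a sketch of that pattern (the name and signature are mine, not from the original file); DirectSound may return the locked range as two regions when the buffer wraps, so both pointers have to be honoured.

#include <windows.h>
#include <dsound.h>

// Copy cbData bytes into a DirectSound buffer, handling the two-region case.
static BOOL FillSoundBuffer(IDirectSoundBuffer8 *pBuf, const BYTE *pData, DWORD cbData)
{
	LPVOID p1 = NULL, p2 = NULL;
	DWORD  cb1 = 0,   cb2 = 0;

	if (FAILED(pBuf->Lock(0, cbData, &p1, &cb1, &p2, &cb2, 0)))
		return FALSE;

	DWORD n1 = (cbData < cb1) ? cbData : cb1;   // first region
	CopyMemory(p1, pData, n1);
	if (p2 && cbData > n1) {                    // wrap-around region, if any
		DWORD n2 = cbData - n1;
		if (n2 > cb2) n2 = cb2;
		CopyMemory(p2, pData + n1, n2);
	}

	pBuf->Unlock(p1, cb1, p2, cb2);
	return TRUE;
}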
Ejemplo n.º 24
0
HRESULT CSynthStream::CompleteConnect(IPin *pReceivePin)
{
    // This lock must be held because this function uses
    // m_hPCMToMSADPCMConversionStream, m_fFirstSampleDelivered 
    // and m_llSampleMediaTimeStart.
    CAutoLock lShared(&m_cSharedState);

    HRESULT hr;
    WAVEFORMATEX *pwfexCurrent = (WAVEFORMATEX*)m_mt.Format();

    if(WAVE_FORMAT_PCM == pwfexCurrent->wFormatTag)
    {
        hr = m_Synth->AllocWaveCache(*pwfexCurrent);
        if(FAILED(hr))
        {
            return hr;
        }
    }
    else if(WAVE_FORMAT_ADPCM == pwfexCurrent->wFormatTag)
    {
        WAVEFORMATEX wfexSourceFormat;

        DerivePCMFormatFromADPCMFormatStructure(*pwfexCurrent, &wfexSourceFormat);

        hr = m_Synth->AllocWaveCache(wfexSourceFormat);
        if(FAILED(hr))
        {
            return hr;
        }

        MMRESULT mmr = acmStreamOpen(&m_hPCMToMSADPCMConversionStream,
                                    NULL,
                                    &wfexSourceFormat,
                                    pwfexCurrent,
                                    NULL,
                                    0,
                                    0,
                                    ACM_STREAMOPENF_NONREALTIME);
        // acmStreamOpen() returns 0 if no errors occur.
        if(mmr != 0)
        {
            return E_FAIL;
        }
    }
    else
    {
        ASSERT(NULL == m_hPCMToMSADPCMConversionStream);
    }

    hr = CDynamicSourceStream::CompleteConnect(pReceivePin);
    if(FAILED(hr))
    {
        if(WAVE_FORMAT_ADPCM == pwfexCurrent->wFormatTag)
        {
            // acmStreamClose() should never fail because m_hPCMToMSADPCMConversionStream
            // holds a valid ACM stream handle and all operations using the handle are 
            // synchronous.
            EXECUTE_ASSERT(0 == acmStreamClose(m_hPCMToMSADPCMConversionStream, 0));
            m_hPCMToMSADPCMConversionStream = NULL;
        }

        return hr;
    }

    m_fFirstSampleDelivered = FALSE;
    m_llSampleMediaTimeStart = 0;

    return S_OK;
}
Ejemplo n.º 25
0
AudioConverterStream::AudioConverterStream(WaveFormat sourceFormat, WaveFormat destFormat, WaveFormat** prevSourceFormats, int numPrevSourceFormats)
    : outBuffer(nullptr), outSize(0), outBufferAllocated(0), stream(nullptr), isProxy(false), m_failed(false), subConverter(nullptr), startOffset(0)
{
    if (sourceFormat.GetSize() == destFormat.GetSize())
    {
        LPWAVEFORMATEX src = static_cast<LPWAVEFORMATEX>(sourceFormat);
        LPWAVEFORMATEX dest = static_cast<LPWAVEFORMATEX>(destFormat);

        if (memcmp(src, dest, sourceFormat.GetSize()) == 0)
        {
            // Source and destination formats are identical, so we'll just pass the input through to
            // the output
            isProxy = true;
            return;
        }
    }

    MMRESULT mm = acmStreamOpen(&stream, nullptr, sourceFormat, destFormat, nullptr, 0, 0, ACM_STREAMOPENF_NONREALTIME);

    if (mm != MMSYSERR_NOERROR)
    {
        // Not supported directly, so try letting ACM suggest an intermediate format, so we can
        // perform the conversion in multiple stages.
        WaveFormat intermediateFormat;

        // This weird looping structure is because we must try all combinations of four flags, and
        // even when a given combination yields a valid suggestion, we must be able to backtrack and
        // continue looping (tryMoreSuggestions) if the conversion based on that suggestion later
        // fails or dead-ends.
        bool foundSuggest = false;
        int chan, samp, bits, form, done;

        for (done = 0; done < 1 && !foundSuggest; foundSuggest ? 0 : ++done)
        {
            for (chan = 1; chan >= 0 && !foundSuggest; foundSuggest ? 0 : --chan)
            {
                for (samp = 1; samp >= 0 && !foundSuggest; foundSuggest ? 0 : --samp)
                {
                    for (bits = 0; bits <= 1 && !foundSuggest; foundSuggest ? 0 : ++bits)
                    {
                        for (form = 0; form <= 1 && !foundSuggest; foundSuggest ? 0 : ++form)
                        {
                            int flags = 0;
                            flags |= chan ? ACM_FORMATSUGGESTF_NCHANNELS      : 0;
                            flags |= samp ? ACM_FORMATSUGGESTF_NSAMPLESPERSEC : 0;
                            flags |= bits ? ACM_FORMATSUGGESTF_WBITSPERSAMPLE : 0;
                            flags |= form ? ACM_FORMATSUGGESTF_WFORMATTAG     : 0;

                            intermediateFormat = destFormat;
                            MMRESULT mmSuggest = acmFormatSuggest(NULL, sourceFormat, intermediateFormat, intermediateFormat.GetSize(), flags);
                            if (mmSuggest == MMSYSERR_NOERROR)
                            {
                                // Got a possibly-valid suggestion, but it might be a suggestion to
                                // do absolutely nothing (which would be bad), so we first make sure
                                // there's some sort of change involved:
                                if (!FormatsMatch(sourceFormat, intermediateFormat))
                                {
                                    // We got a suggestion
                                    foundSuggest = true;

                                    // Now check to see if it's identical to a previous conversion
                                    // state. If it is, then we'll revert foundSuggest to false to
                                    // prevent endless conversion cycles.
                                    for (int prev = 0; prev < numPrevSourceFormats && prevSourceFormats && foundSuggest; prev++)
                                    {
                                        WaveFormat& oldFormat = *prevSourceFormats[prev];

                                        if (FormatsMatch(oldFormat, intermediateFormat))
                                        {
                                            // We already went through this exact format
                                            foundSuggest = false;
                                        }
                                    }
                                }
                            }
tryMoreSuggestions:
                            continue;
                        }
                    }
                }
            }
        }

        if (!foundSuggest)
        {
            m_failed = true;
            return;
        }

        // we'll handle conversion to the intermediate format
        mm = acmStreamOpen(&stream, nullptr, sourceFormat, intermediateFormat, nullptr, 0, 0, ACM_STREAMOPENF_NONREALTIME);
        if (mm != MMSYSERR_NOERROR)
        {
            if (!done)
            {
                foundSuggest = false;
                goto tryMoreSuggestions; // continue the search
            }

            // reached dead end
            m_failed = true;
            return;
        }

        // create temporary updated conversion history for cycle prevention
        size_t prevSize = sizeof(WaveFormat*) * (numPrevSourceFormats + 1);
        WaveFormat** prevFormats = static_cast<WaveFormat**>(alloca(prevSize));

        if (prevSourceFormats)
        {
            // copy only the existing entries; prevSize includes the extra slot filled below
            memcpy(prevFormats, prevSourceFormats, sizeof(WaveFormat*) * numPrevSourceFormats);
        }

        prevFormats[numPrevSourceFormats] = &sourceFormat;

        // delegate the rest of the conversion to a new converter (recursive construction)
        subConverter = new AudioConverterStream(intermediateFormat, destFormat, prevFormats, numPrevSourceFormats + 1);

        if (subConverter->m_failed)
        {
            delete subConverter;
            subConverter = nullptr;

            if (!done)
            {
                foundSuggest = false;
                goto tryMoreSuggestions; // continue the search
            }

            // reached dead end
            m_failed = true;
            return;
        }
    }

    // prepare the stream header
    memset(&header, 0, sizeof(ACMSTREAMHEADER));
    header.cbStruct = sizeof(ACMSTREAMHEADER);
    header.pbSrc = inWorkBuffer;
    header.cbSrcLength = sizeof(inWorkBuffer);
    header.pbDst = outWorkBuffer;
    header.cbDstLength = sizeof(outWorkBuffer);
    mm = acmStreamPrepareHeader(stream, &header, 0);

    if (mm != MMSYSERR_NOERROR)
    {
        m_failed = true;
    }
}
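FormatsMatch() is called above but not shown in this excerpt. The stand-in below is an assumption on my part rather than the project's actual helper; it compares the base WAVEFORMATEX fields plus the cbSize bytes of codec-specific data that follow the structure, since two formats with the same tag can still differ there.

#include <windows.h>
#include <mmreg.h>
#include <string.h>

// Hypothetical stand-in for the FormatsMatch() helper referenced above.
static bool FormatsMatchSketch(const WAVEFORMATEX *a, const WAVEFORMATEX *b)
{
    if (a->cbSize != b->cbSize)
        return false;

    // the base structure plus any codec-specific bytes appended after it
    return memcmp(a, b, sizeof(WAVEFORMATEX) + a->cbSize) == 0;
}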
Ejemplo n.º 26
0
static HRESULT AVIFILE_SaveFile(const IAVIFileImpl *This)
{
  MMCKINFO ckRIFF;
  MMCKINFO ck;

  mmioSeek(This->hmmio, 0, SEEK_SET);

  /* create the RIFF chunk with formtype WAVE */
  ckRIFF.fccType = formtypeWAVE;
  ckRIFF.cksize  = 0;
  if (mmioCreateChunk(This->hmmio, &ckRIFF, MMIO_CREATERIFF) != S_OK)
    return AVIERR_FILEWRITE;

  /* the next chunk is the format */
  ck.ckid   = ckidWAVEFORMAT;
  ck.cksize = This->cbFormat;
  if (mmioCreateChunk(This->hmmio, &ck, 0) != S_OK)
    return AVIERR_FILEWRITE;
  if (This->lpFormat != NULL && This->cbFormat > 0) {
    if (mmioWrite(This->hmmio, (HPSTR)This->lpFormat, ck.cksize) != ck.cksize)
      return AVIERR_FILEWRITE;
  }
  if (mmioAscend(This->hmmio, &ck, 0) != S_OK)
    return AVIERR_FILEWRITE;

  /* fact chunk is needed for non-pcm waveforms */
  if (This->lpFormat != NULL && This->cbFormat > sizeof(PCMWAVEFORMAT) &&
      This->lpFormat->wFormatTag != WAVE_FORMAT_PCM) {
    WAVEFORMATEX wfx;
    DWORD        dwFactLength;
    HACMSTREAM   has;

    /* try to open an appropriate audio codec to figure out
     * data for fact-chunk */
    wfx.wFormatTag = WAVE_FORMAT_PCM;
    if (acmFormatSuggest(NULL, This->lpFormat, &wfx,
			 sizeof(wfx), ACM_FORMATSUGGESTF_WFORMATTAG) == S_OK) {
      acmStreamOpen(&has, NULL, This->lpFormat, &wfx, NULL,
		    0, 0, ACM_STREAMOPENF_NONREALTIME);
      acmStreamSize(has, This->ckData.cksize, &dwFactLength,
		    ACM_STREAMSIZEF_SOURCE);
      dwFactLength /= wfx.nBlockAlign;
      acmStreamClose(has, 0);

      /* create the fact chunk */
      ck.ckid   = ckidWAVEFACT;
      ck.cksize = sizeof(dwFactLength);

      /* test for enough space before data chunk */
      if (mmioSeek(This->hmmio, 0, SEEK_CUR) > This->ckData.dwDataOffset
	  - ck.cksize - 4 * sizeof(DWORD))
	return AVIERR_FILEWRITE;
      if (mmioCreateChunk(This->hmmio, &ck, 0) != S_OK)
	return AVIERR_FILEWRITE;
      if (mmioWrite(This->hmmio, (HPSTR)&dwFactLength, ck.cksize) != ck.cksize)
	return AVIERR_FILEWRITE;
      if (mmioAscend(This->hmmio, &ck, 0) != S_OK)
	return AVIERR_FILEWRITE;
    } else
      ERR(": fact chunk is needed for non-pcm files -- currently no codec found, so skipped!\n");
  }

  /* if there was extra stuff, we need to fill it with JUNK */
  if (mmioSeek(This->hmmio, 0, SEEK_CUR) + 2 * sizeof(DWORD) < This->ckData.dwDataOffset) {
    ck.ckid   = ckidAVIPADDING;
    ck.cksize = 0;
    if (mmioCreateChunk(This->hmmio, &ck, 0) != S_OK)
      return AVIERR_FILEWRITE;

    if (mmioSeek(This->hmmio, This->ckData.dwDataOffset
		 - 2 * sizeof(DWORD), SEEK_SET) == -1)
      return AVIERR_FILEWRITE;
    if (mmioAscend(This->hmmio, &ck, 0) != S_OK)
      return AVIERR_FILEWRITE;
  }

  /* create the data chunk */
  ck.ckid   = ckidWAVEDATA;
  ck.cksize = This->ckData.cksize;
  if (mmioCreateChunk(This->hmmio, &ck, 0) != S_OK)
    return AVIERR_FILEWRITE;
  if (mmioSeek(This->hmmio, This->ckData.cksize, SEEK_CUR) == -1)
    return AVIERR_FILEWRITE;
  if (mmioAscend(This->hmmio, &ck, 0) != S_OK)
    return AVIERR_FILEWRITE;

  /* some optional extra chunks? */
  if (This->extra.lp != NULL && This->extra.cb > 0) {
    /* chunk headers are already in structure */
    if (mmioWrite(This->hmmio, This->extra.lp, This->extra.cb) != This->extra.cb)
      return AVIERR_FILEWRITE;
  }

  /* close RIFF chunk */
  if (mmioAscend(This->hmmio, &ckRIFF, 0) != S_OK)
    return AVIERR_FILEWRITE;
  if (mmioFlush(This->hmmio, 0) != S_OK)
    return AVIERR_FILEWRITE;

  return AVIERR_OK;
}
Ejemplo n.º 27
0
bool VDAudioCodecW32::Init(const WAVEFORMATEX *pSrcFormat, const WAVEFORMATEX *pDstFormat, bool isCompression, const char *pDriverShortNameHint, bool throwOnError) {
	Shutdown();

	SafeCopyWaveFormat(mSrcFormat, (const VDWaveFormat *)pSrcFormat);

	if (pDstFormat)
		SafeCopyWaveFormat(mDstFormat, (const VDWaveFormat *)pDstFormat);

	// enumerate IDs for all installed codecs
	ACMDriverList driverList(pDriverShortNameHint);

	// try one driver at a time
	MMRESULT res = 0;

	for(ACMDriverList::const_iterator it(driverList.begin()), itEnd(driverList.end());
		it != itEnd;
		++it)
	{
		const HACMDRIVERID driverId = *it;

		// open driver
		HACMDRIVER hDriver = NULL;
		if (acmDriverOpen(&hDriver, driverId, 0))
			continue;

		if (!pDstFormat) {
			VDASSERT(!isCompression);
		
			DWORD dwDstFormatSize = 0;

			VDVERIFY(!acmMetrics(NULL, ACM_METRIC_MAX_SIZE_FORMAT, (LPVOID)&dwDstFormatSize));

			if (dwDstFormatSize < sizeof(WAVEFORMATEX))
				dwDstFormatSize = sizeof(WAVEFORMATEX);

			mDstFormat.resize(dwDstFormatSize);
			memset(mDstFormat.data(), 0, dwDstFormatSize);
			mDstFormat->mTag = WAVE_FORMAT_PCM;

			if (acmFormatSuggest(hDriver, (WAVEFORMATEX *)pSrcFormat, (WAVEFORMATEX *)mDstFormat.data(), dwDstFormatSize, ACM_FORMATSUGGESTF_WFORMATTAG)) {
				acmDriverClose(hDriver, NULL);
				continue;
			}

			// sanitize the destination format a bit

			if (mDstFormat->mSampleBits != 8 && mDstFormat->mSampleBits != 16)
				mDstFormat->mSampleBits = 16;

			if (mDstFormat->mChannels != 1 && mDstFormat->mChannels !=2)
				mDstFormat->mChannels = 2;

			mDstFormat->mBlockSize		= (uint16)((mDstFormat->mSampleBits >> 3) * mDstFormat->mChannels);
			mDstFormat->mDataRate		= mDstFormat->mBlockSize * mDstFormat->mSamplingRate;
			mDstFormat->mExtraSize		= 0;
			mDstFormat.resize(sizeof(WAVEFORMATEX));
		}

		// open conversion stream
		res = acmStreamOpen(&mhStream, hDriver, (WAVEFORMATEX *)pSrcFormat, (WAVEFORMATEX *)mDstFormat.data(), NULL, 0, 0, ACM_STREAMOPENF_NONREALTIME);
		if (!res) {
			mhDriver = hDriver;
			break;
		}

		// Aud-X accepts PCM/6ch but not WAVE_FORMAT_EXTENSIBLE/PCM/6ch. Argh. We attempt to work
		// around this by trying a PCM version if WFE doesn't work.
		if (isCompression) {
			// Need to put this somewhere.
			struct WaveFormatExtensibleW32 {
				WAVEFORMATEX mFormat;
				union {
					uint16 mBitDepth;
					uint16 mSamplesPerBlock;		// may be zero, according to MSDN
				};
				uint32	mChannelMask;
				GUID	mGuid;
			};

			static const GUID local_KSDATAFORMAT_SUBTYPE_PCM={	// so we don't have to bring in ksmedia.h
				WAVE_FORMAT_PCM, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71
			};

			if (pSrcFormat->wFormatTag == WAVE_FORMAT_EXTENSIBLE && pSrcFormat->cbSize >= sizeof(WaveFormatExtensibleW32) - sizeof(WAVEFORMATEX)) {
				const WaveFormatExtensibleW32& wfexex = *(const WaveFormatExtensibleW32 *)pSrcFormat;

				if (wfexex.mGuid == local_KSDATAFORMAT_SUBTYPE_PCM) {
					// Rewrite the format to be straight PCM and try again.
					vdstructex<VDWaveFormat> srcFormat2(mSrcFormat.data(), sizeof(VDWaveFormat));
					srcFormat2->mExtraSize	= 0;
					srcFormat2->mTag		= WAVE_FORMAT_PCM;
					MMRESULT res2 = acmStreamOpen(&mhStream, hDriver, (WAVEFORMATEX *)srcFormat2.data(), (WAVEFORMATEX *)mDstFormat.data(), NULL, 0, 0, ACM_STREAMOPENF_NONREALTIME);

					if (!res2) {
						res = res2;
						mSrcFormat = srcFormat2;
						pSrcFormat = (WAVEFORMATEX *)mSrcFormat.data();

						mhDriver = hDriver;
						break;
					}
				}
			}
		}

		acmDriverClose(hDriver, 0);
	}