nsPrintSettingsAndroid()
 {
   // The aim here is to set up the objects enough that silent printing works
   SetOutputFormat(nsIPrintSettings::kOutputFormatPDF);
   SetPrinterName(NS_LITERAL_STRING("PDF printer").get());
   
 }
Example #2
status_t
EqualizerNode::PrepareToConnect(const media_source &src,
                                const media_destination &dst,
                                media_format* format,
                                media_source* outSource,
                                char* outName)
{
    if (src != fOutputMedia.source)
        return B_MEDIA_BAD_SOURCE;

    if (format->type != B_MEDIA_RAW_AUDIO)
        return B_MEDIA_BAD_FORMAT;

    if (fOutputMedia.destination != media_destination::null)
        return B_MEDIA_ALREADY_CONNECTED;

    status_t err = ValidateFormat((fFormat.u.raw_audio.format
                                   != media_raw_audio_format::wildcard.format) ? fFormat
                                  : fPreferredFormat, *format);

    if (err < B_OK)
        return err;

    SetOutputFormat(*format);

    fOutputMedia.destination = dst;
    fOutputMedia.format = *format;

    *outSource = fOutputMedia.source;
    strncpy(outName, fOutputMedia.name, B_MEDIA_NAME_LENGTH);

    return B_OK;
}
Example #3
	bool GetFeatureRequest::Create(rude::CGI& cgi, Map* pMap)
	{
		WebContext* pWebContext = augeGetWebContextInstance();
		char parameter[AUGE_NAME_MAX];

		SetVersion(cgi["version"]);

		//auge_web_parameter_encoding(, parameter, AUGE_NAME_MAX, pWebContext->IsIE());
		SetTypeName(cgi["typeName"],true);
		
		auge_web_parameter_encoding(cgi["sourceName"], parameter, AUGE_NAME_MAX, pWebContext->IsIE());
		SetSourceName(parameter);

		//auge_web_parameter_encoding(cgi["mapName"], parameter, AUGE_NAME_MAX, pWebContext->IsIE());
		SetMapName(cgi["mapName"], true);

		SetOutputFormat(cgi["outputFormat"]);
		SetMaxFeatures(cgi["maxFeatures"]);
		SetOffset(cgi["offset"]);
		SetBBox(cgi["bbox"]);

		SetEncoding(cgi["encoding"]);

		m_filter = cgi["filter"];
		m_fields = cgi["fields"];
		//if(!m_extent.IsValid())
		//{
		//	SetQuery(cgi["filter"],cgi["fields"], GetTypeName(), pMap);
		//}

		return true;
	}
Example #4
void FFMS_AudioSource::Init(const FFMS_Index &Index, int DelayMode) {
	// Decode the first packet to ensure all properties are initialized
	// Don't cache it since it might be in the wrong format
	// Instead, leave it in DecodeFrame and it'll get cached later
	while (DecodeFrame->nb_samples == 0)
		DecodeNextBlock();

	// Read properties of the audio which may not be available until the first
	// frame has been decoded
	FillAP(AP, CodecContext, Frames);

	if (AP.SampleRate <= 0 || AP.BitsPerSample <= 0)
		throw FFMS_Exception(FFMS_ERROR_DECODING, FFMS_ERROR_CODEC,
			"Codec returned zero size audio");

	std::auto_ptr<FFMS_ResampleOptions> opt(CreateResampleOptions());
	SetOutputFormat(opt.get());

	if (DelayMode < FFMS_DELAY_NO_SHIFT)
		throw FFMS_Exception(FFMS_ERROR_INDEX, FFMS_ERROR_INVALID_ARGUMENT,
			"Bad audio delay compensation mode");

	if (DelayMode == FFMS_DELAY_NO_SHIFT) return;

	if (DelayMode > (signed)Index.size())
		throw FFMS_Exception(FFMS_ERROR_INDEX, FFMS_ERROR_INVALID_ARGUMENT,
			"Out of bounds track index selected for audio delay compensation");

	if (DelayMode >= 0 && Index[DelayMode].TT != FFMS_TYPE_VIDEO)
		throw FFMS_Exception(FFMS_ERROR_INDEX, FFMS_ERROR_INVALID_ARGUMENT,
			"Audio delay compensation must be relative to a video track");

	int64_t Delay = 0;
	if (DelayMode != FFMS_DELAY_TIME_ZERO) {
		if (DelayMode == FFMS_DELAY_FIRST_VIDEO_TRACK) {
			for (size_t i = 0; i < Index.size(); ++i) {
				if (Index[i].TT == FFMS_TYPE_VIDEO && !Index[i].empty()) {
					DelayMode = i;
					break;
				}
			}
		}

		if (DelayMode >= 0) {
			const FFMS_Track &VTrack = Index[DelayMode];
			Delay = -(VTrack[0].PTS * VTrack.TB.Num * AP.SampleRate / (VTrack.TB.Den * 1000));
		}
	}

	if (Frames.HasTS) {
		int i = 0;
		while (Frames[i].PTS == ffms_av_nopts_value) ++i;
		Delay += Frames[i].PTS * Frames.TB.Num * AP.SampleRate / (Frames.TB.Den * 1000);
		for (; i >= 0; --i)
			Delay -= Frames[i].SampleCount;
	}

	AP.NumSamples += Delay;
}
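For reference, a minimal self-contained sketch (all values hypothetical) of the PTS-to-sample conversion used in the delay calculation above: PTS * TB.Num / TB.Den yields milliseconds, and the extra SampleRate / 1000 factor turns that into a sample count.

#include <cstdint>
#include <iostream>

int main() {
    // Hypothetical values: a first video frame at PTS 40 in a millisecond time base.
    const int64_t pts = 40;
    const int64_t tbNum = 1, tbDen = 1;   // assumed track time base (PTS already in ms)
    const int64_t sampleRate = 48000;

    // Same arithmetic as Delay above: milliseconds scaled to audio samples.
    const int64_t delaySamples = pts * tbNum * sampleRate / (tbDen * 1000);
    std::cout << delaySamples << " samples\n";   // 40 ms at 48 kHz -> 1920
    return 0;
}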
Example #5
// Format negotiation
HRESULT CTimeStretchFilter::NegotiateFormat(const WAVEFORMATEXTENSIBLE* pwfx, int nApplyChangesDepth, ChannelOrder* pChOrder)
{
  if (!pwfx)
    return VFW_E_TYPE_NOT_ACCEPTED;

#ifdef INTEGER_SAMPLES
  // only accept 16bit int
  if (pwfx->Format.wBitsPerSample != 16 || pwfx->SubFormat != KSDATAFORMAT_SUBTYPE_PCM)
    return VFW_E_TYPE_NOT_ACCEPTED;
#else 
  // only accept 32bit float
  if (pwfx->Format.wBitsPerSample != 32 || pwfx->SubFormat != KSDATAFORMAT_SUBTYPE_IEEE_FLOAT)
    return VFW_E_TYPE_NOT_ACCEPTED;
#endif

  if (FormatsEqual(pwfx, m_pInputFormat))
  {
    *pChOrder = m_chOrder;
    return S_OK;
  }

  bool bApplyChanges = (nApplyChangesDepth != 0);
  if (nApplyChangesDepth != INFINITE && nApplyChangesDepth > 0)
    nApplyChangesDepth--;

  HRESULT hr = m_pNextSink->NegotiateFormat(pwfx, nApplyChangesDepth, pChOrder);
  if (FAILED(hr))
    return hr;

  hr = VFW_E_CANNOT_CONNECT;
  
  if (!pwfx)
    return SetFormat(NULL);

  if (bApplyChanges)
  {
    LogWaveFormat(pwfx, "TS   - applying ");

    AM_MEDIA_TYPE tmp;
    HRESULT result = CreateAudioMediaType((WAVEFORMATEX*)pwfx, &tmp, true);
    if (SUCCEEDED(result))
    {
      if (m_pMediaType)
        DeleteMediaType(m_pMediaType);
      m_pMediaType = CreateMediaType(&tmp);
    }

    SetInputFormat(pwfx);
    SetOutputFormat(pwfx);
    SetFormat(pwfx);
  }
  else
    LogWaveFormat(pwfx, "TS   -          ");

  m_chOrder = *pChOrder;

  return S_OK;
}
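The nApplyChangesDepth convention above (decrement the depth before forwarding, apply the change locally only when the received depth is non-zero) can be illustrated with a self-contained sketch; all names below are hypothetical and only standard C++ is used.

#include <climits>
#include <iostream>
#include <string>
#include <vector>

// Each sink forwards the request with the depth decremented and applies the new
// format itself only when the depth it received was non-zero. INT_MAX stands in
// for INFINITE, meaning "apply everywhere down the chain".
static bool Negotiate(const std::vector<std::string>& chain, size_t index, int depth) {
    if (index >= chain.size())
        return true;                              // past the last sink: accepted

    const bool applyHere = (depth != 0);
    if (depth != INT_MAX && depth > 0)
        --depth;

    if (!Negotiate(chain, index + 1, depth))      // downstream sinks negotiate first
        return false;

    if (applyHere)
        std::cout << chain[index] << ": applying format change\n";
    return true;
}

int main() {
    // With a depth of 2, only the first two sinks apply the change.
    Negotiate({"TimeStretch", "SampleRateConverter", "Renderer"}, 0, 2);
    return 0;
}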
Example #6
ieResult ie_fif_Reader::ReadImageX(ieImage *pim, bool bLoadSmall, iePProgress pProgress, volatile bool *pbCancel)
{
	long	lCH;
    if (OpenDecompressor(&lCH)) return IE_E_GENERAL;

	if (SetFIFBuffer(lCH, (PBYTE)pMem->Ptr(), (long)pMem->Size())) return IE_E_GENERAL;

	// Init file
    TCHAR	szFTTFileName[256] = _T("");
    PBYTE	pbFTT = nullptr;
	DWORD	dw;
    IE_HFILE	hf;
    if (GetFIFFTTFileName(lCH, (PBYTE)szFTTFileName)) return IE_E_GENERAL;
    if (*szFTTFileName != 0) {
		hf = CreateFile(szFTTFileName, GENERIC_READ, FILE_SHARE_READ | FILE_SHARE_WRITE, NULL, OPEN_EXISTING, FILE_ATTRIBUTE_NORMAL, NULL);
		if (hf == IE_INVALIDHFILE) return IE_E_FILEOPEN;
        dw = (DWORD)ief_Size(hf);
        pbFTT = IE_NEW BYTE[dw];
		if (!pbFTT) return IE_E_OUTOFMEMORY;
		ief_Read(hf, pbFTT, dw);
        ief_Close(hf);
        if (SetFTTBuffer(lCH, pbFTT, dw)) return IE_E_GENERAL;
    }

	// Set options
    if (SetOutputFormat(lCH, BLUE8, GREEN8, RED8, BLANK8, TOP_LEFT)) return IE_E_GENERAL;
    if (SetOutputResolution(lCH, pim->X(), pim->Y())) return IE_E_GENERAL;

	// Decompress
	if (pProgress) {
        pProgressCB = pProgress;
	    SetDecompressCallback(lCH, &FIFCallback, CALLBACK_FREQ_LOW);
    }

	if (DecompressToBuffer(lCH, pim->RawPixelPtr(), 0, 0, 0, 0, pim->Pitch() * sizeof(ieBGRA))) return IE_E_CORRUPTEDFILE;

	// Clean up
    if (pbFTT) {
    	ClearFTTBuffer(lCH);
        delete[] pbFTT;
    }
    ClearFIFBuffer(lCH);
    CloseDecompressor(lCH);

	// Set alpha to 1.0
	iePBGRA p4 = pim->BGRA()->PixelPtr();
	DWORD nSkip = (pim->BGRA()->Pitch() - nX);
	for (DWORD y = nY; y--; p4 += nSkip)
		for (DWORD x = nX; x--; p4++)
			p4->A = 0xFF;

	return IE_S_OK;
}
Example #7
STDMETHODIMP EMReadFrame::SetOutputFormat(long p_vDestSizeX, long p_vDestSizeY, long p_vSkipSize)
{
	EMMediaFormat oFormat(EM_TYPE_RAW_VIDEO);
	
	oFormat.m_vWidth = m_vDestinationSizeX = p_vDestSizeX;
	oFormat.m_vHeight = m_vDestinationSizeY = p_vDestSizeY;
	oFormat.m_vBytesPerRow = m_vBytesPerDestinationPixel = p_vSkipSize;
	oFormat.m_vBufferSizeBytes = m_vBytesPerFrame =  p_vDestSizeY * p_vSkipSize;

	SetOutputFormat(&oFormat);

	return S_OK;
}
Example #8
void CSTi7111HDFormatterOutput::SetControl(stm_output_control_t ctrl, ULONG val)
{
  DEBUGF2(2,("CSTi7111HDFormatterOutput::SetControl ctrl = %u val = %lu\n",(unsigned)ctrl,val));
  switch(ctrl)
  {
    case STM_CTRL_SIGNAL_RANGE:
    {
      CGenericGammaOutput::SetControl(ctrl,val);
      /*
       * Enable the change in the HD formatter
       */
      SetOutputFormat(m_ulOutputFormat);
      break;
    }
    case STM_CTRL_DAC_HD_POWER:
    {
      m_bDacHdPowerDisabled = (val != 0);
      if (m_pCurrentMode)
      {
        if(!m_bDacHdPowerDisabled)
          EnableDACs();
        else
        {
          val = ReadMainTVOutReg(TVOUT_PADS_CTL) | TVOUT_MAIN_PADS_DAC_POFF;
          WriteMainTVOutReg(TVOUT_PADS_CTL, val);
        }
      }
      break;
    }
    case STM_CTRL_DAC_HD_FILTER:
    {
      m_bUseAlternate2XFilter = (val != 0);
      if(m_pCurrentMode && (m_ulTVStandard & STM_OUTPUT_STD_HD_MASK))
      {
        if(m_pCurrentMode->TimingParams.ulPixelClock <= 74250000)
          SetUpsampler(2);
      }
      break;
    }
    case STM_CTRL_YCBCR_COLORSPACE:
    {
      if(m_pHDMI)
        m_pHDMI->SetControl(ctrl,val);
      // Fallthrough to base class in order to configure the mixer
    }
    default:
      CGenericGammaOutput::SetControl(ctrl,val);
      break;
  }
}
Example #9
    dsnerror_t SetOutputType(void)
    {
        m_pDestType.majortype = MEDIATYPE_Video;
        m_pDestType.bFixedSizeSamples = TRUE;
        m_pDestType.bTemporalCompression = FALSE;
        m_pDestType.pUnk = 0;

        memset(&m_vi, 0, sizeof(m_vi));
        memcpy(&m_vi.bmiHeader, m_bih, sizeof(m_vi.bmiHeader));

        memset(&m_vi2, 0, sizeof(m_vi2));
        memcpy(&m_vi2.bmiHeader, m_bih, sizeof(m_vi2.bmiHeader));

        m_vi.bmiHeader.biSize = m_vi2.bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
        m_vi.bmiHeader.biCompression = m_vi2.bmiHeader.biCompression = m_outfmt;

        m_vi.bmiHeader.biPlanes = 1;

        /* Check if we support the desired output format */
        if (!SetOutputFormat(&m_vi.bmiHeader.biBitCount, &m_vi.bmiHeader.biPlanes))
            return DSN_OUTPUT_NOTSUPPORTED;

        m_vi2.bmiHeader.biBitCount = m_vi.bmiHeader.biBitCount;
        m_vi2.bmiHeader.biPlanes = m_vi.bmiHeader.biPlanes;

        RECT rImg = { 0 /* left */, 0 /* top */, m_bih->biWidth /* right */, m_bih->biHeight /* bottom */};
        m_vi.rcSource = m_vi2.rcSource = m_vi.rcTarget = m_vi2.rcTarget = rImg;

        //m_vi2.bmiHeader.biHeight *= -1;

        m_vi.bmiHeader.biSizeImage = m_pDestType.lSampleSize = labs(m_bih->biWidth * m_bih->biHeight * ((m_vi.bmiHeader.biBitCount + 7) / 8));
        m_vi2.bmiHeader.biSizeImage = m_vi.bmiHeader.biSizeImage;

        // try FORMAT_VideoInfo
        m_pDestType.formattype = FORMAT_VideoInfo;
        m_pDestType.cbFormat = sizeof(m_vi);
        m_pDestType.pbFormat = (BYTE *) &m_vi;
        m_res = m_pOutputPin->QueryAccept(&m_pDestType);

        // try FORMAT_VideoInfo2
        if (m_res != S_OK)
        {
            m_pDestType.formattype = FORMAT_VideoInfo2;
            m_pDestType.cbFormat = sizeof(m_vi2);
            m_pDestType.pbFormat = (BYTE *) &m_vi2;
            m_res = m_pOutputPin->QueryAccept(&m_pDestType);
        }
        return (m_res == S_OK) ? DSN_OK : DSN_OUTPUT_NOTACCEPTED;
    }
Example #10
/** ---------------------------------------------------
 */
nsPrintSettingsGTK::nsPrintSettingsGTK() :
    mPageSetup(nullptr),
    mPrintSettings(nullptr),
    mGTKPrinter(nullptr),
    mPrintSelectionOnly(false)
{
    // The aim here is to set up the objects enough that silent printing works well.
    // These will be replaced anyway if the print dialog is used.
    mPrintSettings = gtk_print_settings_new();
    mPageSetup = gtk_page_setup_new();
    InitUnwriteableMargin();

    SetOutputFormat(nsIPrintSettings::kOutputFormatNative);

    GtkPaperSize* defaultPaperSize = gtk_paper_size_new(nullptr);
    mPaperSize = moz_gtk_paper_size_copy_to_new_custom(defaultPaperSize);
    gtk_paper_size_free(defaultPaperSize);
    SaveNewPageSize();
}
Example #11
File: main.cpp  Project: 2lnx/bco
int32_t main (int32_t argc, char * argv[]) 
{
    char * inputFileName = argv[1];
    char * outputFileName = argv[2];
    FILE * inputFile = NULL;
    FILE * outputFile = NULL;
		
	bool malformed = argc < 2;
	
    // Parse the commandline and open the necessary files
    for (int32_t i = 1; i < argc; ++i) 
	{
		if (strcmp (argv[i], "-h") == 0)
		{
			malformed = true;
		}
		else
		{
			if (argv[i][0] == '-')
            {
				printf ("unknown option: %s\n", argv[i]);
				malformed = true;
			}
			else
			{
                if (inputFile == NULL) inputFile = fopen (inputFileName, "rb"); // the b is necessary for Windows -- ignored by Unix
                if(inputFile == NULL)
                {
                    fprintf(stderr," Cannot open file \"%s\"\n", inputFileName);
                    exit (1);
                }

                if (outputFile == NULL) outputFile = fopen (outputFileName, "w+b"); // the b is necessary for Windows -- ignored by Unix
                if(outputFile == NULL)
                {
                    fprintf(stderr," Cannot open file \"%s\"\n", outputFileName);
                    exit (1);
                }
			}
		}
				
		if (malformed)
		{
			break;
		}
	}
	
	if (!malformed) 
	{
        printf("Input file: %s\n", inputFileName);
        printf("Output file: %s\n", outputFileName);
        // So at this point we have the input and output files open. Need to determine what we're dealing with
        int32_t theError = 0;
        AudioFormatDescription inputFormat;
        AudioFormatDescription outputFormat;
        int32_t inputDataPos = 0, inputDataSize = 0;
        uint32_t inputFileType = 0; // 'caff' or 'WAVE'
        uint32_t outputFileType = 0; // 'caff' or 'WAVE'
        
        theError = GetInputFormat(inputFile, &inputFormat, &inputFileType);
        if (theError)
        {
            fprintf(stderr," Cannot determine what format file \"%s\" is\n", inputFileName);
            exit (1);            
        }
        
        if (inputFileType != 'WAVE' && inputFileType != 'caff')
        {
            fprintf(stderr," File \"%s\" is of an unsupported type\n", inputFileName);
            exit (1);                        
        }
        
        if (inputFormat.mFormatID != kALACFormatAppleLossless && inputFormat.mFormatID != kALACFormatLinearPCM)
        {
            fprintf(stderr," File \"%s\'s\" data format is of an unsupported type\n", inputFileName);
            exit (1);                        
        }

        SetOutputFormat(inputFormat, &outputFormat);

        if (theError)
        {
            fprintf(stderr," Cannot determine what format file \"%s\" is\n", outputFileName);
            exit (1);            
        }
        FindDataStart(inputFile, inputFileType, &inputDataPos, &inputDataSize);
        fseek(inputFile, inputDataPos, SEEK_SET);
        
        // We know where we are and we know what we're doing
        if (outputFormat.mFormatID == kALACFormatAppleLossless)
        {
            // encoding
            EncodeALAC(inputFile, outputFile, inputFormat, outputFormat, inputDataSize);
        }
        else
        {
            // decoding
            GetOutputFileType(outputFileName, &outputFileType);
            
            if (outputFileType == 'WAVE' && outputFormat.mChannelsPerFrame > 2)
            {
                // we don't support WAVE because we don't want to reinterleave on output 
                fprintf(stderr," Cannot decode more than two channels to WAVE\n");
                exit (1);            
            }
            DecodeALAC(inputFile, outputFile, inputFormat, outputFormat, inputDataSize, outputFileType);
        }
	}
	
	if (malformed) {
		printf ("Usage:\n");
		printf ("Encode:\n");
		printf ("        alacconvert <input wav or caf file> <output caf file>\n");
		printf ("Decode:\n");
		printf ("        alacconvert <input caf file> <output wav or caf file>\n");
		printf ("\n");
		return 1;
	}
	
    if (inputFile) fclose(inputFile);
    if (outputFile) fclose(outputFile);
    
	return 0;
}
Example #12
	bool GetFeatureRequest::Create(XDocument* pxDoc)
	{
		XElement	*pxRoot = NULL;
		XAttribute	*pxAttr = NULL;

		m_pxDoc = pxDoc;

		pxRoot = pxDoc->GetRootNode();
		pxAttr = pxRoot->GetAttribute("version");
		if(pxAttr!=NULL)
		{
			SetVersion(pxAttr->GetValue());
		}

		pxAttr = pxRoot->GetAttribute("mapName");
		if(pxAttr!=NULL)
		{
			SetMapName(pxAttr->GetValue(),false);
		}

		pxAttr = pxRoot->GetAttribute("sourceName");
		if(pxAttr!=NULL)
		{
			SetSourceName(pxAttr->GetValue());
		}

		pxAttr = pxRoot->GetAttribute("outputFormat");
		if(pxAttr!=NULL)
		{
			GLogger* pLogger  = augeGetLoggerInstance();
			pLogger->Trace(pxAttr->GetValue(),__FILE__,__LINE__);
			SetOutputFormat(pxAttr->GetValue());
		}

		pxAttr = pxRoot->GetAttribute("maxFeatures");
		if(pxAttr!=NULL)
		{
			SetMaxFeatures(pxAttr->GetValue());
		}

		pxAttr = pxRoot->GetAttribute("offset");
		if(pxAttr!=NULL)
		{
			SetOffset(pxAttr->GetValue());
		}

		pxAttr = pxRoot->GetAttribute("encoding");
		if(pxAttr!=NULL)
		{
			SetEncoding(pxAttr->GetValue());
		}

		XElement* pxQuery = (XElement*)pxRoot->GetFirstChild("Query");
		if(pxQuery==NULL)
		{
			return false;
		}

		pxAttr = pxQuery->GetAttribute("typeName");
		if(pxAttr==NULL)
		{
			return false;
		}
		SetTypeName(pxAttr->GetValue(), false);
		if(m_type_name.empty())
		{
			return false;
		}

		//Layer* pLayer = NULL;
		//pLayer = pMap->GetLayer(m_type_name.c_str());
		//if(pLayer==NULL)
		//{
		//	return false;
		//}
		//if(pLayer->GetType()!=augeLayerFeature)
		//{
		//	return false;
		//}
		//FeatureLayer* pFLayer = NULL;
		//FeatureClass* pFeatureClass = NULL;
		//pFLayer = static_cast<FeatureLayer*>(pLayer);
		//pFeatureClass = pFLayer->GetFeatureClass();
		//if(pFeatureClass==NULL)
		//{
		//	return false;
		//}

		//FilterFactory* pFilterFactory = augeGetFilterFactoryInstance();
		//m_pQuery = pFilterFactory->CreateQuery();

		//XElement* pxFilter = (XElement*)pxQuery->GetFirstChild("Filter");
		//if(pxFilter!=NULL)
		//{
		//	GFilter* pFilter = NULL;
		//	FilterReader* reader = pFilterFactory->CreateFilerReader(pFeatureClass->GetFields());
		//	pFilter = reader->Read(pxFilter);
		//	m_pQuery->SetFilter(pFilter);

		//}

		////PropertyName
		//char field_name[AUGE_NAME_MAX];
		//const char* property_name;
		//XNode* pxNode = NULL;
		//XNodeSet* pxNodeSet = pxQuery->GetChildren("PropertyName");
		//pxNodeSet->Reset();
		//while((pxNode=pxNodeSet->Next())!=NULL)
		//{
		//	property_name = pxNode->GetContent();
		//	ParseFieldName(property_name, field_name, AUGE_NAME_MAX);
		//	m_pQuery->AddSubField(field_name);

		//}
		//pxNodeSet->Release();

		return true;
	}
Example #13
bool CSTi7111HDFormatterOutput::Start(const stm_mode_line_t *mode, ULONG tvStandard)
{
  DEBUGF2(2,("CSTi7111HDFormatterOutput::Start - in\n"));

  /*
   * Strip any secondary mode flags out when SMPTE293M is defined.
   * Other chips need additional modes for simultaneous re-interlaced DENC
   * output. Stripping that out lets us use tvStandard more sensibly throughout,
   * making the code more readable.
   */
  if(tvStandard & STM_OUTPUT_STD_SMPTE293M)
    tvStandard = STM_OUTPUT_STD_SMPTE293M;

  if((mode->ModeParams.OutputStandards & tvStandard) != tvStandard)
  {
    DEBUGF2(1,("CSTi7111HDFormatterOutput::Start - requested standard not supported by mode\n"));
    return false;
  }

  if(m_bIsSuspended)
  {
    DEBUGF2(1,("CSTi7111HDFormatterOutput::Start output is suspended\n"));
    return false;
  }

  /*
   * First try to change the display mode on the fly, if that works there is
   * nothing else to do.
   */
  if(TryModeChange(mode, tvStandard))
  {
    DEBUGF2(2,("CSTi7111HDFormatterOutput::Start - mode change successful\n"));
    return true;
  }

  if(m_pCurrentMode)
  {
    DEBUGF2(1,("CSTi7111HDFormatterOutput::Start - failed, output is active\n"));
    return false;
  }


  if(tvStandard & STM_OUTPUT_STD_HD_MASK)
  {
    if(!StartHDDisplay(mode))
      return false;
  }
  else if(tvStandard & STM_OUTPUT_STD_SD_MASK)
  {
    if(!StartSDInterlacedDisplay(mode, tvStandard))
      return false;
  }
  else if(tvStandard & (STM_OUTPUT_STD_ED_MASK | STM_OUTPUT_STD_VESA))
  {
    /*
     * Note that this path also deals with VESA (VGA) modes.
     */
    if(!StartSDProgressiveDisplay(mode, tvStandard))
      return false;
  }
  else
  {
    DEBUGF2(1,("CSTi7111HDFormatterOutput::Start Unsupported Output Standard\n"));
    return false;
  }

  if(!m_pMixer->Start(mode))
  {
    DEBUGF2(1,("CSTi7111HDFormatterOutput::Start Mixer start failed\n"));
    return false;
  }


  /*
   * We don't want anything from CGenericGammaOutput::Start, but we do
   * need to call the base class Start.
   */
  COutput::Start(mode, tvStandard);

  SetOutputFormat(m_ulOutputFormat);
  EnableDACs();

  DEXIT();
  return true;
}
Example #14
HRESULT CChannelMixer::NegotiateFormat(const WAVEFORMATEXTENSIBLE* pwfx, int nApplyChangesDepth, ChannelOrder* pChOrder)
{
  if (!pwfx)
    return VFW_E_TYPE_NOT_ACCEPTED;

  if (FormatsEqual(pwfx, m_pInputFormat))
  {
    *pChOrder = m_chOrder;
    return S_OK;
  }

  if (!m_pNextSink)
    return VFW_E_TYPE_NOT_ACCEPTED;

  bool bApplyChanges = (nApplyChangesDepth != 0);
  if (nApplyChangesDepth != INFINITE && nApplyChangesDepth > 0)
    nApplyChangesDepth--;

  if (pwfx->SubFormat != KSDATAFORMAT_SUBTYPE_IEEE_FLOAT)
    return VFW_E_TYPE_NOT_ACCEPTED;

  HRESULT hr = S_OK;
  bool expandToStereo = pwfx->Format.nChannels == 1 && m_pSettings->m_bExpandMonoToStereo;

  if (!m_pSettings->m_bForceChannelMixing && !expandToStereo)
  {
    // try the format directly
    hr = m_pNextSink->NegotiateFormat(pwfx, nApplyChangesDepth, pChOrder);
    if (SUCCEEDED(hr))
    {
      if (bApplyChanges)
      {
        SetInputFormat(pwfx);
        SetOutputFormat(pwfx);
        m_bPassThrough = false;
        hr = SetupConversion(*pChOrder);
      }

      m_chOrder = *pChOrder;
      return hr;
    }
  }

  WAVEFORMATEXTENSIBLE* pOutWfx;
  CopyWaveFormatEx(&pOutWfx, pwfx);

  if (!expandToStereo || m_pSettings->m_bForceChannelMixing)
  {
    pOutWfx->dwChannelMask = m_pSettings->m_lSpeakerConfig;
    pOutWfx->Format.nChannels = m_pSettings->m_lSpeakerCount;
  }
  else // Expand mono to stereo
  {
    pOutWfx->dwChannelMask = KSAUDIO_SPEAKER_STEREO;
    pOutWfx->Format.nChannels = 2;
  }

  pOutWfx->SubFormat = KSDATAFORMAT_SUBTYPE_IEEE_FLOAT;  
  pOutWfx->Format.nBlockAlign = pOutWfx->Format.wBitsPerSample / 8 * pOutWfx->Format.nChannels;
  pOutWfx->Format.nAvgBytesPerSec = pOutWfx->Format.nBlockAlign * pOutWfx->Format.nSamplesPerSec;
  
  hr = m_pNextSink->NegotiateFormat(pOutWfx, nApplyChangesDepth, pChOrder);
  m_chOrder = *pChOrder;


  if (FAILED(hr))
  {
    SAFE_DELETE_WAVEFORMATEX(pOutWfx);
    return hr;
  }

  if (bApplyChanges)
  {
    LogWaveFormat(pwfx, "MIX  - applying ");

    m_bPassThrough = false;
    SetInputFormat(pwfx);
    SetOutputFormat(pOutWfx, true);
    hr = SetupConversion(*pChOrder);
  }
  else
  {
    LogWaveFormat(pwfx, "MIX  -          ");
    SAFE_DELETE_WAVEFORMATEX(pOutWfx);
  }

  return hr;
}
Example #15
HRESULT CSampleRateConverter::NegotiateFormat(const WAVEFORMATEXTENSIBLE* pwfx, int nApplyChangesDepth, ChannelOrder* pChOrder)
{
  if (!pwfx)
    return VFW_E_TYPE_NOT_ACCEPTED;

  if (FormatsEqual(pwfx, m_pInputFormat))
  {
    *pChOrder = m_chOrder;
    return S_OK;
  }

  if (!m_pNextSink)
    return VFW_E_TYPE_NOT_ACCEPTED;

  bool bApplyChanges = (nApplyChangesDepth != 0);
  if (nApplyChangesDepth != INFINITE && nApplyChangesDepth > 0)
    nApplyChangesDepth--;

  // Try passthrough
  HRESULT hr = m_pNextSink->NegotiateFormat(pwfx, nApplyChangesDepth, pChOrder);
  if (SUCCEEDED(hr))
  {
    if (bApplyChanges)
    {
      m_bPassThrough = true;
      SetInputFormat(pwfx);
      SetOutputFormat(pwfx);
    }

    m_chOrder = *pChOrder;
    return hr;
  }

  if (pwfx->SubFormat != KSDATAFORMAT_SUBTYPE_IEEE_FLOAT)
    return VFW_E_TYPE_NOT_ACCEPTED;

  WAVEFORMATEXTENSIBLE* pOutWfx;
  CopyWaveFormatEx(&pOutWfx, pwfx);
  pOutWfx->Format.nSamplesPerSec = 0;

  hr = VFW_E_TYPE_NOT_ACCEPTED;

  const unsigned int sampleRateCount = sizeof(gAllowedSampleRates) / sizeof(int);
  unsigned int startPoint = 0;

  // TODO test duplicate sample rates first

  // Search for the input sample rate in sample rate array
  bool foundSampleRate = false;
  for (unsigned int i = 0; i < sampleRateCount && !foundSampleRate; i++)
  {
    if (gAllowedSampleRates[i] == pwfx->Format.nSamplesPerSec)
    {
      startPoint = ++i; // select closest sample rate in ascending order 
      foundSampleRate = true;
    }
  }

  if (!foundSampleRate)
    Log("CSampleRateConverter::NegotiateFormat - sample rate (%d) not found in the source array", pwfx->Format.nSamplesPerSec);
  
  unsigned int sampleRatesTested = 0;
  for (int i = startPoint; FAILED(hr) && pOutWfx->Format.nSamplesPerSec == 0 && sampleRatesTested < sampleRateCount; i++)
  {
    if (pOutWfx->Format.nSamplesPerSec == pwfx->Format.nSamplesPerSec)
    {
      sampleRatesTested++;
      continue; // skip if same as source
    }

    pOutWfx->Format.nSamplesPerSec = gAllowedSampleRates[i];
    pOutWfx->Format.nAvgBytesPerSec = gAllowedSampleRates[i] * pOutWfx->Format.nBlockAlign;

    hr = m_pNextSink->NegotiateFormat(pOutWfx, nApplyChangesDepth, pChOrder);
    sampleRatesTested++;

    if (FAILED(hr))
      pOutWfx->Format.nSamplesPerSec = 0;

    // Search from the lower end
    if (i == sampleRateCount - 1)
      i = 0;
  }

  if (FAILED(hr))
  {
    SAFE_DELETE_WAVEFORMATEX(pOutWfx);
    return hr;
  }
  if (bApplyChanges)
  {
    LogWaveFormat(pwfx, "SRC  - applying ");

    m_bPassThrough = false;
    SetInputFormat(pwfx);
    SetOutputFormat(pOutWfx, true);
    hr = SetupConversion();
    // TODO: do something meaningful if SetupConversion fails
    //if (FAILED(hr))
  }
  else
  {
    LogWaveFormat(pwfx, "SRC  -          ");
    SAFE_DELETE_WAVEFORMATEX(pOutWfx);
  }

  m_chOrder = *pChOrder;

  return S_OK;
}
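The sample-rate search above boils down to: start just above the input rate in an ascending table, walk upward, and wrap around to the lowest rates if nothing is accepted. A standalone sketch of that strategy follows; the rate table and the acceptance test are assumptions standing in for gAllowedSampleRates and the downstream NegotiateFormat call.

#include <cstdio>

static const int kAllowedRates[] = {32000, 44100, 48000, 88200, 96000, 176400, 192000};
static const unsigned kRateCount = sizeof(kAllowedRates) / sizeof(kAllowedRates[0]);

// Hypothetical stand-in for the downstream sink's NegotiateFormat call.
static bool SinkAccepts(int rate) { return rate == 48000; }

static int PickOutputRate(int inputRate) {
    unsigned start = 0;
    for (unsigned i = 0; i < kRateCount; ++i) {
        if (kAllowedRates[i] == inputRate) { start = i + 1; break; }
    }

    for (unsigned tested = 0, i = start; tested < kRateCount; ++tested, ++i) {
        if (i >= kRateCount)
            i = 0;                                // wrap around to the lower end
        if (kAllowedRates[i] == inputRate)
            continue;                             // skip the rate we already have
        if (SinkAccepts(kAllowedRates[i]))
            return kAllowedRates[i];
    }
    return 0;                                     // nothing was accepted
}

int main() {
    std::printf("44100 Hz input -> %d Hz output\n", PickOutputRate(44100));
    return 0;
}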
Example #16
bool Exporter::exportImageFormat(ImageDataObject & image, const QString & fileName)
{
    QString ext = QFileInfo(fileName).suffix().toLower();

    vtkSmartPointer<vtkImageWriter> writer;

    if (ext == "png")
    {
        writer = vtkSmartPointer<vtkPNGWriter>::New();
    }
    else if (ext == "jpg" || ext == "jpeg")
    {
        writer = vtkSmartPointer<vtkJPEGWriter>::New();
    }
    else if (ext == "bmp")
    {
        writer = vtkSmartPointer<vtkBMPWriter>::New();
    }

    if (!writer)
    {
        return false;
    }

    const auto scalars = image.dataSet()->GetPointData()->GetScalars();
    if (!scalars)
    {
        return false;
    }

    const auto components = scalars->GetNumberOfComponents();
    if (components != 1 && components != 3 && components != 4)
    {
        return false;
    }

    if (scalars->GetDataType() == VTK_UNSIGNED_CHAR)
    {
        writer->SetInputData(image.dataSet());
    }
    else
    {
        auto toUChar = vtkSmartPointer<ImageMapToColors>::New();
        toUChar->SetInputData(image.dataSet());

        auto lut = vtkSmartPointer<vtkLookupTable>::New();
        lut->SetNumberOfTableValues(0xFF);
        lut->SetHueRange(0, 0);
        lut->SetSaturationRange(0, 0);
        lut->SetValueRange(0, 1);

        ValueRange<> totalRange;
            
        for (int c = 0; c < components; ++c)
        {
            ValueRange<> range;
            scalars->GetRange(range.data(), c);
            totalRange.add(range);
        }

        toUChar->SetOutputFormat(
            components == 3 ? VTK_RGB :
            (components == 4 ? VTK_RGBA :
                VTK_LUMINANCE));

        toUChar->SetLookupTable(lut);

        writer->SetInputConnection(toUChar->GetOutputPort());
    }

    writer->SetFileName(fileName.toUtf8().data());
    writer->Write();

    return true;
}
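A minimal usage sketch of the same writer pattern on data that is already unsigned char, so no ImageMapToColors/lookup-table conversion is needed. It assumes a VTK build with the PNG writer module available; the image contents and file name are made up.

#include <vtkImageData.h>
#include <vtkPNGWriter.h>
#include <vtkSmartPointer.h>

int main() {
    // Build a small synthetic RGB image (already VTK_UNSIGNED_CHAR).
    auto image = vtkSmartPointer<vtkImageData>::New();
    image->SetDimensions(64, 64, 1);
    image->AllocateScalars(VTK_UNSIGNED_CHAR, 3);

    auto* px = static_cast<unsigned char*>(image->GetScalarPointer());
    for (int i = 0; i < 64 * 64; ++i) {
        px[3 * i + 0] = 255;   // R
        px[3 * i + 1] = 128;   // G
        px[3 * i + 2] = 0;     // B
    }

    // Same pattern as above: pick a writer, feed it the image, write the file.
    auto writer = vtkSmartPointer<vtkPNGWriter>::New();
    writer->SetFileName("example.png");
    writer->SetInputData(image);
    writer->Write();
    return 0;
}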
Example #17
bool CSTmHDMI::Create(void)
{
  DENTRY();

  m_statusLock = g_pIOS->CreateResourceLock();
  if(m_statusLock == 0)
  {
    DEBUGF2(2,("CSTmHDMI::Create failed to create status lock\n"));
    return false;
  }

  if(!m_pIFrameManager)
  {
    /*
     * If a subclass has not already created an InfoFrame manager then
     * create the default CPU driven one.
     */
    m_pIFrameManager = new CSTmCPUIFrames(m_pDisplayDevice,m_ulHDMIOffset);
    if(!m_pIFrameManager || !m_pIFrameManager->Create(this,this))
    {
      DERROR("Unable to create an Info Frame manager\n");
      return false;
    }
  }

  m_ulIFrameManagerIntMask = m_pIFrameManager->GetIFrameCompleteHDMIInterruptMask();

  if(m_bUseHotplugInt)
  {
    /*
     * Poll current status, as if the display is connected first time around
     * it will not trigger a hotplug interrupt.
     */
    ULONG hotplugstate = (ReadHDMIReg(STM_HDMI_STA) & STM_HDMI_STA_HOT_PLUG);
#if defined(SPARK) || defined(SPARK7162)
    if(hotplugstate == 0)
#elif defined(ADB2850) || defined(DSI87)
    if(((box_type==0)&&(hotplugstate == 0))||((box_type==1)&&(hotplugstate != 0)))
#else
    if(hotplugstate != 0)
#endif
    {
      m_displayStatus = STM_DISPLAY_NEEDS_RESTART;
      DEBUGF2(2,("CSTmHDMI::Create out initial hotplug detected\n"));
    }
    else
    {
      m_displayStatus = STM_DISPLAY_DISCONNECTED;
    }

    /*
     * Initialize the hot plug interrupt.
     */
    WriteHDMIReg(STM_HDMI_INT_EN, (STM_HDMI_INT_GLOBAL    |
                                   STM_HDMI_INT_HOT_PLUG));
  }

  /*
   * Set the default output format, which may be sub-classed to program
   * hardware. This needs to be done after the status lock has been created
   * and will set m_ulOutputFormat for us.
   */
  SetOutputFormat(STM_VIDEO_OUT_DVI | STM_VIDEO_OUT_RGB);

  InvalidateAudioPackets();

  /*
   * Configure a default audio clock regeneration parameter as some sinks are
   * unhappy if they cannot regenerate a valid audio clock, even if no valid
   * audio is being transmitted.
   */
  WriteHDMIReg(STM_HDMI_AUDN, 6144);

  DEXIT();

  return true;
}
Example #18
STDMETHODIMP EMReadFrame::GrabFrame(EMMediaDataBuffer *p_opData, REFERENCE_TIME& p_vSampleTime)
{
	//WaitForSingleObject(m_oEventGrabWaitingForSeek,1000);
	WaitForSingleObject(m_oSemSeekWaitingForGrab,1000);
	//ResetEvent(m_oEventSeekWaitingForGrab);
	m_vFrameValid2 = false;
	if(m_vFrameValid)
	{
		ReleaseSemaphore(m_oSemSeekWaitingForGrab, 1, &m_vWaitForFrameToArriveCount);
		return E_FAIL;
	}

	if(m_vNumberOfTimesUsedSemaphores > m_vNumberOfTimesReleasedSemaphores)
	{
			//eo << "JUMP IN HERE" << ef;				
	}

//	WaitForSingleObject(m_hSem, INFINITE);

	if (m_vIsRunning || !m_vEndOfTheLine)
	{
		//WaitForSingleObject(m_hSem, INFINITE);
		if (!m_vIsRunning || m_vEndOfTheLine)
		{
			//SetEvent(m_oEventSeekWaitingForGrab);
			ReleaseSemaphore(m_oSemSeekWaitingForGrab, 1, &m_vWaitForFrameToArriveCount);
			return E_FAIL;
		}
		if (SizeHasChanged(&p_opData->m_oFormat))
		{
			SetOutputFormat(&(p_opData -> m_oFormat));
		}

		m_opBuffer = p_opData->Data();

		//ReleaseSemaphore(m_hSemWaitForFrameToArrive, 1, &m_vWaitForFrameToArriveCount);
		//ReleaseSemaphore(m_hSemWaitBeforeSeekOrGrab, 1, &m_vWaitBeforeSeekOrGrab);

		m_vNumberOfTimesUsedSemaphores++;

		//SetEvent(m_oEventWaitingForGrab);
		ReleaseSemaphore(m_oSemWaitingForGrab, 1, &m_vWaitBeforeSeekOrGrab);
		//WaitForSingleObject(m_oEventWaitingForSampleCB, INFINITE);
		WaitForSingleObject(m_oSemWaitingForSampleCB, 1000);//INFINITE);
		//ResetEvent(m_oEventWaitingForGrab);
		//ResetEvent(m_oEventWaitingForSampleCB);

		//WaitForSingleObject(m_hSemWaitForSampleCBToFinish, 100);//INFINITE);

		p_vSampleTime = (m_vSampleTime + m_vSegmentStart)/10;
		if(p_opData->Data() == NULL)
		{
			//eo << "Break Here" << ef;
		}

		m_vEndOfTheLine = false;
		//SetEvent(m_oEventSeekWaitingForGrab);
		ReleaseSemaphore(m_oSemSeekWaitingForGrab, 1, &m_vWaitForFrameToArriveCount);

		return S_OK;
	}
	
	//SetEvent(m_oEventSeekWaitingForGrab);
	ReleaseSemaphore(m_oSemSeekWaitingForGrab, 1, &m_vWaitForFrameToArriveCount);
	return E_FAIL;
}
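A stripped-down sketch of the two-semaphore handshake GrabFrame performs: publish a buffer to the sample callback, then wait with a bounded timeout (mirroring the 1000 ms waits above) until the callback reports that the frame was written. Standard C++20 only; every name below is hypothetical.

#include <chrono>
#include <iostream>
#include <semaphore>
#include <thread>

std::binary_semaphore gGrabReady{0};   // grabber -> callback: a buffer is available
std::binary_semaphore gFrameDone{0};   // callback -> grabber: the frame was written

int gFrameBuffer = 0;                  // stands in for the shared pixel buffer

void SampleCallback() {
    gGrabReady.acquire();              // wait until GrabFrame publishes the buffer
    gFrameBuffer = 42;                 // "decode" a frame into it
    gFrameDone.release();
}

bool GrabFrame(int& outFrame) {
    gGrabReady.release();              // hand the buffer to the callback thread
    if (!gFrameDone.try_acquire_for(std::chrono::seconds(1)))
        return false;                  // bounded wait, as in the code above
    outFrame = gFrameBuffer;
    return true;
}

int main() {
    std::thread callback(SampleCallback);
    int frame = 0;
    std::cout << (GrabFrame(frame) ? "got frame " : "timed out ") << frame << "\n";
    callback.join();
    return 0;
}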
Example #19
void CSTmHDMI::SetControl(stm_output_control_t ctrl, ULONG ulNewVal)
{
  switch (ctrl)
  {
    case STM_CTRL_SIGNAL_RANGE:
    {
      if(ulNewVal > STM_SIGNAL_VIDEO_RANGE)
        break;

      /*
       * Default implementation which just changes the AVI frame quantization
       * range, but doesn't enforce this in the hardware. Chips that can
       * actually change the clipping behaviour in the digital output stage
       * will override this to change the hardware as well.
       */
      m_signalRange = (stm_display_signal_range_t)ulNewVal;
      m_pIFrameManager->ForceAVIUpdate();
      break;
    }
    case STM_CTRL_HDMI_SINK_SUPPORTS_DEEPCOLOUR:
    {
      m_bSinkSupportsDeepcolour = (ulNewVal != 0);
      break;
    }
    case STM_CTRL_HDMI_PHY_CONF_TABLE:
    {
      m_pPHYConfig = (stm_display_hdmi_phy_config_t *)ulNewVal;
      break;
    }
    case STM_CTRL_HDMI_CEA_MODE_SELECT:
    {
      if(ulNewVal > STM_HDMI_CEA_MODE_16_9)
        break;

      m_pIFrameManager->SetCEAModeSelection((stm_hdmi_cea_mode_selection_t)ulNewVal);
      break;
    }
    case STM_CTRL_HDMI_AVI_QUANTIZATION:
    {
      if(ulNewVal > STM_HDMI_AVI_QUANTIZATION_BOTH)
        break;

      m_pIFrameManager->SetQuantization((stm_hdmi_avi_quantization_t)ulNewVal);
      break;
    }
    case STM_CTRL_HDMI_OVERSCAN_MODE:
    {
      if(ulNewVal > HDMI_AVI_INFOFRAME_UNDERSCAN)
        break;

      m_pIFrameManager->SetOverscanMode(ulNewVal);
      break;
    }
    case STM_CTRL_HDMI_CONTENT_TYPE:
    {
      m_pIFrameManager->SetAVIContentType(ulNewVal);
      break;
    }
    case STM_CTRL_AVMUTE:
    {
      g_pIOS->LockResource(m_statusLock);
      m_bAVMute = (ulNewVal != 0);
      SendGeneralControlPacket();
      g_pIOS->UnlockResource(m_statusLock);
      break;
    }
    case STM_CTRL_HDMI_POSTAUTH:
    {
      this->PostAuth((bool) ulNewVal);
      break;
    }
    case STM_CTRL_HDCP_ADVANCED:
    {
      /*
       * We need to keep track of HDCP mode to set the correct control
       * signaling in DVI mode.
       */
      m_bESS = (ulNewVal != 0);

      if(m_pCurrentMode && (m_ulOutputFormat & STM_VIDEO_OUT_DVI))
      {
        ULONG hdmicfg = ReadHDMIReg(STM_HDMI_CFG);

        if(m_bESS)
        {
          DEBUGF2(2,("CSTmHDMI::SetControl - DVI Extended ESS Signalling\n"));
          hdmicfg |= STM_HDMI_CFG_ESS_NOT_OESS;
        }
        else
        {
          DEBUGF2(2,("CSTmHDMI::SetControl - DVI Original ESS Signalling\n"));
      	  hdmicfg &= ~STM_HDMI_CFG_ESS_NOT_OESS;
        }

        WriteHDMIReg(STM_HDMI_CFG, hdmicfg);
      }

      break;
    }
    case STM_CTRL_VIDEO_OUT_SELECT:
    {
      SetOutputFormat(ulNewVal);
      break;
    }
    case STM_CTRL_YCBCR_COLORSPACE:
    {
      if(ulNewVal > STM_YCBCR_COLORSPACE_709)
        return;

      m_pIFrameManager->SetColorspaceMode(static_cast<stm_ycbcr_colorspace_t>(ulNewVal));
      break;
    }
    default:
    {
      DEBUGF2(2,("CSTmHDMI::SetControl Attempt to modify unexpected control %d\n",ctrl));
      break;
    }
  }
}
Example #20
// Format negotiation
HRESULT CWASAPIRenderFilter::NegotiateFormat(const WAVEFORMATEXTENSIBLE* pwfx, int nApplyChangesDepth, ChannelOrder* pChOrder)
{
  if (!pwfx)
    return VFW_E_TYPE_NOT_ACCEPTED;

  if (FormatsEqual(pwfx, m_pInputFormat))
  {
    *pChOrder = m_chOrder;
    return S_OK;
  }

  bool bApplyChanges = nApplyChangesDepth != 0;

  bool bitDepthForced = (m_pSettings->GetForceBitDepth() != 0 && m_pSettings->GetForceBitDepth() != pwfx->Format.wBitsPerSample);
  bool sampleRateForced = (m_pSettings->GetForceSamplingRate() != 0 && m_pSettings->GetForceSamplingRate() != pwfx->Format.nSamplesPerSec);
  
  if ((bitDepthForced || sampleRateForced) &&
       pwfx->SubFormat == KSDATAFORMAT_SUBTYPE_IEC61937_DOLBY_DIGITAL ||
       pwfx->SubFormat == KSDATAFORMAT_SUBTYPE_IEEE_FLOAT && bitDepthForced)
    return VFW_E_TYPE_NOT_ACCEPTED;
  
  if (((bitDepthForced && m_pSettings->GetForceBitDepth() != pwfx->Format.wBitsPerSample) ||
       (sampleRateForced && m_pSettings->GetForceSamplingRate() != pwfx->Format.nSamplesPerSec)))
    return VFW_E_TYPE_NOT_ACCEPTED;

  CAutoLock lock(&m_csResources);

  HRESULT hr = CreateAudioClient();
  if (FAILED(hr))
  {
    Log("CWASAPIRenderFilter::NegotiateFormat Error, audio client not initialized: (0x%08x)", hr);
    return VFW_E_CANNOT_CONNECT;
  }

  WAVEFORMATEXTENSIBLE* pwfxAccepted = NULL;
  WAVEFORMATEXTENSIBLE outFormat = *pwfx;
  hr = IsFormatSupported(pwfx, &pwfxAccepted);

  // Try different speaker setup
  if (FAILED(hr))
  {
    DWORD dwSpeakers = pwfx->dwChannelMask;
    if (dwSpeakers == KSAUDIO_SPEAKER_5POINT1)
      dwSpeakers = KSAUDIO_SPEAKER_5POINT1_SURROUND;
    else if (dwSpeakers == KSAUDIO_SPEAKER_5POINT1_SURROUND)
      dwSpeakers = KSAUDIO_SPEAKER_5POINT1;
    else if (dwSpeakers == KSAUDIO_SPEAKER_7POINT1)
      dwSpeakers = KSAUDIO_SPEAKER_7POINT1_SURROUND;
    else if (dwSpeakers == KSAUDIO_SPEAKER_7POINT1_SURROUND)
      dwSpeakers = KSAUDIO_SPEAKER_7POINT1;

    if (dwSpeakers != pwfx->dwChannelMask)
    {
      outFormat.dwChannelMask = dwSpeakers;
      hr = IsFormatSupported(&outFormat, &pwfxAccepted);
    }
  }

  if (FAILED(hr))
  {
    SAFE_DELETE_WAVEFORMATEX(pwfxAccepted);
    return hr;
  }

  if (bApplyChanges)
  {
    LogWaveFormat(&outFormat, "REN - applying  ");

    // Stop and discard audio client
    StopAudioClient();
    SAFE_RELEASE(m_pRenderClient);
    SAFE_RELEASE(m_pAudioClock);
    SAFE_RELEASE(m_pAudioClient);

    // We must use the incoming format so that the WAVEFORMATEXTENSIBLE vs. WAVEFORMATEX difference
    // that some audio drivers require does not cause an infinite loop of format changes
    SetInputFormat(pwfx);
    SetOutputFormat(&outFormat);

    // Reinitialize audio client
    hr = CreateAudioClient(true);
  }
  else
    LogWaveFormat(pwfx, "Input format    ");

  m_chOrder = *pChOrder = DS_ORDER;
  SAFE_DELETE_WAVEFORMATEX(pwfxAccepted);

  return hr;
}