Пример #1
0
// Entry point that runs the DSP JIT test suite: initializes the tester,
// then invokes each individual opcode test in sequence.
// NOTE(review): the called test routines are defined elsewhere; names
// suggest they correspond to DSP instructions (dar/iar/sbclr/nx_* etc.)
// — confirm against the test definitions.
void AudioJitTests()
{
	DSPJitTester::Initialize();
	dar();
	iar();
	subarn();
	addarn();
	sbclr();
	sbset();

	nx_ir();
	nx_dr();
	nx_nr();
	nx_mv();

	set16_l();

	nx_s();
	nx_sn();
	nx_l();
	nx_ln();
	nx_ls();
	nx_lsn();
	nx_lsm();
	nx_lsnm();
	nx_sl();
	nx_sln();
	nx_slm();
	nx_slnm();
	nx_ld();
}
Пример #2
0
// Build the grid vessel of the requested kind ("histogram", "average" or
// "grid") from the user's input string, appending KERNEL/CONCENTRATION
// options parsed from the action's keywords. Aborts via plumed_merror on
// an unrecognized type. The created vessel is stored in mygrid.
// NOTE(review): mygrid ownership/cleanup is handled outside this method —
// confirm against the owning class.
void ActionWithGrid::createGrid( const std::string& type, const std::string& inputstr ){
  // Assemble the option string that configures the grid vessel
  std::string gridInput = inputstr;
  if( keywords.exists("KERNEL") ){
      std::string conc; parse("CONCENTRATION",conc);
      if( conc.empty() ){
          // No concentration given: use the requested kernel; DISCRETE
          // kernels take no bandwidth, all others do.
          std::string kernel; parse("KERNEL",kernel);
          if( kernel=="DISCRETE" ){
              gridInput += " KERNEL=" + kernel;
          } else {
              gridInput += " KERNEL=" + kernel + " " + getKeyword("BANDWIDTH");
          }
      } else {
          // A concentration parameter selects the fibonacci grid type
          gridInput += " TYPE=fibonacci CONCENTRATION=" + conc;
      }
  }
  // Register the grid keywords and wrap everything in the vessel options
  vesselbase::VesselOptions da("mygrid","",-1,gridInput,this);
  Keywords keys; gridtools::AverageOnGrid::registerKeywords( keys );
  vesselbase::VesselOptions dar( da, keys );
  // Dispatch on the requested grid kind
  if( type=="histogram" ) mygrid = new HistogramOnGrid(dar);
  else if( type=="average" ) mygrid = new AverageOnGrid(dar);
  else if( type=="grid" ) mygrid = new GridVessel(dar);
  else plumed_merror("no way to create grid of type " + type );
}
Пример #3
0
// Wrap a decoded QuickSync frame in a TffPict (NV12, two planes) with
// frame/field type, timestamps and aspect ratio filled in, feed the
// telecine detector, and hand the picture to the downstream sink.
// Returns E_POINTER on a null frame, otherwise the sink's result.
HRESULT TvideoCodecQuickSync::DeliverSurface(QsFrameData* frameData)
{
    if (frameData == NULL) {
        return E_POINTER;
    }

    // NV12: luma + interleaved chroma plane, same stride for both
    unsigned char* planes[4] = {frameData->y, frameData->u, 0, 0};
    ptrdiff_t lineSizes[4] = {frameData->dwStride, frameData->dwStride, 0, 0};

    // Frame type - not currently available from the decoder, default to I
    int frametype = FRAME_TYPE::I;
    if (frameData->frameType == QsFrameData::P) {
        frametype = FRAME_TYPE::P;
    } else if (frameData->frameType == QsFrameData::B) {
        frametype = FRAME_TYPE::B;
    }

    // Interlacing: WEAVE means progressive; otherwise field order decides
    int fieldtype;
    if (frameData->dwInterlaceFlags & AM_VIDEO_FLAG_WEAVE) {
        fieldtype = FIELD_TYPE::PROGRESSIVE_FRAME;
    } else if (frameData->dwInterlaceFlags & AM_VIDEO_FLAG_FIELD1FIRST) {
        fieldtype = FIELD_TYPE::INT_TFF;
    } else {
        fieldtype = FIELD_TYPE::INT_BFF;
    }

    Trect clipRect(frameData->rcClip);
    TffPict pict(FF_CSP_NV12, planes, lineSizes, clipRect, frameData->bReadOnly, frametype, fieldtype, 0, NULL); //TODO: src frame size
    pict.rectClip = frameData->rcClip;

    // Time stamps
    pict.rtStart = frameData->rtStart;
    pict.rtStop  = frameData->rtStop;

    // Display aspect ratio, only when both components are non-zero
    if (frameData->dwPictAspectRatioX * frameData->dwPictAspectRatioY != 0) {
        Rational dar(frameData->dwPictAspectRatioX, frameData->dwPictAspectRatioY);
        pict.setDar(dar);
    }

    // Soft telecine detection:
    // if "Detect soft telecine and average frame durations" is enabled,
    // frames are flagged as progressive and frame durations are averaged.
    // pict.film is valid even when the setting is disabled.
    const bool topFieldFirst = 0 != (frameData->dwInterlaceFlags & AM_VIDEO_FLAG_FIELD1FIRST);
    const bool repeatField   = 0 != (frameData->dwInterlaceFlags & AM_VIDEO_FLAG_REPEAT_FIELD);
    telecineManager.new_frame(topFieldFirst, repeatField, pict.rtStart, pict.rtStop);
    telecineManager.get_fieldtype(pict);
    telecineManager.get_timestamps(pict);

    return sinkD->deliverDecodedSample(pict);
}