void Player::FrameMove(float fElapsedTime) { GetInput(); XMMATRIX mBodyCameraRot = XMMatrixRotationRollPitchYaw(0, m_fYawAngle, 0); XMMATRIX mHeadCameraRot = XMMatrixRotationRollPitchYaw(m_fPitchAngle, 0, 0); mHeadCameraRot.r[3] = XMVectorSet(0.f, 0.6f, 0.f, 1.f); mWorld = mBodyCameraRot * XMMatrixTranslationFromVector(mWorld.Translation()); m_pHead->SetRelativeWorld(mHeadCameraRot); UpdateAcceleration(fElapsedTime); ControllerNode::FrameMove(fElapsedTime); }
// Verify that the host's current stream formats match the channel layout this
// unit supports, then (re)create the DSP kernels.
ComponentResult karoke::Initialize()
{
    // get the current numChannels for input and output.
    // a host may test various combinations of these
    // regardless of the outInfo returned by our SupportedNumChannels method
    SInt16 inChannels  = (SInt16) GetInput(0)->GetStreamFormat().mChannelsPerFrame;
    SInt16 outChannels = (SInt16) GetOutput(0)->GetStreamFormat().mChannelsPerFrame;

    if (inChannels != NUM_INPUTS || outChannels != NUM_OUTPUTS)
        return kAudioUnitErr_FormatNotSupported;

    MaintainKernels();
    return noErr;
}
// Describes the render target this pass writes: same descriptor as the input
// scene color, renamed, with the pixel format toggled in single-viewport mode.
FPooledRenderTargetDesc FRCPassPostProcessSubsurfaceRecombine::ComputeOutputDesc(EPassOutputId InPassOutputId) const
{
    FPooledRenderTargetDesc Ret = GetInput(ePId_Input0)->GetOutput()->RenderTargetDesc;
    Ret.Reset();
    Ret.DebugName = TEXT("SceneColorSubsurface");
    if(bSingleViewportMode)
    {
        // we don't need an alpha channel any more as it was used for ScreenSpaceSubsurfaceScattering only
        // NOTE(review): the else-arm of this ternary promotes any non-FloatRGBA
        // format to PF_FloatRGBA, which adds an alpha channel and contradicts
        // the comment above -- confirm this round-trip toggle is intentional.
        Ret.Format = (Ret.Format == PF_FloatRGBA) ? PF_FloatRGB : PF_FloatRGBA;
    }
    // we replace the HDR SceneColor with this one
    return Ret;
}
// Return the editable input cell for group types that have one. Page breaks
// (and any unrecognized group type) carry no editable part.
EditorCell *GroupCell::GetEditable()
{
    switch (m_groupType)
    {
    case GC_TYPE_PAGEBREAK:
        return NULL;
    case GC_TYPE_CODE:
    case GC_TYPE_IMAGE:
    case GC_TYPE_TEXT:
    case GC_TYPE_TITLE:
    case GC_TYPE_SECTION:
    case GC_TYPE_SUBSECTION:
    case GC_TYPE_SUBSUBSECTION:
        return GetInput();
    default:
        return NULL;
    }
}
// Pure pass-through: forward the pooled render target of the single input to
// both this pass's output slot and the externally supplied destination.
void FRCPassPostProcessOutput::Process(FRenderingCompositePassContext& Context)
{
    const FRenderingCompositeOutputRef* InputRef = GetInput(ePId_Input0);

    // input is not hooked up correctly
    if (!InputRef)
    {
        return;
    }

    // pass through
    PassOutputs[0].PooledRenderTarget = InputRef->GetOutput()->PooledRenderTarget;

    check(ExternalRenderTarget);
    *ExternalRenderTarget = PassOutputs[0].PooledRenderTarget;
}
// Simple driver for the DoctorOffice simulation: prompts the user for a
// process count (at most 8) and Exec()s that many copies of the test program.
void main()
{
    int requested;
    int i = 0;

    Print("Please choose how many simulation process you want to exec: ",
          sizeof("Please choose how many simulation process you want to exec: ") / sizeof(char),
          0, 0);
    requested = GetInput();

    if (requested > 8)
    {
        Print("The max process number is 8.\n",
              sizeof("The max process number is 8.\n") / sizeof(char), 0, 0);
    }
    else
    {
        // Launch one simulation process per requested count.
        for (; i < requested; i++)
        {
            Exec("../test/DoctorOffice", sizeof("../test/DoctorOffice") / sizeof(char));
        }
    }

    Exit(0);
}
void mitk::NavigationDataToPointSetFilter::GenerateDataMode4D() { mitk::PointSet* output = GetOutput(); assert(output); for (unsigned int index = 0; index < this->GetNumberOfInputs(); index++) { const mitk::NavigationData* nd = GetInput(index); assert(nd); mitk::NavigationData::PositionType point = nd->GetPosition(); //get the position output->SetPoint( index, point, m_CurrentTimeStep); //store it in the pointset always at the current time step } if (m_CurrentTimeStep == m_RingBufferSize - 1) // update ring buffer index m_CurrentTimeStep = 0; else m_CurrentTimeStep++; }
// Wire up all tensor operands of the SVDF op from the operand list and decode
// the scalar rank/activation parameters.
SVDF::SVDF(const Operation& operation, std::vector<RunTimeOperandInfo>& operands)
{
    // Input-side tensors.
    input_           = GetInput(operation, operands, kInputTensor);
    weights_feature_ = GetInput(operation, operands, kWeightsFeatureTensor);
    weights_time_    = GetInput(operation, operands, kWeightsTimeTensor);
    bias_            = GetInput(operation, operands, kBiasTensor);
    state_in_        = GetInput(operation, operands, kStateInTensor);

    // Scalar parameters.
    params_.rank_ = getScalarData<int>(*GetInput(operation, operands, kRankParam));
    params_.activation_ = static_cast<TfLiteFusedActivation>(
        getScalarData<int>(*GetInput(operation, operands, kActivationParam)));

    // Output-side tensors.
    state_out_ = GetOutput(operation, operands, kStateOutTensor);
    output_    = GetOutput(operation, operands, kOutputTensor);
}
// Build and execute the pipeline that condenses the input down to the actual
// data needed for the min/max query, returning the condensed data object.
avtDataObject_p avtActualDataMinMaxQuery::ApplyFilters(avtDataObject_p inData)
{
    Preparation(inData);

    // Zones count as preserved only if every processor agrees they are.
    bool zonesPreserved = GetInput()->GetInfo().GetValidity().GetZonesPreserved();
    zonesPreserved = (bool)UnifyMinimumValue((int)zonesPreserved);

    if (!timeVarying && zonesPreserved)
    {
        // Static data with intact zones: reuse the originating source's
        // contract and re-execute the condense filter on a dataset copy.
        avtContract_p contract =
            inData->GetOriginatingSource()->GetGeneralContract();

        avtDataset_p ds;
        CopyTo(ds, inData);
        avtSourceFromAVTDataset termsrc(ds);
        avtDataObject_p obj = termsrc.GetOutput();

        condense->SetInput(obj);
        avtDataObject_p retObj = condense->GetOutput();
        retObj->Update(contract);
        return retObj;
    }
    else
    {
        // Otherwise build a fresh data request restricted to the query's SIL,
        // timestep, and first variable; ask for zone numbers if zones were
        // not preserved so results can be mapped back.
        avtDataRequest_p oldSpec = inData->GetOriginatingSource()->
            GetGeneralContract()->GetDataRequest();

        avtDataRequest_p newDS = new avtDataRequest(oldSpec, querySILR);
        newDS->SetTimestep(queryAtts.GetTimeStep());
        newDS->SetVariable(queryAtts.GetVariables()[0].c_str());
        if (!zonesPreserved)
            newDS->TurnZoneNumbersOn();

        avtContract_p contract =
            new avtContract(newDS, queryAtts.GetPipeIndex());

        avtDataObject_p temp;
        CopyTo(temp, inData);
        condense->SetInput(temp);
        avtDataObject_p retObj = condense->GetOutput();
        retObj->Update(contract);
        return retObj;
    }
}
// Finish execution: run the base-class PostExecute (with a dimension
// workaround) and, if any processor removed data, rebuild the original
// data extents for each requested variable.
void avtMissingDataFilter::PostExecute(void)
{
    // Work around a check in avtDataObjectToDatasetFilter::PostExecute that
    // causes all unstructured meshes with tdim < sdim to become polydata: we
    // temporarily raise the topological dimension to match the spatial
    // dimension, run the base-class PostExecute, then restore the old value.
    avtDataAttributes &outAtts = GetOutput()->GetInfo().GetAttributes();
    int sdim = outAtts.GetSpatialDimension();
    int tdim = outAtts.GetTopologicalDimension();
    if(tdim < sdim && sdim >= 2)
        outAtts.SetTopologicalDimension(sdim);
    avtDataTreeIterator::PostExecute();
    if(tdim < sdim && sdim >= 2)
        outAtts.SetTopologicalDimension(tdim);

    if(removeMode)
    {
        // If anyone removed data, redo the original extents.
        int dataWasRemoved = (int) this->removedData;
        if (canDoCollectiveCommunication)
            dataWasRemoved = UnifyMaximumValue(dataWasRemoved);
        if(dataWasRemoved > 0)
        {
            avtDataAttributes &atts = GetInput()->GetInfo().GetAttributes();
            avtDataset_p ds = GetTypedOutput();
            int nVars = atts.GetNumberOfVariables();
            double de[2];
            for (int i = 0 ; i < nVars ; i++)
            {
                // Only recompute extents for variables the contract wants.
                const char *vname = atts.GetVariableName(i).c_str();
                if (! contract->ShouldCalculateVariableExtents(vname))
                    continue;
                bool foundDE = avtDatasetExaminer::GetDataExtents(ds, de, vname);
                if (foundDE)
                {
                    // Fold this dataset's [min,max] into both the global and
                    // per-processor original data extents.
                    outAtts.GetOriginalDataExtents(vname)->Merge(de);
                    outAtts.GetThisProcsOriginalDataExtents(vname)->Merge(de);
                }
            }
        }
    }
}
// Generate one block of oscillator samples into m_Output, using child 0 as an
// FM modulation source when it is connected and produces a signal.
void OscNode::Process(unsigned int bufsize)
{
    // Grow the output buffer on demand.
    if (bufsize > (unsigned int)m_Output.GetLength())
    {
        m_Output.Allocate(bufsize);
    }

    ProcessChildren(bufsize);

    const bool fmDriven = ChildExists(0) && !GetChild(0)->IsTerminal();
    if (fmDriven)
    {
        m_WaveTable.ProcessFM(bufsize, m_Output, GetInput(0));
    }
    else
    {
        m_WaveTable.Process(bufsize, m_Output);
    }
}
//GetInput bool Profiles::GetInput (enInput nInput, bool *pbState, bool bMouseMovement) { *pbState = false; for (unsigned int i = 0; i != m_lProfiles.size (); i++) { if (!GetInput (nInput, pbState, i, bMouseMovement)) { return false; } if (*pbState) { break; } } return true; }//GetInput
// Validate that the input mesh is 2D and pick the revolution axis based on
// the mesh coordinate ordering.
void avtRevolvedVolume::PreExecute(void)
{
    avtSingleInputExpressionFilter::PreExecute();

    avtDataAttributes &atts = GetInput()->GetInfo().GetAttributes();
    if (atts.GetSpatialDimension() != 2)
    {
        EXCEPTION2(InvalidDimensionsException, "Revolved volume",
                   "2-dimensional");
    }

    haveIssuedWarning = false;

    // ZR-ordered meshes revolve about Y; everything else revolves about X.
    revolveAboutX = (atts.GetMeshCoordType() != AVT_ZR);
}
void avtMeshFilter::UpdateDataObjectInfo(void) { // // Technically, the topological dimension should be 2 if drawing // in opaque mode, and 1 otherwise. However, setting the // topological dimension to 2 will not allow the mesh to be // shifted towards the camera (in ShiftPlot operation), // and thus other plots will possibly obscure the mesh lines, making // them appear less-than solid. // GetOutput()->GetInfo().GetAttributes().SetKeepNodeZoneArrays(keepNodeZone); if (GetInput()->GetInfo().GetAttributes().GetTopologicalDimension() != 0) GetOutput()->GetInfo().GetAttributes().SetTopologicalDimension(1); GetOutput()->GetInfo().GetValidity().InvalidateZones(); GetOutput()->GetInfo().GetValidity().SetNormalsAreInappropriate(true); }
void avtSphereSliceFilter::UpdateDataObjectInfo(void) { avtDataAttributes &inAtts = GetInput()->GetInfo().GetAttributes(); avtDataAttributes &outAtts = GetOutput()->GetInfo().GetAttributes(); avtDataValidity &outValidity = GetOutput()->GetInfo().GetValidity(); outAtts.SetTopologicalDimension(inAtts.GetTopologicalDimension()-1); outValidity.InvalidateZones(); outValidity.ZonesSplit(); char params[200]; SNPRINTF(params, 200, "origin=%lg,%lg,%lg radius=%lg", atts.GetOrigin()[0], atts.GetOrigin()[1], atts.GetOrigin()[2], atts.GetRadius()); GetOutput()->GetInfo().GetAttributes().AddFilterMetaData("SphereSlice", params); }
// Output descriptor for the noise-blur pass: copy of the input descriptor,
// optionally with an overridden pixel format, set up for render-target use.
FPooledRenderTargetDesc FRCPassPostProcessNoiseBlur::ComputeOutputDesc(EPassOutputId InPassOutputId) const
{
    FPooledRenderTargetDesc Desc = GetInput(ePId_Input0)->GetOutput()->RenderTargetDesc;
    Desc.Reset();

    // Honor an explicit format override when one was requested.
    if (OverrideFormat != PF_Unknown)
    {
        Desc.Format = OverrideFormat;
    }

    // This pass rasterizes rather than computes: drop UAV, require RT binding.
    Desc.TargetableFlags &= ~TexCreate_UAV;
    Desc.TargetableFlags |= TexCreate_RenderTargetable;
    Desc.DebugName = TEXT("NoiseBlur");

    return Desc;
}
void EffectNode::Process(unsigned int bufsize) { if (bufsize>(unsigned int)m_Output.GetLength()) { m_Output.Allocate(bufsize); } ProcessChildren(bufsize); if (ChildExists(0) && !GetChild(0)->IsTerminal() && ChildExists(1)) { if (m_Type==CLIP) { m_Output=GetInput(0); if (GetChild(1)->IsTerminal()) { HardClip(m_Output, GetChild(1)->GetCVValue()); } else { MovingHardClip(m_Output, GetInput(1)); } } else if (m_Type==DISTORT) { m_Output=GetInput(0); if (GetChild(1)->IsTerminal()) { Distort(m_Output, GetChild(1)->GetCVValue()); } else { MovingDistort(m_Output, GetInput(1)); } } else if (ChildExists(2)) { switch (m_Type) { case CRUSH : m_Output=GetInput(0); Crush(m_Output, GetChild(1)->GetCVValue(), GetChild(2)->GetCVValue()); break; case DELAY : { m_Delay.SetDelay(GetChild(1)->GetCVValue()); m_Delay.SetFeedback(GetChild(2)->GetCVValue()); m_Delay.Process(bufsize, GetInput(0), m_Output); break; } default : assert(0); break; } } } }
void DebugCameraController2D::Update(float timeStep) { // Do not move if the UI has a focused element if (GetUI()->GetFocusElement()) return; // Do not move if interacting with UI controls if (GetSystemUI()->IsAnyItemActive()) return; Input* input = GetInput(); // Movement speed as world units per second float moveSpeed_ = speed_; if (input->GetKeyDown(KEY_SHIFT)) { moveSpeed_ *= 2; if (input->GetKeyPress(KEY_KP_PLUS)) speed_ += 1.f; else if (input->GetKeyPress(KEY_KP_MINUS)) speed_ -= 1.f; } if (input->GetMouseButtonDown(MOUSEB_RIGHT)) { IntVector2 delta = input->GetMouseMove(); if (input->IsMouseVisible() && delta != IntVector2::ZERO) input->SetMouseVisible(false); GetNode()->Translate2D(Vector2{(float)delta.x_ * -1.f, (float)delta.y_} * moveSpeed_ * timeStep); } else if (!input->IsMouseVisible()) input->SetMouseVisible(true); // Read WASD keys and move the camera scene node to the corresponding direction if they are pressed if (input->GetKeyDown(KEY_W)) GetNode()->Translate(Vector3::UP * moveSpeed_ * timeStep); if (input->GetKeyDown(KEY_S)) GetNode()->Translate(Vector3::DOWN * moveSpeed_ * timeStep); if (input->GetKeyDown(KEY_A)) GetNode()->Translate(Vector3::LEFT * moveSpeed_ * timeStep); if (input->GetKeyDown(KEY_D)) GetNode()->Translate(Vector3::RIGHT * moveSpeed_ * timeStep); }
void avtMeshLogFilter::PostExecute() { avtDataTreeIterator::PostExecute(); avtDataAttributes& inAtts = GetInput()->GetInfo().GetAttributes(); avtDataAttributes& outAtts = GetOutput()->GetInfo().GetAttributes(); // over-write spatial extents outAtts.GetOriginalSpatialExtents()->Clear(); outAtts.GetThisProcsOriginalSpatialExtents()->Clear(); // get the outputs's spatial extents double se[6]; if (inAtts.GetOriginalSpatialExtents()->HasExtents()) { inAtts.GetOriginalSpatialExtents()->CopyTo(se); if (xScaleMode == LOG) { avtMeshLogFilter_ScaleVal(se[0], useInvLogX); avtMeshLogFilter_ScaleVal(se[1], useInvLogX); } if (yScaleMode == LOG) { avtMeshLogFilter_ScaleVal(se[2], useInvLogY); avtMeshLogFilter_ScaleVal(se[3], useInvLogY); } outAtts.GetOriginalSpatialExtents()->Set(se); } else if (inAtts.GetThisProcsOriginalSpatialExtents()->HasExtents()) { inAtts.GetThisProcsOriginalSpatialExtents()->CopyTo(se); if (xScaleMode == LOG) { avtMeshLogFilter_ScaleVal(se[0], useInvLogX); avtMeshLogFilter_ScaleVal(se[1], useInvLogX); } if (yScaleMode == LOG) { avtMeshLogFilter_ScaleVal(se[2], useInvLogY); avtMeshLogFilter_ScaleVal(se[3], useInvLogY); } outAtts.GetThisProcsOriginalSpatialExtents()->Set(se); } }
void avtDatasetToDataObjectFilter::InputSetActiveVariable(const char *varname) { SetActiveVariableArgs args; args.varname = varname; avtDataTree_p tree = GetInputDataTree(); bool success; tree->Traverse(CSetActiveVariable, (void *) &args, success); // // Set up our data members for derived types that need this information. // activeVariableIsPointData = args.activeIsPoint; hasPointVars = args.hasPointVars; hasCellVars = args.hasCellVars; GetInput()->GetInfo().GetAttributes().SetActiveVariable(varname); }
// Perform one iteration of the FSLE calculation on the native mesh, creating
// the accumulation tree lazily on the first call.
bool avtLCSFilter::NativeMeshIterativeCalc(std::vector<avtIntegralCurve*> &ics)
{
    int offset = 0;

    // First call: build the data tree that accumulates the FSLE values.
    // (*fsle_dt == NULL dereferences the ref-pointer, i.e. "not created yet".)
    if( *fsle_dt == NULL )
    {
        fsle_dt = CreateIterativeCalcDataTree(GetInputDataTree());

        // When the data is replicated on all processors, only rank 0 keeps a
        // populated tree so the output is not duplicated across ranks.
        if (GetInput()->GetInfo().GetAttributes().DataIsReplicatedOnAllProcessors())
            if (PAR_Rank() != 0)
                fsle_dt = new avtDataTree();

        SetOutputDataTree(fsle_dt);
    }

    return MultiBlockIterativeCalc(fsle_dt, ics, offset);
}
// The reduce pass always writes a fixed-size HistogramTexelCount x 2 target,
// independent of the input histogram dimensions.
// NOTE: the previous version copied the input's RenderTargetDesc into locals
// (UnmodifiedRet / PixelExtent) that were never used; they have been removed.
FPooledRenderTargetDesc FRCPassPostProcessHistogramReduce::ComputeOutputDesc(EPassOutputId InPassOutputId) const
{
    // each ThreadGroup outputs one histogram
    FIntPoint NewSize = FIntPoint(FRCPassPostProcessHistogram::HistogramTexelCount, 2);

    // for quality float4 to get best quality for smooth eye adaptation transitions
    FPooledRenderTargetDesc Ret(FPooledRenderTargetDesc::Create2DDesc(
        NewSize, PF_A32B32G32R32F, FClearValueBinding::None,
        TexCreate_None, TexCreate_RenderTargetable, false));

    Ret.DebugName = TEXT("HistogramReduce");

    return Ret;
}
// Ensure the correct variable is active on the input before execution begins:
// honor an explicit switch request, otherwise activate the pipeline variable
// when a different variable is currently active.
void avtDatasetToDatasetFilter::PreExecute(void)
{
    avtDatasetToDataObjectFilter::PreExecute();

    avtDataAttributes &atts = GetInput()->GetInfo().GetAttributes();

    if (switchVariables)
    {
        // An explicit variable switch was requested.
        InputSetActiveVariable(activeVariable);
        return;
    }

    // Otherwise, make the pipeline variable active when something else is.
    if (atts.ValidActiveVariable() &&
        atts.GetVariableName() != pipelineVariable)
    {
        if (AutomaticallyMakePipelineVariableActive())
            InputSetActiveVariable(pipelineVariable);
    }
}
void mitk::pa::SpectralUnmixingSO2::InitializeOutputs() { // UNUSED unsigned int numberOfInputs = GetNumberOfIndexedInputs(); unsigned int numberOfOutputs = GetNumberOfIndexedOutputs(); mitk::PixelType pixelType = mitk::MakeScalarPixelType<float>(); const int NUMBER_OF_SPATIAL_DIMENSIONS = 3; auto* dimensions = new unsigned int[NUMBER_OF_SPATIAL_DIMENSIONS]; for(unsigned int dimIdx=0; dimIdx<NUMBER_OF_SPATIAL_DIMENSIONS; dimIdx++) { dimensions[dimIdx] = GetInput()->GetDimensions()[dimIdx]; } for (unsigned int outputIdx = 0; outputIdx < numberOfOutputs; outputIdx++) { GetOutput(outputIdx)->Initialize(pixelType, NUMBER_OF_SPATIAL_DIMENSIONS, dimensions); } }
void meNGOptimizeFilter::Update() { try { nglib::Ng_Meshing_Parameters* netgenParam = new nglib::Ng_Meshing_Parameters(); netgenParam->maxh = m_param->m_maxh; netgenParam->curvaturesafety = m_param->m_curvaturesafety; netgenParam->segmentsperedge = m_param->m_segmentsperedge; //netgenParam->optsteps2d = 3; // if the input is only the geometry then generate the mesh before using it if (nglib::Ng_GetNP(GetInput()->GetNGMesh( )) == 0) { GenerateSurfaceMesh( netgenParam ); } else { nglib::Ng_STL_MakeEdges(GetInput()->GetGeom( ),GetInput()->GetNGMesh( ),netgenParam); } netgenParam->optimize2d = "cmSmSm"; nglib::Ng_STL_OptimizeSurfaceMesh( GetInput()->GetGeom( ), GetInput()->GetNGMesh( ), netgenParam ); nglib::Ng_MeshRefinement( GetInput()->GetGeom( ), GetInput()->GetNGMesh( ) ); netgenParam->optimize2d = "m"; nglib::Ng_STL_OptimizeSurfaceMesh( GetInput()->GetGeom( ), GetInput()->GetNGMesh( ), netgenParam ); std::cout<< "optimization done!"<<std::endl; } catch (...) { throw; } }
void CHveditDlg::OnSet() { int n, i; // Update m_Voltage if (!GetInput()) return; // Get current selection n = m_ctlChannels.GetSelItems(m_nChannels, m_Selection); // Set voltage to selected channels for (i = 0; i < n; i++) m_Demand[ChannelIndex(m_Selection[i])] = m_Voltage; UpdateODB(n == 1 ? ChannelIndex(m_Selection[0]) : -1); UpdateListBox(n == 1 ? m_Selection[0] : -1); }
int svlFilterSplitter::AddOutput(const std::string &name, const unsigned int threadcount, const unsigned int buffersize) { if (GetOutput(name)) return SVL_FAIL; svlFilterOutput* output = svlFilterBase::AddOutput(name, false); output->SetThreadCount(threadcount); output->SetBufferSize(buffersize); const unsigned int size = static_cast<unsigned int>(AsyncOutputs.size()); AsyncOutputs.resize(size + 1); AsyncOutputs[size] = output; // If input is already connected, then set the type of the new output accordingly svlFilterInput* input = GetInput(); if (input->IsConnected()) output->SetType(input->GetType()); return SVL_OK; }
// Resolve the time-loop bounds (first/last/stride) into concrete values and
// compute how many time slices will be processed.
void avtTimeIteratorExpression::FinalizeTimeLoop()
{
    int numStates = GetInput()->GetInfo().GetAttributes().GetNumStates();

    // Negative values mean "use the default".
    if (firstTimeSlice < 0)
    {
        firstTimeSlice = 0;
    }
    if (lastTimeSlice < 0)
    {
        lastTimeSlice = numStates - 1;
    }
    if (timeStride < 0)
    {
        timeStride = 1;
    }

    // NOTE(review): '>=' also rejects firstTimeSlice == lastTimeSlice, so a
    // single-timestep loop is not allowed -- confirm that is intended.
    if (firstTimeSlice >= lastTimeSlice)
    {
        std::string msg("Start time must be smaller than end time for " );
        msg += GetType();
        msg += ".\n";
        EXCEPTION1(ImproperUseException, msg);
    }

    numTimeSlicesToProcess = (lastTimeSlice-firstTimeSlice)/timeStride+1;

    // NOTE(review): the warning says the end time is clamped, but
    // lastTimeSlice is not actually reduced here -- verify the iteration code
    // performs the clamp, or the loop may request states past numStates.
    if (lastTimeSlice >= numStates)
    {
        std::string msg(GetType());
        msg += ": Clamping end time to number of available timesteps.";
        avtCallback::IssueWarning(msg.c_str());
    }

    //
    // Ensure that the specified lastTimeSlice is included,
    // regardless of the timeStride.
    //
    actualLastTimeSlice = firstTimeSlice + (numTimeSlicesToProcess-1)*timeStride;
    if (actualLastTimeSlice < lastTimeSlice)
    {
        numTimeSlicesToProcess++;
        actualLastTimeSlice = lastTimeSlice;
    }
}
// Connect a new input to this sink. A request to re-connect the current input
// is logged and ignored rather than re-processed.
void avtDataObjectSink::SetInput(avtDataObject_p in)
{
    avtDataObject_p existing = GetInput();

    if (*in == *existing)
    {
        // Same object fed back in -- nothing to do.
        debug1 << "A data object sink's current input was fed back into "
               << "itself, ignoring..." << endl;
        return;
    }

    // SetTypedInput throws on any problem, so on return it has succeeded.
    SetTypedInput(in);
    ChangedInput();
}
void avtDataBinningFilter::UpdateDataObjectInfo(void) { avtDataAttributes &inAtts = GetInput()->GetInfo().GetAttributes(); avtDataAttributes &dataAtts = GetOutput()->GetInfo().GetAttributes(); dataAtts.AddVariable(varname); dataAtts.SetActiveVariable(varname.c_str()); dataAtts.SetVariableDimension(1); dataAtts.SetVariableType(AVT_SCALAR_VAR); if (atts.GetOutputType() == DataBinningAttributes::OutputOnBins) { if (atts.GetNumDimensions() == DataBinningAttributes::One) dataAtts.SetCentering(AVT_NODECENT); else dataAtts.SetCentering(AVT_ZONECENT); SetAxisNamesAndUnits(); } }