// Seeks the active track to the given absolute tick.
//
// The parser state is snapshotted first; if the requested tick lies past the
// end of the track, the snapshot is restored and false is returned.
//
// @param tick           Absolute tick position to seek to.
// @param fireEvents     If true, meta/sysEx/channel events encountered while
//                       scanning forward are sent to the driver.
// @param stopNotes      If true, sounding notes are silenced (or hung, with
//                       smart jump) after the seek.
// @param dontSendNoteOn If true, Note On events are suppressed during the
//                       scan even when fireEvents is set (used by engines
//                       where fast-forwarded notes are audible, e.g. SCI).
// @return true on success, false if the tick is beyond the end of the track.
bool MidiParser::jumpToTick(uint32 tick, bool fireEvents, bool stopNotes, bool dontSendNoteOn) {
	if (_active_track >= _num_tracks)
		return false;

	// Snapshot the current state so we can roll back if the seek fails,
	// and so smart jump can hang the notes that were sounding before it.
	Tracker currentPos(_position);
	EventInfo currentEvent(_next_event);

	// Rewind to the start of the track and scan forward event by event.
	resetTracking();
	_position._play_pos = _tracks[_active_track];
	parseNextEvent(_next_event);
	if (tick > 0) {
		while (true) {
			EventInfo &info = _next_event;
			if (_position._last_event_tick + info.delta >= tick) {
				// Target tick falls before the next event: interpolate the
				// play time into the gap and stop scanning.
				_position._play_time += (tick - _position._last_event_tick) * _psec_per_tick;
				_position._play_tick = tick;
				break;
			}

			// Consume this event's delta and advance the clocks.
			_position._last_event_tick += info.delta;
			_position._last_event_time += info.delta * _psec_per_tick;
			_position._play_tick = _position._last_event_tick;
			_position._play_time = _position._last_event_time;

			if (info.event == 0xFF) {
				// Meta event.
				if (info.ext.type == 0x2F) { // End of track
					// Requested tick is past the end: restore the snapshot.
					_position = currentPos;
					_next_event = currentEvent;
					return false;
				} else {
					if (info.ext.type == 0x51 && info.length >= 3) // Tempo
						// Tempo changes must be applied even when not firing
						// events, or _psec_per_tick drifts during the scan.
						setTempo(info.ext.data[0] << 16 | info.ext.data[1] << 8 | info.ext.data[2]);
					if (fireEvents)
						_driver->metaEvent(info.ext.type, info.ext.data, (uint16) info.length);
				}
			} else if (fireEvents) {
				if (info.event == 0xF0) {
					// SysEx: strip a trailing 0xF7 terminator if present,
					// since the driver appends its own.
					if (info.ext.data[info.length-1] == 0xF7)
						_driver->sysEx(info.ext.data, (uint16)info.length-1);
					else
						_driver->sysEx(info.ext.data, (uint16)info.length);
				} else {
					// The note on sending code is used by the SCUMM engine. Other engine using this code
					// (such as SCI) have issues with this, as all the notes sent can be heard when a song
					// is fast-forwarded. Thus, if the engine requests it, don't send note on events.
					if (info.command() == 0x9 && dontSendNoteOn) {
						// Don't send note on; doing so creates a "warble" with some instruments on the MT-32.
						// Refer to patch #3117577
					} else {
						sendToDriver(info.event, info.basic.param1, info.basic.param2);
					}
				}
			}

			parseNextEvent(_next_event);
		}
	}

	if (stopNotes) {
		if (!_smartJump || !currentPos._play_pos) {
			// No smart jump possible: just cut everything off.
			allNotesOff();
		} else {
			// Smart jump: temporarily restore the pre-seek state so the
			// notes sounding *there* can be hung, then return to the target.
			EventInfo targetEvent(_next_event);
			Tracker targetPosition(_position);

			_position = currentPos;
			_next_event = currentEvent;
			hangAllActiveNotes();

			_next_event = targetEvent;
			_position = targetPosition;
		}
	}

	// Stop the current parse pass; playback resumes from the new position.
	_abort_parse = true;
	return true;
}
void VisionSubsystem::ProcessImage() { printf("Vision: Starting Vision Subsystem\n"); /////////////// // Seting Up // /////////////// targetVisable[TOP_TARGET] = false; targetVisable[MIDDLE_TARGET] = false; targetVisable[BOTTOM_TARGET] = false; targetDistances[TOP_TARGET] = 0.0; targetDistances[MIDDLE_TARGET] = 0.0; targetDistances[BOTTOM_TARGET] = 0.0; targetPositionX[TOP_TARGET] = 0.0; targetPositionY[TOP_TARGET] = 0.0; targetPositionX[MIDDLE_TARGET] = 0.0; targetPositionY[MIDDLE_TARGET] = 0.0; targetPositionX[BOTTOM_TARGET] = 0.0; targetPositionY[BOTTOM_TARGET] = 0.0; /* * This creates a object with the needed values for the processing * the image later on, for a certain color. */ printf("Vision: Setting the clor threshold values\n"); Threshold threshold(THRESHOLD_HUE_MIN, THRESHOLD_HUE_MAX, THRESHOLD_SATURATION_MIN, THRESHOLD_SATURATION_MAX, THRESHOLD_VALUE_MIN, THRESHOLD_VALUE_MAX); ParticleFilterCriteria2 criteria[] = { {IMAQ_MT_AREA, AREA_MINIMUM, 65535, false, false} }; /* * This is the function that sets up the axis camera to get images. * To use the camera on the second port on the cRIO, uncommet the second line below * with "192.168.0.90" in it. */ printf("Vision: Setting camera IP to 10.30.81.12/n"); AxisCamera &camera = AxisCamera::GetInstance("10.30.81.12"); //AxisCamera &camera = AxisCamera::GetInstance("192.168.0.90"); // // This creates a Color image, then on the second line it fills it with the image from the camera. printf("Vision: Creating ColorImage object image\n"); ColorImage *image; printf("Vision: Getting the Image from the camera \n"); image = camera.GetImage(); ////////////////////////////// // Image processing section // ////////////////////////////// //Process the image with the threshold values printf("Vision: Filtering the image with threshold values into object thresholdImage\n"); BinaryImage *thesholdImage = image->ThresholdHSV(threshold); //This will fill shape that is complete and fill in the inside of siad shape. 
printf("Vision: Filling in the convex shapes into the object of convexHullImage\n"); BinaryImage *convexHullImage = thesholdImage->ConvexHull(false); //This will get rid of random particles in the image that are notconcentrated enougth. printf("Vision: Filtering image for the unwanted random particles"); BinaryImage *filteredImage = convexHullImage->ParticleFilter(criteria, 1); //This creates a report that will be used later to idenify targets printf("Vision: Creating the report of the filtered Image\n"); vector<ParticleAnalysisReport> *reports = filteredImage->GetOrderedParticleAnalysisReports(); //This creates a data stucture that is used to score objects. scores = new Scores[reports->size()]; for (unsigned i = 0; i < reports->size(); i++) { ParticleAnalysisReport *report = &(reports->at(i)); scores[i].rectangularity = scoreRectangularity(report); scores[i].aspectRatioOuter = scoreAspectRatio(filteredImage, report, true); scores[i].aspectRatioInner = scoreAspectRatio(filteredImage, report, false); scores[i].xEdge = scoreXEdge(thesholdImage, report); scores[i].yEdge = scoreYEdge(thesholdImage, report); if(scoreCompare(scores[i], false)) { printf("Vision: particle: %d is High Goal centerX %f centerY: %f \n", i , report->center_mass_x_normalized, report->center_mass_y_normalized); printf("Vision: Distance: %f \n", computeDistance(thesholdImage, report, false)); targetPositionX[TOP_TARGET] = report->center_mass_x; targetPositionY[TOP_TARGET] = report->center_mass_y; targetDistances[TOP_TARGET] = computeDistance(thesholdImage, report, false); targetVisable[TOP_TARGET] = true; targetPositionX[TOP_TARGET] = targetPosition(TOP_TARGET, true); targetPositionY[TOP_TARGET] = targetPosition(TOP_TARGET, false); } else if (scoreCompare(scores[i], true)){ printf("Vision: particle: %d is Middle Goal centerX %f centerY: %f \n", i , report->center_mass_x_normalized, report->center_mass_y_normalized); printf("Vision: Distance: %f \n", computeDistance(thesholdImage, report, true)); 
targetPositionX[MIDDLE_TARGET] = report->center_mass_x; targetPositionY[MIDDLE_TARGET] = report->center_mass_y; targetDistances[MIDDLE_TARGET] = computeDistance(thesholdImage, report, true); targetVisable[MIDDLE_TARGET] = true; targetPositionX[MIDDLE_TARGET] = targetPosition(MIDDLE_TARGET, true); targetPositionY[MIDDLE_TARGET] = targetPosition(MIDDLE_TARGET, false); } else { printf("Vision: particle %d is not a goal centerX: %f centery: %f \n" , i, report->center_mass_x_normalized, report->center_mass_y_normalized); } printf("Vision: rect: %f ARinner: %f \n", scores[i].rectangularity, scores[i].aspectRatioInner); printf("Vision: ARouter: %f xEdge: %f yEdge: %f \n", scores[i].aspectRatioOuter, scores[i].xEdge, scores[i].yEdge); } //printf("\n"); printf("Vision: Deleting the object filtered image\n"); delete filteredImage; printf("Vision: Deleting the objectconvexHullImage\n"); delete convexHullImage; printf("Vision: Deleting the object thresholdimage\n"); delete thesholdImage; printf("Vision: Deleting the object image"); delete image; delete scores; delete reports; printf("Vision: Done\n"); }
void PeaksAndValleys::init(int row, int column) { initMesh(row, column); //Create Effect Technique int shaderFlag = D3D10_SHADER_ENABLE_STRICTNESS; #if defined(DEBUG) || defined(_DEBUG ) shaderFlag |= D3D10_SHADER_DEBUG |D3D10_SHADER_SKIP_OPTIMIZATION; #endif ID3D10Device *device = DirectEngine::getInstance()->getDevice(); ID3D10Blob *errorBlob = nullptr; int result = D3DX10CreateEffectFromFile(L"color.fx", nullptr,nullptr, "fx_4_0", shaderFlag, 0, device, nullptr, nullptr, &_effect,&errorBlob,nullptr ); if (result < 0) { if (errorBlob) { MessageBoxA(nullptr, (char*)errorBlob->GetBufferPointer(), nullptr, 0); errorBlob->Release(); } DXTrace(__FILE__, __LINE__, result, L"D3DX10CreateEffectFromFile",true); } _effectTech = _effect->GetTechniqueByName("ColorTech"); _mvpMatrixV = _effect->GetVariableByName("g_MVPMatrix")->AsMatrix(); //Create Layout D3D10_INPUT_ELEMENT_DESC inputDesc[2]; inputDesc[0].SemanticName = "POSITION"; inputDesc[0].SemanticIndex = 0; inputDesc[0].Format = DXGI_FORMAT_R32G32B32_FLOAT; inputDesc[0].InputSlot = 0; inputDesc[0].AlignedByteOffset = 0; inputDesc[0].InputSlotClass = D3D10_INPUT_PER_VERTEX_DATA; inputDesc[0].InstanceDataStepRate = 0; inputDesc[1].SemanticName = "COLOR"; inputDesc[1].SemanticIndex = 0; inputDesc[1].Format = DXGI_FORMAT_R32G32B32A32_FLOAT; inputDesc[1].InputSlot = 0; inputDesc[1].AlignedByteOffset = sizeof(float) * 3; inputDesc[1].InputSlotClass = D3D10_INPUT_PER_VERTEX_DATA; inputDesc[1].InstanceDataStepRate = 0; // D3D10_PASS_DESC passDesc; _effectTech->GetPassByName("P0")->GetDesc(&passDesc); result = device->CreateInputLayout(inputDesc, 2, passDesc.pIAInputSignature,passDesc.IAInputSignatureSize,&_inputLayout); assert(result>=0); //Matrix D3DXMatrixIdentity(&_modelMatrix); D3DXMatrixIdentity(&_projMatrix); D3DXMatrixIdentity(&_viewMatrix); //Create Project Matrix auto &winSize = DirectEngine::getInstance()->getWinSize(); D3DXMatrixPerspectiveFovLH(&_projMatrix,M_PI/4,winSize.width/winSize.height,1.0f,400.0f); 
D3DXVECTOR3 eyePosition(0,80,-120); D3DXVECTOR3 targetPosition(0,0,0); D3DXVECTOR3 upperVec(0,1,0); D3DXMatrixLookAtLH(&_viewMatrix, &eyePosition, &targetPosition, &upperVec); }