예제 #1
0
void FlightControl::mainLoop()
{
	// Re-enable interrupts for this control pass.
	sei();

	// Latest yaw rate reading from the IMU.
	gz = AccelGyro.getRotationZ();

	loopCounter++;

	// Advance the scripted flight plan before dispatching the mode handler.
	flightPlan();

	// Run the handler for the current flight mode.
	switch (flightMode) {
	case Idle:
		idleMode();    // Do nothing. Stationary landed.
		break;
	case Hover:
		hoverMode();   // Maintain altitude. Can go forward in this mode.
		break;
	case TakeOff:
		takeOffMode(); // Increase throttle till height is achieved
		break;
	case Land:
		landMode();    // Decrease throttle to land smoothly
		break;
	case Turn:
		turnMode();    // Execute open loop turn
		break;
	}

	// Altitude control loop
	if (altPIDenabled) {
		if (alt > 200) {
			// NOTE(review): clamps the previous reading before the next sample
			// is taken — presumably a sensor-glitch guard; confirm intent.
			alt = 20;
		}
		// The altitude PID runs at a reduced rate relative to the main loop.
		if (loopCounter % ALT_PID_FREQ_DIV == 0) {
			alt = altLP.filter(AltSensor.value());
			const int16_t altError = altSetPoint - alt;
			// Different controller (gain set) for climbing vs. descending.
			if (altError > 0) {
				altInput = altPID->loop(altSetPoint, alt);
			} else {
				altInput = alt2PID->loop(altSetPoint, alt);
			}
			throttle = THROTTLE_SET + altInput;
			if (throttle > THROTTLE_MAX) {
				throttle = THROTTLE_MAX;
			}
			if (throttle < THROTTLE_MIN) {
				throttle = THROTTLE_MIN;
			}
		}
	}

	// Yaw control loop: apply differential duty between the two rotors.
	if (yawPIDenabled) {
		yawInput = yawPID->loop(gyroSetPoint, gz);
		topRotDuty = throttle - yawInput;
		botRotDuty = throttle + yawInput;
	}

	// Push the computed duty cycles out to the motors.
	setTopRotorDuty(topRotDuty);
	setBottomRotorDuty(botRotDuty);
}
예제 #2
0
// Descend by stepping the throttle down each pass; drop into idle once the
// throttle bottoms out or the altitude reading leaves the sane window.
inline void FlightControl::landMode() {
	throttle -= THROTTLE_LAND_DELTA;
	if (throttle < DUTY_MIN) {
		throttle = DUTY_MIN;
	}
	// Refresh the filtered altitude before the landing checks below.
	alt = altLP.filter(AltSensor.value());
	if (throttle == DUTY_MIN) {
		idle();
	}
	// Very low (touched down) or implausibly high readings also end the landing.
	if (alt < 25 || alt > 200) {
		idle();
	}
	// Yaw authority is meaningless at low thrust; disable the yaw PID.
	if (throttle < 300) {
		yawPIDenabled = false;
	}
}
예제 #3
0
파일: main.cpp 프로젝트: AliSayed/MoSync
	// Handle an incoming sensor event: smooth accelerometer data, derive
	// rotation angles, and hand them to the renderer in degrees.
	void sensorEvent(MASensor a) {

		// Only accelerometer readings are of interest here.
		if (a.type != SENSOR_TYPE_ACCELEROMETER) {
			return;
		}

		// Low-pass filter the raw gravity vector to suppress jitter.
		Vector3 smoothed = mFilter.filter(Vector3(a.values[0], a.values[1], a.values[2]));

		// Compute rotations from the filtered gravity vector alone; the
		// compass angle is deliberately passed as zero.
		mRotation = convertAccelerometerAndCompassDataToRadians(smoothed, 0.0f);

		// The renderer expects degrees, so convert each axis.
		mRenderer->setRotation(
			convertRadiansToDegrees(mRotation.x),
			convertRadiansToDegrees(mRotation.y),
			convertRadiansToDegrees(mRotation.z));
	}
예제 #4
0
int main(int argc, const char * argv[]) {
    
//    VideoCapture cap(1); // open the default camera
//    if(!cap.isOpened())  // check if we succeeded
//        return -1;

#ifdef ENABLE_BLUEFOX
    BlueFoxCam* cam = new BlueFoxCam();
#else	//default opencv camera
    CvCapture* capture = cvCreateCameraCapture(-1);
    if (!capture)
        return -1;
    cvSetCaptureProperty( capture, CV_CAP_PROP_FRAME_WIDTH, WIDTH);
    cvSetCaptureProperty( capture, CV_CAP_PROP_FRAME_HEIGHT, HEIGHT);
    cvSetCaptureProperty( capture, CV_CAP_PROP_FPS, 60);
#endif

    LowPassFilter* lpf = new LowPassFilter(100, 0);
    Detector* detector;

    try{
    	detector = new Detector("../haarcascades/haarcascade_frontalface_alt.xml",
        	"../haarcascades/haarcascade_profileface.xml", "../haarcascades/haarcascade_eye.xml", SCALEFACTOR);
     }catch(runtime_error e){
        cout << e.what() << endl;
     }

    SleepDetector sd(SCALEFACTOR);
    Mat frame(HEIGHT, WIDTH, CV_8UC3);
    Mat scaled;
//    namedWindow("Acquisition");
//    namedWindow("Elaboration", WINDOW_NORMAL);
//    resizeWindow("Elaboration", WIDTH, HEIGHT);
//    namedWindow("Debug", WINDOW_NORMAL);
    //resizeWindow("Debug", 300, 300);
    
    Face prevface;
    //prevface.eyes.push_back(Rect(0,0,0,0));
    
    VideoStreamServer vss(SRV_ADDR, SRV_PORT);
    
    
    for(;;)
    {
    	#ifdef BLUEFOX_CAM
	    try{
        	cam->getImage(frame.data);
            }catch(runtime_error e){
        	cout << e.what() << endl;
            }
	#else	//default opencv camera
	    frame = cvarrToMat(cvQueryFrame(capture), true);
	#endif //BLUEFOX_CAM
        
        detector->prepareImage(frame, scaled, prevface.face);
        
        
        if(prevface.eye.x){	//eye already detected, so perform track only
            //detector.display(prevface, frame);
            if(prevface.eyeOpen == 2){
            	this_thread::sleep_for(std::chrono::milliseconds(100));
            	if(lpf->Perform_digital(sd.isOpen(prevface.eyetpl, SleepDetector::SD_ADAPTIVE_THRESHOLDING)? 2 : 0) == 0){
            		cout << "Beep!----------------------------------------------------" << endl;
            	}
           	}else
           		prevface.eyeOpen = lpf->Perform_digital(sd.isOpen(prevface.eyetpl, SleepDetector::SD_ADAPTIVE_THRESHOLDING)? 2 : 0);
           	
            sd.display(frame, prevface.eye.tl());
            //imshow("Elaboration", prevface.eyetpl);
            detector->trackEye(scaled, prevface);
        }
        else	//eye not yet detected
        {
        	lpf->Perform_analog(1);
            detector->detect(scaled, prevface);
        }


        detector->display(prevface, frame);
//        imshow("Aquisition", frame);
        if(vss.isRunning()){
        	vss.queueFrame(frame);
        }
        	
        if(waitKey(10) >= 0) break;
    }
    vss.stop();

    // deinitialize camera
#ifdef ENABLE_BLUEFOX
    delete cam;
#endif
    
    delete detector;
    delete lpf;
    return 0;
}
예제 #5
0
  // Full key-detection pipeline: mix the audio to mono, low-pass filter and
  // downsample it, build a chromagram via spectral analysis, segment the track
  // on harmonic change, classify each segment's key, and pick the global key
  // as the one whose segments carry the most energy.
  // Note the in-place reductions on the chromagram between the two copies
  // below — the order of operations here is load-bearing.
  KeyDetectionResult KeyFinder::findKey(const AudioData& originalAudio, const Parameters& params){

    KeyDetectionResult result;

    // Work on a private copy so the caller's audio is left untouched.
    AudioData* workingAudio = new AudioData(originalAudio);

    workingAudio->reduceToMono();

    // TODO: there is presumably some good maths to determine filter frequencies
    // LPF cutoff sits just above the highest analysed frequency; the
    // downsample cutoff slightly higher to leave a transition band.
    float lpfCutoff = params.getLastFrequency() * 1.05;
    float dsCutoff = params.getLastFrequency() * 1.10;
    unsigned int downsampleFactor = (int)floor( workingAudio->getFrameRate() / 2 / dsCutoff );

    // get filter
    LowPassFilter* lpf = lpfFactory.getLowPassFilter(160, workingAudio->getFrameRate(), lpfCutoff, 2048);
    // feeding in the downsampleFactor for a shortcut
    lpf->filter(workingAudio, downsampleFactor);
    // note we don't delete the LPF; it's stored in the factory for reuse

    Downsampler ds;
    ds.downsample(workingAudio, downsampleFactor);

    SpectrumAnalyser* sa = new SpectrumAnalyser(workingAudio->getFrameRate(), params, &ctFactory);

    // run spectral analysis
    Chromagram* ch = sa->chromagram(workingAudio);

    // Audio and analyser are no longer needed once the chromagram exists.
    delete workingAudio;
    delete sa;

    // reduce chromagram
    // Snapshot the full chromagram before collapsing to one octave: the first
    // copy is taken after tuning-bin reduction, the second after octave folding.
    ch->reduceTuningBins(params);
    result.fullChromagram = Chromagram(*ch);
    ch->reduceToOneOctave(params);
    result.oneOctaveChromagram = Chromagram(*ch);

    // get harmonic change signal
    Segmentation* segmenter = Segmentation::getSegmentation(params);
    result.harmonicChangeSignal = segmenter->getRateOfChange(*ch, params);

    // get track segmentation
    std::vector<unsigned int> segmentBoundaries = segmenter->getSegments(result.harmonicChangeSignal, params);
    segmentBoundaries.push_back(ch->getHops()); // sentinel
    delete segmenter;

    // get key estimates for each segment
    KeyClassifier hc(params);
    std::vector<float> keyWeights(24); // TODO: not ideal using int cast of key_t enum. Hash?

    for (int s = 0; s < (signed)segmentBoundaries.size()-1; s++){
      KeyDetectionSegment segment;
      // Boundaries are half-open: this segment runs up to the next boundary.
      segment.firstHop = segmentBoundaries[s];
      segment.lastHop = segmentBoundaries[s+1] - 1;
      // collapse segment's time dimension
      // NOTE(review): segment.energy is accumulated without an explicit reset —
      // assumes KeyDetectionSegment zero-initializes it; confirm in its header.
      std::vector<float> segmentChroma(ch->getBins());
      for (unsigned int hop = segment.firstHop; hop <= segment.lastHop; hop++) {
        for (unsigned int bin = 0; bin < ch->getBins(); bin++) {
          float value = ch->getMagnitude(hop, bin);
          segmentChroma[bin] += value;
          segment.energy += value;
        }
      }
      segment.key = hc.classify(segmentChroma);
      // Silent segments contribute no weight to the global estimate.
      if(segment.key != SILENCE){
        keyWeights[segment.key] += segment.energy;
      }
      result.segments.push_back(segment);
    }

    delete ch;

    // get global key
    // Pick the key with the largest accumulated segment energy; stays SILENCE
    // if every segment was silent.
    result.globalKeyEstimate = SILENCE;
    float mostCommonKeyWeight = 0.0;
    for (int k = 0; k < (signed)keyWeights.size(); k++){
      if(keyWeights[k] > mostCommonKeyWeight){
        mostCommonKeyWeight = keyWeights[k];
        result.globalKeyEstimate = (key_t)k;
      }
    }

    return result;

  }