// Follow a line, after we've been aligned to it, stopping based on color, distance, or time void _followLine(FloorColor color, int speed, FloorColor stopColor, int stopDistance, int stopTime) { // Reset the drive encoder if we're going to use it if (stopDistance != 0) { resetDriveEncoder(); } // Loop until we hit a stop condition while (true) { if (stopColor != UNKNOWN && (onColor(stopColor, LSvalRaw(lineRight)))) { break; } else if (stopDistance > 0 && readDriveEncoder() > stopDistance) { break; } else if (stopDistance < 0 && readDriveEncoder() < stopDistance) { break; } else if (stopTime > 0 && true) { // Can we check elapsed time? break; } if (onColor(color, LSvalRaw(lineLeft))) { runDriveMotors(0, speed); } else if (onColor(color, LSvalRaw(lineRight))) { runDriveMotors(speed, 0); } else { runDriveMotors(speed, speed); } } // Always stop when we're done stopDriveMotors(); }
// Determine the color under the sensor FloorColor floorColor(int sensorVal) { int color; for (color = 0; color < NUM_COLORS; color++) { if (onColor(color, sensorVal)) { return (FloorColor)color; } } return UNKNOWN; }
// Align to a line, assuming we start at or just beyond it, but stop if we hit the STOP color with either sensor void alignLine(FloorColor color, int speed, FloorColor stop = UNKNOWN, bool reverse = false) { // Spin ~180 if we've been asked to align in reverse if (reverse) { // Nudge forward runDriveMotors(100, 100); wait1Msec(150); // Spin in place runDriveMotors(100, -100); wait1Msec(1500); stopDriveMotors(); } // Start our turn to get the front sensor on the line while(!onColor(color, LSvalRaw(lineLeft))) { // Stop alignment efforts if we hit the specified stop floor color if (stop != UNKNOWN && (onColor(stop, LSvalRaw(lineRight)) || onColor(stop, LSvalRaw(lineLeft)))) { break; } // Turn in place runDriveMotors(speed, -1 * speed); } stopDriveMotors(); // When the back sensor is on the line, we're aligned while(!onColor(color, LSvalRaw(lineRight))) { // Stop alignment efforts if we hit the specified stop floor color if (stop != UNKNOWN && (onColor(stop, LSvalRaw(lineRight)) || onColor(stop, LSvalRaw(lineLeft)))) { break; } // Drive forward until the front sensor is clear of the line // TODO: Adjust onColor to provide more useful results, so we can invert this test if (!onColor(GREY, LSvalRaw(lineLeft))) { runDriveMotors(speed, speed); // Turn while neither sensor is on the line } else if (!onColor(color, LSvalRaw(lineRight)) && !onColor(color, LSvalRaw(lineLeft))) { runDriveMotors(speed, 0); } } // Always stop when we're done stopDriveMotors(); }
// moc-generated meta-call dispatcher for QLed (Qt meta-object system).
// Delegates to QWidget first, then handles this class's own meta-methods
// (ids 0-4 via qt_static_metacall) and its 4 properties
// (value, onColor, offColor, shape) for read/write/query calls,
// rebasing _id at each stage so derived classes see their own offsets.
// NOTE(review): generated code — regenerate with moc rather than editing by hand.
int QLed::qt_metacall(QMetaObject::Call _c, int _id, void **_a) { _id = QWidget::qt_metacall(_c, _id, _a); if (_id < 0) return _id; if (_c == QMetaObject::InvokeMetaMethod) { if (_id < 5) qt_static_metacall(this, _c, _id, _a); _id -= 5; } #ifndef QT_NO_PROPERTIES else if (_c == QMetaObject::ReadProperty) { void *_v = _a[0]; switch (_id) { case 0: *reinterpret_cast< bool*>(_v) = value(); break; case 1: *reinterpret_cast< ledColor*>(_v) = onColor(); break; case 2: *reinterpret_cast< ledColor*>(_v) = offColor(); break; case 3: *reinterpret_cast< ledShape*>(_v) = shape(); break; } _id -= 4; } else if (_c == QMetaObject::WriteProperty) { void *_v = _a[0]; switch (_id) { case 0: setValue(*reinterpret_cast< bool*>(_v)); break; case 1: setOnColor(*reinterpret_cast< ledColor*>(_v)); break; case 2: setOffColor(*reinterpret_cast< ledColor*>(_v)); break; case 3: setShape(*reinterpret_cast< ledShape*>(_v)); break; } _id -= 4; } else if (_c == QMetaObject::ResetProperty) { _id -= 4; } else if (_c == QMetaObject::QueryPropertyDesignable) { _id -= 4; } else if (_c == QMetaObject::QueryPropertyScriptable) { _id -= 4; } else if (_c == QMetaObject::QueryPropertyStored) { _id -= 4; } else if (_c == QMetaObject::QueryPropertyEditable) { _id -= 4; } else if (_c == QMetaObject::QueryPropertyUser) { _id -= 4; } #endif // QT_NO_PROPERTIES return _id; }
// moc-generated meta-call dispatcher for View3D (Qt meta-object system).
// Delegates to QWidget first, then maps this class's meta-method ids 0-30
// onto its signals/slots (view fitting, zoom/pan, standard view
// orientations, display-mode and edit actions), unpacking slot arguments
// from _a where present, and rebases _id by 31 for derived classes.
// NOTE(review): generated code — regenerate with moc rather than editing by hand.
int View3D::qt_metacall(QMetaObject::Call _c, int _id, void **_a) { _id = QWidget::qt_metacall(_c, _id, _a); if (_id < 0) return _id; if (_c == QMetaObject::InvokeMetaMethod) { switch (_id) { case 0: selectionChanged(); break; case 1: fitAll(); break; case 2: fitArea(); break; case 3: zoom(); break; case 4: zoomplus(); break; case 5: zoomminus(); break; case 6: pan(); break; case 7: globalPan(); break; case 8: front(); break; case 9: back(); break; case 10: top(); break; case 11: bottom(); break; case 12: left(); break; case 13: right(); break; case 14: axo(); break; case 15: rotation(); break; case 16: reset(); break; case 17: hlrOn(); break; case 18: hlrOff(); break; case 19: updateToggled((*reinterpret_cast< bool(*)>(_a[1]))); break; case 20: onBackground(); break; case 21: timerEvent(); break; case 22: onWireframe(); break; case 23: onShading(); break; case 24: onColor(); break; case 25: onMaterial(); break; case 26: onMaterial((*reinterpret_cast< int(*)>(_a[1]))); break; case 27: onTransparency(); break; case 28: onTransparency((*reinterpret_cast< int(*)>(_a[1]))); break; case 29: onDelete(); break; case 30: onUnDelete(); break; } _id -= 31; } return _id; }
// True when the sensor reading falls inside the BLACK color band.
bool onBlack(int sensorVal) {
    const bool matched = onColor(BLACK, sensorVal);
    return matched;
}
// True when the sensor reading falls inside the RED color band.
bool onRed(int sensorVal) {
    const bool matched = onColor(RED, sensorVal);
    return matched;
}
// True when the sensor reading falls inside the BLUE color band.
bool onBlue(int sensorVal) {
    const bool matched = onColor(BLUE, sensorVal);
    return matched;
}
// True when the sensor reading falls inside the GREY color band.
bool onGrey(int sensorVal) {
    const bool matched = onColor(GREY, sensorVal);
    return matched;
}
// True when the sensor reading falls inside the WHITE color band.
bool onWhite(int sensorVal) {
    const bool matched = onColor(WHITE, sensorVal);
    return matched;
}