/*static*/ void MediaProfiles::startElementHandler(void *userData, const char *name, const char **atts) { MediaProfiles *profiles = (MediaProfiles *) userData; if (strcmp("Video", name) == 0) { createVideoCodec(atts, profiles); } else if (strcmp("Audio", name) == 0) { createAudioCodec(atts, profiles); } else if (strcmp("VideoEncoderCap", name) == 0 && strcmp("true", atts[3]) == 0) { profiles->mVideoEncoders.add(createVideoEncoderCap(atts)); } else if (strcmp("AudioEncoderCap", name) == 0 && strcmp("true", atts[3]) == 0) { profiles->mAudioEncoders.add(createAudioEncoderCap(atts)); } else if (strcmp("VideoDecoderCap", name) == 0 && strcmp("true", atts[3]) == 0) { profiles->mVideoDecoders.add(createVideoDecoderCap(atts)); } else if (strcmp("AudioDecoderCap", name) == 0 && strcmp("true", atts[3]) == 0) { profiles->mAudioDecoders.add(createAudioDecoderCap(atts)); } else if (strcmp("EncoderOutputFileFormat", name) == 0) { profiles->mEncoderOutputFileFormats.add(createEncoderOutputFileFormat(atts)); } else if (strcmp("CamcorderProfiles", name) == 0) { profiles->mCurrentCameraId = getCameraId(atts); profiles->addStartTimeOffset(profiles->mCurrentCameraId, atts); } else if (strcmp("EncoderProfile", name) == 0) { profiles->mCamcorderProfiles.add( createCamcorderProfile(profiles->mCurrentCameraId, atts, profiles->mCameraIds)); } else if (strcmp("ImageEncoding", name) == 0) { profiles->addImageEncodingQualityLevel(profiles->mCurrentCameraId, atts); } }
/* Create New Appointment if Interval is not Once.
 *
 * Inserts a new `camera_appointment` row whose Date_Taken is the next
 * occurrence computed from this appointment's interval; Camera_ID and
 * Interval are copied from the current object.
 *
 * Fix: the prepared statement was leaked if any Connector/C++ call threw
 * (e.g. sql::SQLException from execute()); it is now released on both the
 * normal and the exceptional path.
 */
void Camera_Appointment::createNewAppointmentBasedFromInterval(sql::Connection *con){
    string new_datetime = getNextIntervalDateTime();
    sql::PreparedStatement *prep_stmt = con->prepareStatement("INSERT INTO `camera_appointment` (`Date_Taken`, `Camera_ID`, `Interval`) VALUES (?, ?, ?)");
    try {
        prep_stmt->setString(1, new_datetime);
        prep_stmt->setInt(2, getCameraId());
        prep_stmt->setString(3, getInterval());
        prep_stmt->execute();
    } catch (...) {
        delete prep_stmt; // don't leak the statement on SQL errors
        throw;
    }
    delete prep_stmt;
}
// One iteration of the hotplug-watcher thread: blocks on the inotify FD,
// decodes the packed event records, and forwards plug/unplug transitions
// of the watched per-camera status files to the camera framework.
// Returns true to be called again, false (after closing the FD) to stop.
bool EmulatedCameraHotplugThread::threadLoop() {

    // If requestExit was already called, mRunning will be false
    while (mRunning) {
        char buffer[EVENT_BUF_LEN];
        // Blocks until inotify has at least one event (or the FD errors out);
        // EINTR is retried transparently.
        int length = TEMP_FAILURE_RETRY(
                read(mInotifyFd, buffer, EVENT_BUF_LEN));

        if (length < 0) {
            ALOGE("%s: Error reading from inotify FD, error: '%s' (%d)",
                  __FUNCTION__, strerror(errno), errno);
            mRunning = false;
            break;
        }

        ALOGV("%s: Read %d bytes from inotify FD", __FUNCTION__, length);

        // A single read() can return several variable-length inotify_event
        // records packed back to back; walk them one at a time.
        int i = 0;
        while (i < length) {
            inotify_event* event = (inotify_event*) &buffer[i];

            if (event->mask & IN_IGNORED) {
                // The watch was removed. During a requested shutdown this is
                // expected (mRunning already cleared under the lock);
                // otherwise the watched file was deleted out from under us.
                Mutex::Autolock al(mMutex);
                if (!mRunning) {
                    ALOGV("%s: Shutting down thread", __FUNCTION__);
                    break;
                } else {
                    ALOGE("%s: File was deleted, aborting", __FUNCTION__);
                    mRunning = false;
                    break;
                }
            } else if (event->mask & IN_CLOSE_WRITE) {
                // A writer closed a status file: map the watch descriptor
                // back to its camera and publish the new state.
                int cameraId = getCameraId(event->wd);

                if (cameraId < 0) {
                    ALOGE("%s: Got bad camera ID from WD '%d",
                          __FUNCTION__, event->wd);
                } else {
                    // Check the file for the new hotplug event
                    String8 filePath = getFilePath(cameraId);
                    /**
                     * NOTE: we carefully avoid getting an inotify
                     * for the same exact file because it's opened for
                     * read-only, but our inotify is for write-only
                     */
                    int newStatus = readFile(filePath);

                    if (newStatus < 0) {
                        // Status file unreadable -> stop the thread.
                        mRunning = false;
                        break;
                    }

                    int halStatus = newStatus ?
                            CAMERA_DEVICE_STATUS_PRESENT :
                            CAMERA_DEVICE_STATUS_NOT_PRESENT;
                    gEmulatedCameraFactory.onStatusChanged(cameraId,
                                                           halStatus);
                }
            } else {
                ALOGW("%s: Unknown mask 0x%x", __FUNCTION__, event->mask);
            }

            // Advance past this record's fixed header plus its
            // variable-length name field.
            i += EVENT_SIZE + event->len;
        }
    }

    // Thread is exiting: release the inotify FD and return false so the
    // framework does not invoke threadLoop() again.
    if (!mRunning) {
        close(mInotifyFd);
        return false;
    }

    return true;
}
//--------------------------------------------------------------- void CameraExporter::exportCamera( ExportNode* exportNode ) { if ( !exportNode->getIsInVisualScene() ) return; String cameraId = getCameraId(*exportNode); INode* iNode = exportNode->getINode(); CameraObject* camera = (CameraObject*)iNode->GetObjectRef(); INode* targetNode = ( camera->ClassID().PartA() == LOOKAT_CAM_CLASS_ID) ? iNode->GetTarget() : 0; if ( camera ) { if ( mDocumentExporter->isExportedObject(ObjectIdentifier(camera)) ) return; mDocumentExporter->insertExportedObject(ObjectIdentifier(camera), exportNode); // Retrieve the camera parameters block IParamBlock* parameters = (IParamBlock*) camera->GetReference(MaxCamera::PBLOCK_REF); COLLADASW::BaseOptic * optics = 0; if ( camera->IsOrtho() ) { optics = new COLLADASW::OrthographicOptic(COLLADASW::LibraryCameras::mSW); // Calculate the target distance for FOV calculations float targetDistance; if ( targetNode ) { Point3 targetTrans = targetNode->GetNodeTM(mDocumentExporter->getOptions().getAnimationStart()).GetTrans(); Point3 cameraTrans = iNode->GetNodeTM(mDocumentExporter->getOptions().getAnimationStart()).GetTrans(); targetDistance = (targetTrans - cameraTrans).Length(); } else { targetDistance = camera->GetTDist(mDocumentExporter->getOptions().getAnimationStart()); } ConversionInverseOrthoFOVFunctor conversionInverseOrthoFOVFunctor(targetDistance); if ( AnimationExporter::isAnimated(parameters, MaxCamera::FOV) ) { optics->setXMag(conversionInverseOrthoFOVFunctor(parameters->GetFloat(MaxCamera::FOV)), XMAG_SID); mAnimationExporter->addAnimatedParameter(parameters, MaxCamera::FOV, cameraId, XMAG_SID, 0, true, &conversionInverseOrthoFOVFunctor); } else { optics->setXMag(conversionInverseOrthoFOVFunctor(parameters->GetFloat(MaxCamera::FOV))); } } else { optics = new COLLADASW::PerspectiveOptic(COLLADASW::LibraryCameras::mSW); if ( AnimationExporter::isAnimated(parameters, MaxCamera::FOV) ) { 
optics->setXFov(COLLADASW::MathUtils::radToDegF(parameters->GetFloat(MaxCamera::FOV)), XFOV_SID); mAnimationExporter->addAnimatedParameter(parameters, MaxCamera::FOV, cameraId, XFOV_SID, 0, true, &ConversionFunctors::radToDeg); } else { optics->setXFov(COLLADASW::MathUtils::radToDegF(parameters->GetFloat(MaxCamera::FOV))); } } bool hasAnimatedZNear = mAnimationExporter->addAnimatedParameter(parameters, MaxCamera::NEAR_CLIP, cameraId, optics->getZNearDefaultSid(), 0); optics->setZNear(parameters->GetFloat(MaxCamera::NEAR_CLIP), hasAnimatedZNear); bool hasAnimatedZFar = mAnimationExporter->addAnimatedParameter(parameters, MaxCamera::FAR_CLIP, cameraId, optics->getZFarDefaultSid(), 0); optics->setZFar(parameters->GetFloat(MaxCamera::FAR_CLIP), hasAnimatedZFar); #ifdef UNICODE String exportNodeName = COLLADABU::StringUtils::wideString2utf8String(exportNode->getINode()->GetName()); COLLADASW::Camera colladaCamera(COLLADASW::LibraryCameras::mSW, optics, cameraId, COLLADASW::Utils::checkNCName(exportNodeName)); #else COLLADASW::Camera colladaCamera(COLLADASW::LibraryCameras::mSW, optics, cameraId, COLLADASW::Utils::checkNCName(exportNode->getINode()->GetName())); #endif setExtraTechnique(&colladaCamera); // Retrieve the camera target if ( targetNode ) { ExportNode* targetExportNode = mExportSceneGraph->getExportNode(targetNode); addExtraParameter(EXTRA_PARAMETER_TARGET, "#" + targetExportNode->getId()); } if (camera->GetMultiPassEffectEnabled(0, FOREVER)) { IMultiPassCameraEffect *multiPassCameraEffect = camera->GetIMultiPassCameraEffect(); if (multiPassCameraEffect) { Class_ID id = multiPassCameraEffect->ClassID(); // the camera could have both effects, but not in Max if (id == FMULTI_PASS_MOTION_BLUR_CLASS_ID) { IParamBlock2 *parameters = multiPassCameraEffect->GetParamBlock(0); if (parameters ) { addParamBlockAnimatedExtraParameters(MOTION_BLUR_ELEMENT, MOTION_BLUR_PARAMETERS, MOTION_BLUR_PARAMETER_COUNT, parameters, cameraId); } } else if (id == 
FMULTI_PASS_DOF_CLASS_ID) { IParamBlock2 *parameters = multiPassCameraEffect->GetParamBlock(0); if (parameters ) { addParamBlockAnimatedExtraParameters(DEPTH_OF_FIELD_ELEMENT, DEPTH_OF_FIELD_PARAMETERS, DEPTH_OF_FIELD_PARAMETER_COUNT, parameters, cameraId); addExtraParameter(TARGETDISTANCE_PARAMETER, camera->GetTDist(0)); } } } } addCamera(colladaCamera); delete optics; } }
// Dump this appointment's record to stdout, one labeled field per line.
void Camera_Appointment::printDetails(){
    cout << "Next appointment: " << getDateTaken() << endl
         << "ID: " << getId() << endl
         << "Camera_ID: " << getCameraId() << endl
         << "Interval: " << getInterval() << endl;
}