Example #1
void CResizeWnd::OnLButtonDown(UINT nFlags, CPoint point)
{
	CRect r;
	GetClientRect(r);
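	// Build a thin rubber-band rectangle centered on the click point: 2 pixels wide when the bar is vertical, 2 pixels tall when it is horizontal.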

	if (m_vertical)
	{
		r.left = point.x - 1;
		r.right = point.x + 1;
		r.top = -2048;
		r.bottom = 2048;
	}
	else
	{
		r.top = point.y - 1;
		r.bottom = point.y + 1;
		r.left = -2048;
		r.right = 2048;
	}

	CRectTracker tracker(r, CRectTracker::hatchInside);

	if (m_vertical)
	{
		tracker.Track(this, point, FALSE, AfxGetMainWnd());
		m_adjust_width = tracker.m_rect.left;
		m_adjust_height = 0;
		AfxGetMainWnd()->PostMessage(WM_COMMAND, ID_RESIZE);
	}
	else
	{
		tracker.Track(this, point, FALSE, GetParent());
		m_adjust_width = 0;
		m_adjust_height = tracker.m_rect.top - point.y;
		GetParent()->PostMessage(WM_COMMAND, ID_HORZ_RESIZE);
	}

	CWnd::OnLButtonDown(nFlags, point);
}
Example #2
int main() {

    ARToolKitPlus::TrackerSingleMarker tracker(640, 480);
    tracker.init(NULL, 0, 0);
    ARToolKitPlus::Camera* camera = new ARToolKitPlus::Camera();
    camera->xsize = 640;
    camera->ysize = 480;
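    // Fill the 3x4 camera matrix: focal lengths in mat[0][0]/mat[1][1], principal point in mat[0][2]/mat[1][2], zero last column; kc holds the lens distortion coefficients.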
    camera->mat[0][0] = 596.28278;
    camera->mat[1][0] = 0;
    camera->mat[2][0] = 0;
    camera->mat[0][1] = 0;
    camera->mat[1][1] = 598.22026;
    camera->mat[2][1] = 0;
    camera->mat[0][2] = 683.95141;
    camera->mat[1][2] = 450.66728;
    camera->mat[2][2] = 1.0;
    camera->mat[0][3] = 0;
    camera->mat[1][3] = 0;
    camera->mat[2][3] = 0;

    camera->kc[0] = -0.27569944;
    camera->kc[1] = 0.09206397200000001;
    camera->kc[2] =  0.0001978362;
    camera->kc[3] = 0.00034858682;
    camera->kc[4] = -0.014973806000000001;

    camera->fc[0] = camera->mat[0][0];
    camera->fc[1] = camera->mat[1][1];
    camera->cc[0] = camera->mat[0][2];
    camera->cc[1] = camera->mat[1][2];

    tracker.setCamera(camera, 0., 1000.);

    tracker.getCamera()->printSettings();
//    camera->printSettings();


}
Example #3
/*-----------------------------------------------------------------------------
	Add a browser interface to the list of browser interfaces
-----------------------------------------------------------------------------*/
void CPagetestBase::AddBrowser(CComPtr<IWebBrowser2> browser)
{
	CBrowserTracker tracker(browser);

	EnterCriticalSection(&cs);
	browsers.AddTail(tracker);

	if( !hMainWindow )
		browser->get_HWND((LONG *)&hMainWindow);
	
	// create a window for the thread if necessary
	HWND hWnd = NULL;
	if( !threadWindows.Lookup(tracker.threadId, hWnd) )
	{
		LeaveCriticalSection(&cs);

		TCHAR wndClassName[100];
		wsprintf(wndClassName, _T("Pagetest Thread Window %08X"), tracker.threadId);
		
		WNDCLASS wndClass;
		memset(&wndClass, 0, sizeof(wndClass));
		wndClass.lpszClassName = wndClassName;
		wndClass.lpfnWndProc = ThreadWindowProc;
		wndClass.hInstance = _AtlBaseModule.GetModuleInstance();
		if( RegisterClass(&wndClass) )
		{
			hWnd = ::CreateWindow(wndClassName, wndClassName, WS_POPUP, 0, 0, 0, 0, NULL, NULL, _AtlBaseModule.GetModuleInstance(), NULL);
			if( hWnd )
			{
				EnterCriticalSection(&cs);
				threadWindows.SetAt(tracker.threadId, hWnd);
				LeaveCriticalSection(&cs);
			}
		}
	}
	else
		LeaveCriticalSection(&cs);
}
int main(int argc, char **argv)
{
  ros::init(argc, argv, "dot_tracker");
  try
    {
      DotTracker tracker(1);
      if (ros::ok())
        tracker.spin();
      ROS_INFO_STREAM("Finishing spinning");
    }
  catch (std::exception& e)
    {
      std::cerr << "fatal error: " << e.what() << std::endl;
      ROS_ERROR_STREAM("fatal error: " << e.what());
      return 1;
    }
  catch (...)
    {
      ROS_ERROR_STREAM("unexpected error");
      return 2;
    }
  return 0;
}
Status MigrationChunkClonerSourceLegacy::nextCloneBatch(OperationContext* txn,
                                                        Collection* collection,
                                                        BSONArrayBuilder* arrBuilder) {
    dassert(txn->lockState()->isCollectionLockedForMode(_args.getNss().ns(), MODE_IS));

    ElapsedTracker tracker(txn->getServiceContext()->getFastClockSource(),
                           internalQueryExecYieldIterations,
                           Milliseconds(internalQueryExecYieldPeriodMS.load()));
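    // 'tracker' bounds how long a single batch is filled: once the yield interval elapses, the loop below stops and returns whatever has been collected so far.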

    stdx::lock_guard<stdx::mutex> sl(_mutex);

    std::set<RecordId>::iterator it;

    for (it = _cloneLocs.begin(); it != _cloneLocs.end(); ++it) {
        // We must always make progress in this method by at least one document because empty return
        // indicates there is no more initial clone data.
        if (arrBuilder->arrSize() && tracker.intervalHasElapsed()) {
            break;
        }

        Snapshotted<BSONObj> doc;
        if (collection->findDoc(txn, *it, &doc)) {
            // Use the builder size instead of accumulating the document sizes directly so that we
            // take into consideration the overhead of BSONArray indices.
            if (arrBuilder->arrSize() &&
                (arrBuilder->len() + doc.value().objsize() + 1024) > BSONObjMaxUserSize) {
                break;
            }

            arrBuilder->append(doc.value());
        }
    }

    _cloneLocs.erase(_cloneLocs.begin(), it);

    return Status::OK();
}
Example #6
/**
 * Test StatisticsTracker
 */
void test_statisticstracker()
{

    StatisticsTrackerOptions options;
    options.doCov = options.doMax = options.doMean = options.doMin = options.doVar = true;
    
    StatisticsTracker tracker("test", 0, 0, 0, 0, 0, options);
    tracker.update(1);
    tracker.update(2);
    tracker.update(3);
    tracker.update(4);
    tracker.update(5);
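    // For the samples 1..5: count = 5, min = 1, max = 5, mean = 3, and the population variance (4+1+0+1+4)/5 = 2, matching the assertions below.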

    ASSERT_EQUAL(tracker.getCount(), 5, "StatisticsTracker::getCount failed");
    ASSERT_EQUAL(tracker.getMinimum(), 1, "StatisticsTracker::getMin failed");
    ASSERT_EQUAL(tracker.getMean(), 3, "StatisticsTracker::getMean failed");
    ASSERT_EQUAL(tracker.getMaximum(), 5, "StatisticsTracker::getMax failed");
    ASSERT_EQUAL(tracker.getName(), "test", "StatisticsTracker::getName failed");
    ASSERT_EQUAL(tracker.getVariance(), 2, "StatisticsTracker::getVariance failed");
    
    tracker.setName("test2");
    ASSERT_EQUAL(tracker.getName(), "test2", "StatisticsTracker::setName failed");

}
Example #7
#ifdef OPTI
double main_simulation(double *optiNorms) // assumed signature under OPTI: returns the fitness and receives the normalized parameters used below
#else
void main_simulation()
#endif
{
    // inputs
    double fitness;

    #ifdef OPTI
    double *optiParams;
    #endif

    Loop_inputs *loop_inputs;

    // initialization
	loop_inputs = init_simulation();

    // optimization init
    #ifdef OPTI

    optiParams = (double*) malloc(NB_PARAMS_TO_OPTIMIZE*sizeof(double));

    get_real_params_to_opt(optiNorms, optiParams);

    erase_for_opti(optiParams, loop_inputs->MBSdata);

    free(optiParams);

    #endif
    
    // -- Simbody -- //

    #ifdef SIMBODY

    // Create the system. Define all "system" objects - system, matter, forces, tracker, contactForces.
    MultibodySystem           system;
    SimbodyMatterSubsystem    matter(system);
    ContactTrackerSubsystem   tracker(system); 
    CompliantContactSubsystem contactForces(system, tracker);
    system.setUpDirection(+ZAxis); // that is for visualization only. The default direction is +X


    SimbodyVariables simbodyVariables;

    // set all the mechanical parameters of the contact
    
    simbodyVariables.p_system        = &system;
    simbodyVariables.p_matter        = &matter;
    simbodyVariables.p_tracker       = &tracker;
    simbodyVariables.p_contactForces = &contactForces;

    //  cout<<"BoxInd in Main = "<<BoxInd<<" -- should be default \n";
    
    //init_Simbody(&simbodyVariables);

    init_Simbody(&simbodyVariables, loop_inputs->MBSdata->user_IO->simbodyBodies);
    
    // These are "system" commands. We cannot avoid them.
    system.realizeTopology();
    State state = system.getDefaultState();

    simbodyVariables.p_state = &state;

    // This is a "system" command. We cannot avoid it.
    system.realizeModel(state);

    p_simbodyVariables = &simbodyVariables;

    #endif

	// loop
    fitness = loop_simulation(loop_inputs);

    // end of the simulation
    finish_simulation(loop_inputs);

    #ifdef OPTI

    return fitness;

    #else

    #if defined(PRINT_REPORT)
    printf("fitness: %f\n", fitness);
    #endif

    #endif
}
Example #8
TFilePanel::TFilePanel(file_panel_mode mode, BMessenger* target,
	const BEntry* startDir, uint32 nodeFlavors, bool multipleSelection,
	BMessage* message, BRefFilter* filter, uint32 containerWindowFlags,
	window_look look, window_feel feel, bool hideWhenDone)
	:
	BContainerWindow(0, containerWindowFlags, look, feel, 0,
		B_CURRENT_WORKSPACE),
	fDirMenu(NULL),
	fDirMenuField(NULL),
	fTextControl(NULL),
	fClientObject(NULL),
	fSelectionIterator(0),
	fMessage(NULL),
	fHideWhenDone(hideWhenDone),
	fIsTrackingMenu(false)
{
	InitIconPreloader();

	fIsSavePanel = (mode == B_SAVE_PANEL);

	BRect windRect(85, 50, 568, 296);
	MoveTo(windRect.LeftTop());
	ResizeTo(windRect.Width(), windRect.Height());

	fNodeFlavors = (nodeFlavors == 0) ? B_FILE_NODE : nodeFlavors;

	if (target)
		fTarget = *target;
	else
		fTarget = BMessenger(be_app);

	if (message)
		SetMessage(message);
	else if (fIsSavePanel)
		fMessage = new BMessage(B_SAVE_REQUESTED);
	else
		fMessage = new BMessage(B_REFS_RECEIVED);

	gLocalizedNamePreferred
		= BLocaleRoster::Default()->IsFilesystemTranslationPreferred();

	// check for legal starting directory
	Model* model = new Model();
	bool useRoot = true;
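	// If the supplied start directory is unusable, fall back to the user's home directory and, failing that, to the boot volume's root directory.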

	if (startDir) {
		if (model->SetTo(startDir) == B_OK && model->IsDirectory())
			useRoot = false;
		else {
			delete model;
			model = new Model();
		}
	}

	if (useRoot) {
		BPath path;
		if (find_directory(B_USER_DIRECTORY, &path) == B_OK) {
			BEntry entry(path.Path(), true);
			if (entry.InitCheck() == B_OK && model->SetTo(&entry) == B_OK)
				useRoot = false;
		}
	}

	if (useRoot) {
		BVolume volume;
		BDirectory root;
		BVolumeRoster volumeRoster;
		volumeRoster.GetBootVolume(&volume);
		volume.GetRootDirectory(&root);

		BEntry entry;
		root.GetEntry(&entry);
		model->SetTo(&entry);
	}

	fTaskLoop = new PiggybackTaskLoop;

	AutoLock<BWindow> lock(this);
	CreatePoseView(model);
	fPoseView->SetRefFilter(filter);
	if (!fIsSavePanel)
		fPoseView->SetMultipleSelection(multipleSelection);

	fPoseView->SetFlags(fPoseView->Flags() | B_NAVIGABLE);
	fPoseView->SetPoseEditing(false);
	AddCommonFilter(new BMessageFilter(B_KEY_DOWN, key_down_filter));
	AddCommonFilter(new BMessageFilter(B_SIMPLE_DATA,
		TFilePanel::MessageDropFilter));
	AddCommonFilter(new BMessageFilter(B_NODE_MONITOR, TFilePanel::FSFilter));

	// inter-application observing
	BMessenger tracker(kTrackerSignature);
	BHandler::StartWatching(tracker, kDesktopFilePanelRootChanged);

	Init();
}
	void GL3FrameBufferProvider::detach_depth_stencil()
	{
		FrameBufferStateTracker tracker(bind_target, handle, gc_provider);
		detach_object(GL_DEPTH_STENCIL_ATTACHMENT);
	}
Example #10
	void GL3FrameBufferProvider::attach_depth_stencil(const Texture2D &texture, int level)
	{
		FrameBufferStateTracker tracker(bind_target, handle, gc_provider);
		attach_object(GL_DEPTH_STENCIL_ATTACHMENT, texture, level, 0, 0);
	}
Example #11
/*! \section example51 Example 51: (DOS) Result regularization using secondary criterion.

It is known that feature selection may over-fit. As in the case of over-trained classifiers,
over-selected feature subsets may generalize poorly. This unwanted effect can lead to
serious degradation of generalization ability, i.e., model or decision-rule behavior 
on previously unknown data. It has been suggested (Raudys: Feature Over-Selection, LNCS 4109, 2006, 
or Somol et al., ICPR 2010) that preferring a subset with slightly-worse-than-maximal criterion
value can actually improve generalization. FST3 makes this possible through result tracking
and subsequent selection of alternative solution by means of secondary criterion maximization.
In this example we show a 3-Nearest Neighbor Wrapper based feature selection process, where
the final result is eventually chosen among a group of solutions close enough to the achieved
maximum, so as to optimize the secondary criterion. The group of solutions to select from is defined 
by means of a user-selected margin value (permitted primary criterion value difference from the known
maximum). In this case we show that even the simplest secondary criterion (mere preference of 
smaller subsets) can improve classification accuracy on previously unknown data.
*/
int main()
{
	try{
	typedef double RETURNTYPE; 	typedef double DATATYPE;  typedef double REALTYPE;
	typedef unsigned int IDXTYPE;  typedef unsigned int DIMTYPE;  typedef short BINTYPE;
	typedef FST::Subset<BINTYPE, DIMTYPE> SUBSET;
	typedef FST::Data_Intervaller<std::vector<FST::Data_Interval<IDXTYPE> >,IDXTYPE> INTERVALLER;
	typedef boost::shared_ptr<FST::Data_Splitter<INTERVALLER,IDXTYPE> > PSPLITTER;
	typedef FST::Data_Splitter_CV<INTERVALLER,IDXTYPE> SPLITTERCV;
	typedef FST::Data_Splitter_5050<INTERVALLER,IDXTYPE> SPLITTER5050;
	typedef FST::Data_Accessor_Splitting_MemTRN<DATATYPE,IDXTYPE,INTERVALLER> DATAACCESSOR; // uncomment for TRN data format
	//typedef FST::Data_Accessor_Splitting_MemARFF<DATATYPE,IDXTYPE,INTERVALLER> DATAACCESSOR; // uncomment for ARFF data format
	typedef FST::Distance_Euclid<DATATYPE,DIMTYPE,SUBSET> DISTANCE;
	typedef FST::Classifier_kNN<RETURNTYPE,DATATYPE,IDXTYPE,DIMTYPE,SUBSET,DATAACCESSOR,DISTANCE> CLASSIFIERKNN;
	typedef FST::Criterion_Wrapper<RETURNTYPE,SUBSET,CLASSIFIERKNN,DATAACCESSOR> WRAPPERKNN;
	typedef FST::Criterion_Subset_Size<RETURNTYPE,SUBSET> CRITSUBSIZE;
	typedef FST::Criterion_Negative<CRITSUBSIZE,RETURNTYPE,SUBSET> NEGATIVECRIT;
	typedef FST::Sequential_Step_Straight<RETURNTYPE,DIMTYPE,SUBSET,WRAPPERKNN> EVALUATOR;
	typedef FST::Result_Tracker_Regularizer<RETURNTYPE,IDXTYPE,DIMTYPE,SUBSET,NEGATIVECRIT> TRACKER;

		std::cout << "Starting Example 51: (DOS) Result regularization using secondary criterion..." << std::endl;
	// keep second half of data for independent testing of final classification performance
		PSPLITTER dsp_outer(new SPLITTER5050());
	// in the course of search use the first half of data by 3-fold cross-validation in wrapper FS criterion evaluation
		PSPLITTER dsp_inner(new SPLITTERCV(3));
	// do not scale data
		boost::shared_ptr<FST::Data_Scaler<DATATYPE> > dsc(new FST::Data_Scaler_void<DATATYPE>());
	// set-up data access
		boost::shared_ptr<std::vector<PSPLITTER> > splitters(new std::vector<PSPLITTER>); 
		splitters->push_back(dsp_outer); splitters->push_back(dsp_inner);
		boost::shared_ptr<DATAACCESSOR> da(new DATAACCESSOR("data/waveform_40.trn",splitters,dsc));
		da->initialize();
	// initiate access to split data parts
		da->setSplittingDepth(0); if(!da->getFirstSplit()) throw FST::fst_error("50/50 data split failed.");
		da->setSplittingDepth(1); if(!da->getFirstSplit()) throw FST::fst_error("3-fold cross-validation failure.");
	// initiate the storage for subset to-be-selected
		boost::shared_ptr<SUBSET> sub(new SUBSET(da->getNoOfFeatures()));  sub->deselect_all();
	// set-up 3-Nearest Neighbor classifier based on Euclidean distances
		boost::shared_ptr<CLASSIFIERKNN> cknn(new CLASSIFIERKNN); cknn->set_k(3);
	// wrap the 3-NN classifier to enable its usage as FS criterion (criterion value will be estimated by 3-fold cross-val.)
		boost::shared_ptr<WRAPPERKNN> wknn(new WRAPPERKNN);
		wknn->initialize(cknn,da);
	// set-up the standard sequential search step object (option: hybrid, ensemble)
		boost::shared_ptr<EVALUATOR> eval(new EVALUATOR);
	// set-up Dynamic Oscillating Search procedure
		FST::Search_DOS<RETURNTYPE,DIMTYPE,SUBSET,WRAPPERKNN,EVALUATOR> srch(eval);
		srch.set_delta(3);
	// set-up the regularizing result tracker
		boost::shared_ptr<TRACKER> tracker(new TRACKER);
	// register the result tracker with the used search step object
		eval->enable_result_tracking(tracker);
	// run the search
		std::cout << "Feature selection setup:" << std::endl << *da << std::endl << srch << std::endl << *wknn << std::endl << *tracker << std::endl << std::endl;
		RETURNTYPE critval_train, critval_test;
		srch.set_output_detail(FST::NORMAL); // set FST::SILENT to disable all text output in the course of search (FST::NORMAL is default)
		if(!srch.search(0,critval_train,sub,wknn,std::cout)) throw FST::fst_error("Search not finished.");
	// (optionally) validate result by estimating kNN accuracy on selected feature sub-space on independent test data
		da->setSplittingDepth(0);
		cknn->train(da,sub);
		cknn->test(critval_test,da);
		std::cout << "Validated "<<cknn->get_k()<<"-NN accuracy=" << critval_test << std::endl << std::endl;
	// set-up the secondary criterion (regularization criterion); in this case to minimize subset size
		boost::shared_ptr<CRITSUBSIZE> critsubsiz(new CRITSUBSIZE); //Criterion_Subset_Size does not need to be initialized
		boost::shared_ptr<NEGATIVECRIT> regulcrit(new NEGATIVECRIT(critsubsiz)); //Criterion_Negative does not need to be initialized
	// select final solution among those recorded by tracker (show more alternatives for various margins)
		tracker->set_output_detail(FST::NORMAL); // set FST::SILENT to disable all text output in the course of search (FST::NORMAL is default)
		for(unsigned int i=1; i<10; i++) 
		{
			RETURNTYPE margin=(double)i*0.001;
			da->setSplittingDepth(1); // necessary with criteria that need access to training data
			if(!tracker->optimize_within_margin(margin,critval_train,critval_test,sub,regulcrit)) throw FST::fst_error("tracker->optimize_within_margin() failed.");
			std::cout << std::endl << "Regularized (margin="<<margin<<") result: " << std::endl << *sub << "Criterion value=" << critval_train << std::endl;
		// (optionally) validate result by estimating kNN accuracy on selected feature sub-space on independent test data
			da->setSplittingDepth(0);
			cknn->train(da,sub);
			cknn->test(critval_test,da);
			std::cout << "Validated "<<cknn->get_k()<<"-NN accuracy=" << critval_test << std::endl << std::endl;
		}
	}
	catch(FST::fst_error &e) {std::cerr<<"FST ERROR: "<< e.what() << ", code=" << e.code() << std::endl;}
	catch(std::exception &e) {std::cerr<<"non-FST ERROR: "<< e.what() << std::endl;}
	return 0;
}
void
DotTracker::spin()
{
	int count = 0;
	boost::format fmtWindowTitle ("ViSP Dot tracker - [ns: %s]");
    fmtWindowTitle % ros::this_node::getNamespace ();

    vpDisplayX d(image_, image_.getWidth(), image_.getHeight(),
		 fmtWindowTitle.str().c_str());

	ros::Rate loop_rate_tracking(200);
	bool ok = false;
	vpHomogeneousMatrix cMo;
	vpImagePoint point (10, 10);
	while (!ok && ros::ok())
	{
		
		try
		  {
			//ROS_INFO_STREAM("spin once area");
			vpDisplay::display(image_);
			
			for(uint i=0;i<trackers_.size();i++){
				try{
					if(trackers_status_[i] == TRACKING){
						trackers_[i]->track(image_);
						trackers_[i]->display(image_, vpColor::red);
					}
				}catch(...){
					ROS_ERROR_STREAM("failed to track dot " << i);
					trackers_status_[i] = NOT_TRACKING;
				}
				
			}
			
			int N = points_.size();
			
			double srcData[N*2];
			double dstData[N*2];
			CvMat * src = cvCreateMat( N, 3, CV_64FC1);
			CvMat * dst = cvCreateMat( N, 3, CV_64FC1);
			
			CvMat * mask= cvCreateMat(1, N, CV_8UC1);
			for(int i = 0; i < N; i++ ){
				//model is src
				src->data.db[i*3] = points_[i].X;
				src->data.db[i*3+1] = points_[i].Y;
				src->data.db[i*3+2] = 1;
				
				//screen is dst
				dst->data.db[i*3] = trackers_[i]->getCog().get_i();
				dst->data.db[i*3+1] = trackers_[i]->getCog().get_j();
				dst->data.db[i*3+2] = 1;
				
				//ROS_INFO_STREAM("trackers_[i]->getCog() =" << trackers_[i]->getCog().get_i() << ", " << trackers_[i]->getCog().get_j());	
				//ROS_INFO_STREAM("points_[i] =" << points_[i].X << ", " << points_[i].Y);	
			}
			
				
			
			//cvFindHomography(src, dst, homography_, CV_LMEDS, 0, mask);
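			// Estimate the model-to-image homography with RANSAC; the output mask marks which point correspondences were kept as inliers.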
			cvFindHomography(src, dst, homography_, CV_RANSAC, 10, mask);
			
			std_msgs::Float64MultiArray homography_msg;
			
			//homography_msg.layout.set_dim_size(2);
			homography_msg.layout.dim.push_back(std_msgs::MultiArrayDimension());
			homography_msg.layout.dim[0].label  = "height";
			homography_msg.layout.dim[0].size   = 3;
			homography_msg.layout.dim[0].stride = 3*3;
			homography_msg.layout.dim.push_back(std_msgs::MultiArrayDimension());
			homography_msg.layout.dim[1].label  = "width";
			homography_msg.layout.dim[1].size   = 3;
			homography_msg.layout.dim[1].stride = 3;
			
			homography_msg.data.resize(3*3);
			for (int i = 0; i < homography_->rows; i++){
				for (int j = 0; j < homography_->cols; j++){
					float val = (float)cvGetReal2D(homography_, i, j);
					homography_msg.data[j + 3 * i] = val;
				}	
			}
			
			homography_pub_.publish(homography_msg);
			
			/*
			if(count++ % 50 == 0){
					count =0;
					printMat(homography_);
			}*/
			
			//if(count == 49) return;	
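			// Re-seed every tracker that RANSAC flagged as an outlier (or that was already lost) by projecting its model point through the homography and re-initializing a fresh vpDot2 there.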
						
			for(int i = 0; i < N; i++ ){ 
				//ROS_INFO_STREAM("mask " << i << " = " << mask->data.ptr[i]);	
				
				if(((int)(mask->data.ptr[i])) == 0 || trackers_status_[i] == NOT_TRACKING){
					trackers_status_[i] = NOT_TRACKING;			
					//note we have to transpose
					CvMat * dst2 = cvCreateMat( 3, 1, CV_64FC1); //screen (unkown)
					CvMat * src2 = cvCreateMat( 3, 1, CV_64FC1); //model (known)
					
					src2->data.db[0] = points_[i].X;
					src2->data.db[1] = points_[i].Y;
					src2->data.db[2] = 1.0;
					
					cvMatMul(homography_, src2, dst2);
					
					dst2->data.db[0] /= dst2->data.db[2];
					dst2->data.db[1] /= dst2->data.db[2];
					dst2->data.db[2] /= dst2->data.db[2];
					
					//ROS_INFO_STREAM("trackers_[i]->getCog() =" << trackers_[i]->getCog().get_i() << ", " << trackers_[i]->getCog().get_j());	
					//ROS_INFO_STREAM("for point number: " << i << " model x = " << points_[i].X << " model y = " << points_[i].Y);
					//ROS_INFO_STREAM("setting tracker "<< i << " to x = "<< dst2->data.db[0] << ", y = " << dst2->data.db[1] << "," << dst2->data.db[2]); 	
					
					try{
						//trackers_[i]->initTracking(image_, vpImagePoint(dst2->data.db[0], dst2->data.db[1]) , 15);//dodgy???	
						//trackers_[i]->getCog().set_i(dst2->data.db[0]);
						//trackers_[i]->getCog().set_j(dst2->data.db[1]);
						
						//ROS_INFO_STREAM("setting tracker "<< i << " to x = "<< dst2->data.db[0] << ", y = " << dst2->data.db[1] << "," << dst2->data.db[2]); 	
						
						//cut of any that go out of screen bounds
						if(dst2->data.db[0] < 1) continue;
						if(dst2->data.db[1] < 1) continue;
						if(dst2->data.db[0] > 478) continue; // I have no idea why the x needs to be set to 480 and not 640?
						if(dst2->data.db[1] > 638) continue;
						
						boost::shared_ptr<vpDot2> tracker(new vpDot2());
		
						tracker->setSizePrecision(.1);
						tracker->setEllipsoidShapePrecision(.8);
						tracker->setGrayLevelPrecision(.75);
						
						trackers_[i] = tracker;
						trackers_[i]->initTracking(image_, vpImagePoint(dst2->data.db[0], dst2->data.db[1]) , 15);
						/*
						trackers_[i]->track(image_);
						trackers_[i]->display(image_, vpColor::green);
						*/
						trackers_status_[i] = TRACKING;
					}catch(...){
						ROS_ERROR_STREAM("failed to track dot " << i);
						trackers_status_[i] = NOT_TRACKING;
					}
					
					//trackers_[i]->getCog().set_i(dst2->data.db[0]);
					//trackers_[i]->getCog().set_j(dst2->data.db[1]);
				}
			}
			/*
			ROS_INFO_STREAM("mask=");
			for(int i = 0; i < N; i++ ){
					ROS_INFO_STREAM((int)mask->data.ptr[i]<<" ");
			}
			ROS_INFO("\n");
			for(int i = 0; i < 3; i++ ){
				for(int j = 0; j < 3; j++ ){
				
					ROS_INFO_STREAM(hom_ret->data.db[i + j*3]<<" ");
				}
				ROS_INFO("\n");
			}
			ROS_INFO("\n");
			*/
			vpDisplay::flush(image_);
			ros::spinOnce();
			loop_rate_tracking.sleep();
		  }
		catch(const std::runtime_error& e)
		  {
			ROS_ERROR_STREAM("C failed to initialize: "
					 << e.what() << ", retrying...");
		  }
		catch(const std::string& str)
		  {
			ROS_ERROR_STREAM("B failed to initialize: "
					 << str << ", retrying...");
		  }
		catch(...)
		  {
			ROS_ERROR("A failed to initialize, retrying...");
		  }
	}
}
void DotTracker::pointCorrespondenceCallback(const visp_camera_calibration::CalibPointArrayPtr& msg){
	
	ROS_INFO_STREAM("pointCorrespondenceCallback");
	points_.clear();
	trackers_.clear();
	
	
	std::string modelpoints__x_param(model_prefix_ + "/model_points_x");
	std::string modelpoints__y_param(model_prefix_ + "/model_points_y");
	std::string modelpoints__z_param(model_prefix_ + "/model_points_z");
	
	XmlRpc::XmlRpcValue modelpoints__x_list;
	XmlRpc::XmlRpcValue modelpoints__y_list;
	XmlRpc::XmlRpcValue modelpoints__z_list;
	ros::param::get(modelpoints__x_param,modelpoints__x_list);
	ros::param::get(modelpoints__y_param,modelpoints__y_list);
	ros::param::get(modelpoints__z_param,modelpoints__z_list);
	
	ROS_INFO_STREAM(modelpoints__x_param << " value " << modelpoints__x_list);
	ROS_INFO_STREAM(modelpoints__y_param << " value " << modelpoints__y_list);
	ROS_INFO_STREAM(modelpoints__z_param << " value " << modelpoints__z_list);
	
	ROS_ASSERT(modelpoints__x_list.getType() == XmlRpc::XmlRpcValue::TypeArray);
	ROS_ASSERT(modelpoints__x_list.size() == modelpoints__y_list.size() && modelpoints__x_list.size()==modelpoints__z_list.size());
	for(int i=0;i<modelpoints__x_list.size();i++){
		ROS_ASSERT(modelpoints__x_list[i].getType() == XmlRpc::XmlRpcValue::TypeDouble);
		ROS_ASSERT(modelpoints__y_list[i].getType() == XmlRpc::XmlRpcValue::TypeDouble);
		ROS_ASSERT(modelpoints__z_list[i].getType() == XmlRpc::XmlRpcValue::TypeDouble);
				
		visp_camera_calibration::CalibPoint element = visp_camera_calibration::CalibPoint();
		
		
		element.X = modelpoints__x_list[i];
		element.Y = modelpoints__y_list[i];
		element.Z = modelpoints__z_list[i];
		
		ROS_INFO_STREAM("initialized"<< element.X << "," << element.Y << "," << element.Z); 
		
		points_.push_back(element);
		
		
		//ROS_INFO_STREAM("element i=" << element.i << " j=" << element.j);
		
		boost::shared_ptr<vpDot2> tracker(new vpDot2());
		
		tracker->setSizePrecision(.1);
		tracker->setEllipsoidShapePrecision(.8);
		tracker->setGrayLevelPrecision(.65);
		trackers_.push_back(tracker);
		trackers_status_.push_back(NOT_TRACKING);
	}
	
	//now we have all the trackers for each model point, let us initialize trackers based on
	//supplied calibration data
	
	for(uint i=0;i<msg->points.size();i++){
		visp_camera_calibration::CalibPoint element = msg->points[i];
		
		ROS_INFO_STREAM("looking for i=" << element.i << " j=" << element.j << "," << element.X << "," << element.Y << "," << element.Z); 
		
		for(uint j=0;j<points_.size();j++){
			visp_camera_calibration::CalibPoint element2 = points_[j];
			
			if( epsilonEquals(element.X, element2.X, 0.001) && 	
				epsilonEquals(element.Y, element2.Y, 0.001) &&
				epsilonEquals(element.Z, element2.Z, 0.001)){
				
				//we found (epsilon) identical point
				ROS_INFO_STREAM("found i=" << element2.i << " j=" << element2.j << "," << element2.X << "," << element2.Y << "," << element2.Z); 
				try{
					trackers_[j]->initTracking(image_, vpImagePoint(element.i, element.j) , 10);//dodgy???
					trackers_status_[j] = TRACKING;
				}catch(...){
					ROS_ERROR_STREAM("failed to initialize dot " << j);
					trackers_status_[j] = NOT_TRACKING;
				}
			}
		}
	}
}
Example #14
void GlobalPositionRcdWrite::analyze(const edm::Event& evt, const edm::EventSetup& iSetup)
{
   if (nEventCalls_ > 0) {
     std::cout << "Writing to DB to be done only once, "
	       << "set 'untracked PSet maxEvents = {untracked int32 input = 1}'."
	       << "(Your writing should be fine.)" << std::endl;
     return;
   }
   
   Alignments* globalPositions = new Alignments();
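   // One AlignTransform per top-level subdetector (Tracker, Muon, Ecal, Hcal, Calo): a translation plus a rotation, given either as a matrix built from the three angles or as raw Euler angles, keyed by the subdetector's DetId.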

   AlignTransform tracker(AlignTransform::Translation(m_tracker.getParameter<double>("x"),
						      m_tracker.getParameter<double>("y"),
						      m_tracker.getParameter<double>("z")),
			  (m_useEulerAngles != true) ?
			    this->toMatrix(m_tracker.getParameter<double>("alpha"),
					   m_tracker.getParameter<double>("beta"),
					   m_tracker.getParameter<double>("gamma"))
			  :
			    AlignTransform::EulerAngles(m_tracker.getParameter<double>("alpha"),
							m_tracker.getParameter<double>("beta"),
							m_tracker.getParameter<double>("gamma")),
			  DetId(DetId::Tracker).rawId());
   
   AlignTransform muon(AlignTransform::Translation(m_muon.getParameter<double>("x"),
						   m_muon.getParameter<double>("y"),
						   m_muon.getParameter<double>("z")),
		       (m_useEulerAngles != true) ?
		         this->toMatrix(m_muon.getParameter<double>("alpha"),
					m_muon.getParameter<double>("beta"),
					m_muon.getParameter<double>("gamma"))
		       :
		         AlignTransform::EulerAngles(m_muon.getParameter<double>("alpha"),
						     m_muon.getParameter<double>("beta"),
						     m_muon.getParameter<double>("gamma")),
		       DetId(DetId::Muon).rawId());
   
   AlignTransform ecal(AlignTransform::Translation(m_ecal.getParameter<double>("x"),
						   m_ecal.getParameter<double>("y"),
						   m_ecal.getParameter<double>("z")),
		       (m_useEulerAngles != true) ?
		         this->toMatrix(m_ecal.getParameter<double>("alpha"),
					m_ecal.getParameter<double>("beta"),
					m_ecal.getParameter<double>("gamma"))
		       :
		         AlignTransform::EulerAngles(m_ecal.getParameter<double>("alpha"),
						     m_ecal.getParameter<double>("beta"),
						     m_ecal.getParameter<double>("gamma")),
		       DetId(DetId::Ecal).rawId());

   AlignTransform hcal(AlignTransform::Translation(m_hcal.getParameter<double>("x"),
						   m_hcal.getParameter<double>("y"),
						   m_hcal.getParameter<double>("z")),
		       (m_useEulerAngles != true) ?
		         this->toMatrix(m_hcal.getParameter<double>("alpha"),
					m_hcal.getParameter<double>("beta"),
					m_hcal.getParameter<double>("gamma"))
		       :
		         AlignTransform::EulerAngles(m_hcal.getParameter<double>("alpha"),
						     m_hcal.getParameter<double>("beta"),
						     m_hcal.getParameter<double>("gamma")),
		       DetId(DetId::Hcal).rawId());

   AlignTransform calo(AlignTransform::Translation(m_calo.getParameter<double>("x"),
						   m_calo.getParameter<double>("y"),
						   m_calo.getParameter<double>("z")),
		       (m_useEulerAngles != true) ?
		         this->toMatrix(m_calo.getParameter<double>("alpha"),
					m_calo.getParameter<double>("beta"),
					m_calo.getParameter<double>("gamma"))
		       :
		         AlignTransform::EulerAngles(m_calo.getParameter<double>("alpha"),
						     m_calo.getParameter<double>("beta"),
						     m_calo.getParameter<double>("gamma")),
		       DetId(DetId::Calo).rawId());

   std::cout << "\nProvided rotation angles are interpreted as "
	     << ((m_useEulerAngles != true) ? "rotations around X, Y and Z" : "Euler angles")
	     << ".\n" << std::endl;
   
   std::cout << "Tracker (" << tracker.rawId() << ") at " << tracker.translation() 
	     << " " << tracker.rotation().eulerAngles() << std::endl;
   std::cout << tracker.rotation() << std::endl;

   std::cout << "Muon (" << muon.rawId() << ") at " << muon.translation() 
	     << " " << muon.rotation().eulerAngles() << std::endl;
   std::cout << muon.rotation() << std::endl;

   std::cout << "Ecal (" << ecal.rawId() << ") at " << ecal.translation() 
	     << " " << ecal.rotation().eulerAngles() << std::endl;
   std::cout << ecal.rotation() << std::endl;

   std::cout << "Hcal (" << hcal.rawId() << ") at " << hcal.translation()
	     << " " << hcal.rotation().eulerAngles() << std::endl;
   std::cout << hcal.rotation() << std::endl;

   std::cout << "Calo (" << calo.rawId() << ") at " << calo.translation()
	     << " " << calo.rotation().eulerAngles() << std::endl;
   std::cout << calo.rotation() << std::endl;

   globalPositions->m_align.push_back(tracker);
   globalPositions->m_align.push_back(muon);
   globalPositions->m_align.push_back(ecal);
   globalPositions->m_align.push_back(hcal);
   globalPositions->m_align.push_back(calo);

   std::cout << "Uploading to the database..." << std::endl;

   edm::Service<cond::service::PoolDBOutputService> poolDbService;

   if (!poolDbService.isAvailable())
      throw cms::Exception("NotAvailable") << "PoolDBOutputService not available";
                  
//    if (poolDbService->isNewTagRequest("GlobalPositionRcd")) {
//       poolDbService->createNewIOV<Alignments>(&(*globalPositions), poolDbService->endOfTime(), "GlobalPositionRcd");
//    } else {
//       poolDbService->appendSinceTime<Alignments>(&(*globalPositions), poolDbService->currentTime(), "GlobalPositionRcd");
//    }
   poolDbService->writeOne<Alignments>(&(*globalPositions), 
				       poolDbService->currentTime(),
				       //poolDbService->beginOfTime(),
                                       "GlobalPositionRcd");



   std::cout << "done!" << std::endl;
   nEventCalls_++;
}
Example #15
void Cbt_tracker_account::dump(Cstream_writer& w) const
{
	w.write_data(tracker());
	w.write_data(user());
	w.write_data(pass());
}
Example #16
int
main(int argc, char** argv)
{
  // Setup the parameters to use OnlineBoosting or MILTrack as the underlying tracking algorithm
  cv::ObjectTrackerParams params;
#if 0
  params.algorithm_ = cv::ObjectTrackerParams::CV_ONLINEBOOSTING;
  //params.algorithm_ = cv::ObjectTrackerParams::CV_SEMIONLINEBOOSTING;
  params.num_classifiers_ = 100;
  params.overlap_ = 0.99f;
  params.search_factor_ = 2;
#else
  params.algorithm_ = cv::ObjectTrackerParams::CV_ONLINEMIL;
  params.num_classifiers_ = 50;
  params.num_features_ = 250;
#endif

  // Instantiate an object tracker
  cv::ObjectTracker tracker(params);

  // Read in a sequence of images from disk as the video source
  const char* directory = "data/david";
  const int start = 1;
  const int stop = 462;
  const int delta = 1;
  const char* prefix = "img";
  const char* suffix = "png";
  char filename[1024];

  // Some book-keeping
  bool is_tracker_initialized = false;
  CvRect init_bb = cvRect(122, 58, 75, 97); // the initial tracking bounding box

  /* const char* cascade_name = "haarcascade_frontalface_alt_tree.xml";
   const int minsz = 20;
   if( Tracker::facecascade.empty() )
   Tracker::facecascade.load(cascade_name);

   cv::Mat gray;
   cv::cvtColor(frame, gray, CV_BGR2GRAY);
   cv::equalizeHist(gray, gray);

   std::vector<cv::Rect> faces;
   facecascade.detectMultiScale(gray, faces, 1.05, 3, CV_HAAR_DO_CANNY_PRUNING ,cvSize(minsz, minsz));

   bool is_good = false;
   cv::Rect r;
   for (int index = faces.size() - 1; index >= 0; --index)
   {
   r = faces[index];
   if (r.width < minsz || r.height < minsz || (r.y + r.height + 10) > frame.rows || (r.x + r.width) > frame.cols
   || r.y < 0 || r.x < 0)
   continue;
   is_good = true;
   break;
   }
   */

  cv::Rect theTrack;
  bool tracker_failed = false;

  // Read in images one-by-one and track them
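  // The first frame initializes the tracker on the hard-coded bounding box; later frames update it and draw the current track, switching the box color when an update fails.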
  cv::namedWindow("Tracker Display", cv::WINDOW_NORMAL);
  for (int frame = start; frame <= stop; frame += delta)
  {
    sprintf(filename, "%s/%s%05d.%s", directory, prefix, frame, suffix);
    cv::Mat image = cv::imread(filename);
    if (image.empty())
    {
      std::cerr << "Error loading image file: " << filename << "!\n" << std::endl;
      break;
    }

    // Initialize/Update the tracker
    if (!is_tracker_initialized)
    {
      // Initialize the tracker
      if (!tracker.initialize(image, init_bb))
      {
        // If it didn't work for some reason, exit now
        std::cerr << "\n\nCould not initialize the tracker!  Exiting early...\n" << std::endl;
        break;
      }

      // Store the track for display
      theTrack = init_bb;
      tracker_failed = false;

      // Now it's initialized
      is_tracker_initialized = true;
      std::cout << std::endl;
      continue;
    }
    else
    {
      // Update the tracker
      if (!tracker.update(image, theTrack))
      {
        std::cerr << "\rCould not update tracker (" << frame << ")";
        tracker_failed = true;
      }
      else
      {
        tracker_failed = false;
      }
    }

    // Display the tracking box
    CvScalar box_color;
    if (tracker_failed)
    {
      box_color = cv::Scalar(255, 0, 0);
    }
    else
    {
      box_color = cv::Scalar(255, 255, 0);
    }
    cv::rectangle(image, cvPoint(theTrack.x, theTrack.y),
                  cvPoint(theTrack.x + theTrack.width - 1, theTrack.y + theTrack.height - 1), box_color, 2);

    // Display the new image
    cv::imshow("Tracker Display", image);

    // Check if the user wants to exit early
    int key = cv::waitKey(1);
    if (key == 'q' || key == 'Q')
    {
      break;
    }
  }

  // Exit application
  std::cout << std::endl;
  return 0;
}
Example #17
/*! \section example40 Example 40: Exhaustive (optimal) feature selection.

Selects features exhaustively, i.e., evaluates all possible feature combinations. This approach
is guaranteed to find the optimum with respect to the chosen criterion, but its exponential time
complexity renders it prohibitive for even moderately dimensioned tasks. Here it is demonstrated
on 15-dimensional data with 3-NN (based on L1.5 distance) wrapper classification accuracy as FS criterion - note how
time consuming the computation is even for a relatively low-dimensional case. Classification accuracy
(i.e., FS wrapper criterion value) is estimated on the first 50% of data samples by means of 3-fold cross-validation.
The final classification performance on the selected subspace is eventually validated on the second 50% of data. 
Exhaustive search is called here in d-optimizing setting, invoked by parameter 0 in search(0,...), which is 
otherwise used to specify the required subset size. Optional result tracking is employed here to reveal
duplicate solutions yielding the same maximum criterion value (see also \ref example60).
*/
int main()
{
	try{
	typedef double RETURNTYPE; 	typedef double DATATYPE;  typedef double REALTYPE;
	typedef unsigned int IDXTYPE;  typedef unsigned int DIMTYPE;  typedef short BINTYPE;
	typedef FST::Subset<BINTYPE, DIMTYPE> SUBSET;
	typedef FST::Data_Intervaller<std::vector<FST::Data_Interval<IDXTYPE> >,IDXTYPE> INTERVALLER;
	typedef boost::shared_ptr<FST::Data_Splitter<INTERVALLER,IDXTYPE> > PSPLITTER;
	typedef FST::Data_Splitter_CV<INTERVALLER,IDXTYPE> SPLITTERCV;
	typedef FST::Data_Splitter_5050<INTERVALLER,IDXTYPE> SPLITTER5050;
	typedef FST::Data_Accessor_Splitting_MemTRN<DATATYPE,IDXTYPE,INTERVALLER> DATAACCESSOR; // uncomment for TRN data format
	//typedef FST::Data_Accessor_Splitting_MemARFF<DATATYPE,IDXTYPE,INTERVALLER> DATAACCESSOR; // uncomment for ARFF data format
	typedef FST::Distance_Lp<DATATYPE,REALTYPE,DIMTYPE,SUBSET,3,2> DISTANCE;
	typedef FST::Classifier_kNN<RETURNTYPE,DATATYPE,IDXTYPE,DIMTYPE,SUBSET,DATAACCESSOR,DISTANCE> CLASSIFIERKNN;
	typedef FST::Criterion_Wrapper<RETURNTYPE,SUBSET,CLASSIFIERKNN,DATAACCESSOR> WRAPPERKNN;
	typedef FST::Result_Tracker_Dupless<RETURNTYPE,IDXTYPE,DIMTYPE,SUBSET> TRACKER;

		std::cout << "Starting Example 40: Exhaustive (optimal) feature selection..." << std::endl;
	// keep second half of data for independent testing of final classification performance
		PSPLITTER dsp_outer(new SPLITTER5050());
	// in the course of search use the first half of data by 3-fold cross-validation in wrapper FS criterion evaluation
		PSPLITTER dsp_inner(new SPLITTERCV(3));
	// do not scale data
		boost::shared_ptr<FST::Data_Scaler<DATATYPE> > dsc(new FST::Data_Scaler_void<DATATYPE>());
	// set-up data access
		boost::shared_ptr<std::vector<PSPLITTER> > splitters(new std::vector<PSPLITTER>); 
		splitters->push_back(dsp_outer); splitters->push_back(dsp_inner);
		boost::shared_ptr<DATAACCESSOR> da(new DATAACCESSOR("data/speech_15.trn",splitters,dsc));
		da->initialize();
	// initiate access to split data parts
		da->setSplittingDepth(0); if(!da->getFirstSplit()) throw FST::fst_error("50/50 data split failed.");
		da->setSplittingDepth(1); if(!da->getFirstSplit()) throw FST::fst_error("3-fold cross-validation failure.");
	// initiate the storage for subset to-be-selected
		boost::shared_ptr<SUBSET> sub(new SUBSET(da->getNoOfFeatures()));  sub->deselect_all();
	// set-up 3-Nearest Neighbor classifier based on the L1.5 distance defined above
		boost::shared_ptr<CLASSIFIERKNN> cknn(new CLASSIFIERKNN); cknn->set_k(3);
	// wrap the 3-NN classifier to enable its usage as FS criterion (criterion value will be estimated by 3-fold cross-val.)
		boost::shared_ptr<WRAPPERKNN> wknn(new WRAPPERKNN);
		wknn->initialize(cknn,da);
	// set-up Exhaustive Search procedure
		FST::Search_Exhaustive<RETURNTYPE,DIMTYPE,SUBSET,WRAPPERKNN> srch;
	// set-up result tracker to enable logging of candidate solutions, ordered descending by value 
	// (optionally limit the number of kept records to 50000 highest valued to prevent memory exhaustion due to possibly excessive number of candidates)
		boost::shared_ptr<TRACKER> tracker(new TRACKER(50000));
	// let the tracker register only solutions no worse than "the best known criterion value minus 0.05"
		tracker->set_inclusion_margin(0.05);
	// register the result tracker with the used search object
		srch.enable_result_tracking(tracker);
	// run the search
		std::cout << "Feature selection setup:" << std::endl << *da << std::endl << srch << std::endl << *wknn << std::endl << std::endl;
		RETURNTYPE critval_train, critval_test;
		srch.set_output_detail(FST::NORMAL); // set FST::SILENT to disable all text output in the course of search (FST::NORMAL is default)
		if(!srch.search(0,critval_train,sub,wknn,std::cout)) throw FST::fst_error("Search not finished.");
	// (optionally) validate result by estimating kNN accuracy on selected feature sub-space on independent test data
		da->setSplittingDepth(0);
		cknn->train(da,sub);
		cknn->test(critval_test,da);
		std::cout << "Validated "<<cknn->get_k()<<"-NN accuracy=" << critval_test << std::endl << std::endl;
	// report tracker contents
		std::cout << "Result tracker records " << tracker->size(0.0) << " solutions with criterion value equal to " << critval_train << "." << std::endl << std::endl;
		for(unsigned int i=1;i<10;i++) std::cout << "Result tracker records " << tracker->size((double)i*0.005) << " solutions with criterion value greater or equal to " << critval_train-(double)i*0.005 << "." << std::endl << std::endl;
		TRACKER::PResultRec result;
		if(tracker->get_first(result) && tracker->size(0.0)>1) 
		{
			RETURNTYPE firstvalue=result->value;
			std::cout << "All recorded feature subsets yielding the same best known criterion value " << firstvalue << ":" << std::endl;
			while(tracker->get_next(result) && result->value==firstvalue) std::cout << *(result->sub) << std::endl;
		}
	}
	catch(FST::fst_error &e) {std::cerr<<"FST ERROR: "<< e.what() << ", code=" << e.code() << std::endl;}
	catch(std::exception &e) {std::cerr<<"non-FST ERROR: "<< e.what() << std::endl;}
	return 0;
}
Example #18
int main(int argc, char* argv[]){

    std::string paramFileName = "param.txt";
    if(argc > 1){
        paramFileName = std::string(argv[1]);
    }

    cv::namedWindow("track", CV_WINDOW_NORMAL);
    cv::namedWindow("skin", CV_WINDOW_NORMAL);
    cv::VideoCapture vc(0);
    int h = 600;
    int w = 600;

    HandDetectorHist myHandDetector("track");
    //HandDetector myHandDetector;
    Tracker tracker(paramFileName);

    std::vector<std::pair<float, float>> samples;
    cv::Mat img, subImg, binimg;
    while(cv::waitKey(5) != 'q'){
        vc >> img ;
        subImg = img(cv::Range(0,h), cv::Range(0,w));
        cv::flip(subImg, subImg, 1);

        binimg = myHandDetector.findHand(subImg);

        samples.clear();
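        // Collect the (x, y) coordinates of every skin-classified pixel (value > 128 in the binary mask) as samples for the tracker.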

        for(int i=0 ; i<binimg.rows ; ++i){
            for(int j=0 ; j<binimg.cols ; ++j){
                if(binimg.at<unsigned char>(i,j) > 128)
                {
                    samples.push_back(std::make_pair(float(j),float(i)));
                }
            }
        }
        tracker.newFrame(samples.begin(), samples.end());
        tracker.draw(subImg, Display::MESH);

        cv::imshow("track", subImg);
        cv::imshow("skin", binimg);
    }
    std::vector<float> x= tracker.retvalue();
    for (int d=0; d<x.size(); d++) {
        printf("%f ",x[d]);
    }
    printf("\n");
    std::vector<float> y= tracker.retvalue1();
    for (int d=0; d<y.size(); d++) {
        printf("%f ",y[d]);
    }
    std::ofstream myfile;
    myfile.open("ab.csv",std::ios_base::app);
    for (int d=0; d<x.size(); d++)
    {
        myfile<<x[d]<<","<<y[d]<<",";
    }
    myfile<<"\n";
    myfile.close();
    return 0;
}
Example #19
	void GL3FrameBufferProvider::attach_depth_stencil(const RenderBuffer &render_buffer)
	{
		FrameBufferStateTracker tracker(bind_target, handle, gc_provider);
		attach_object(GL_DEPTH_STENCIL_ATTACHMENT, render_buffer);
	}
Example #20
bool ClassificationData::loadDatasetFromCSVFile(const std::string &filename,const UINT classLabelColumnIndex){

    numDimensions = 0;
    datasetName = "NOT_SET";
    infoText = "";

    //Clear any previous data
    clear();

    //Parse the CSV file
    FileParser parser;

    Timer timer;

    timer.start();
    
    if( !parser.parseCSVFile(filename,true) ){
        errorLog << "loadDatasetFromCSVFile(const std::string &filename,const UINT classLabelColumnIndex) - Failed to parse CSV file!" << std::endl;
        return false;
    }
    
    if( !parser.getConsistentColumnSize() ){
        errorLog << "loadDatasetFromCSVFile(const std::string &filename,const UINT classLabelColumnIndex) - The CSV file does not have a consistent number of columns!" << std::endl;
        return false;
    }
    
    if( parser.getColumnSize() <= 1 ){
        errorLog << "loadDatasetFromCSVFile(const std::string &filename,const UINT classLabelColumnIndex) - The CSV file does not have enough columns! It should contain at least two columns!" << std::endl;
        return false;
    }
    
    //Set the number of dimensions
    numDimensions = parser.getColumnSize()-1;

    timer.start();

    //Reserve the memory for the data
    data.resize( parser.getRowSize(), ClassificationSample(numDimensions) );

    timer.start();
   
    //Loop over the samples and add them to the data set
    UINT classLabel = 0;
    UINT j = 0;
    UINT n = 0;
    totalNumSamples = parser.getRowSize();
    for(UINT i=0; i<totalNumSamples; i++){
        //Get the class label
        classLabel = grt_from_str< UINT >( parser[i][classLabelColumnIndex] );
        
        //Set the class label
        data[i].setClassLabel( classLabel );
        
        //Get the sample data
        j=0;
        n=0;
        while( j != numDimensions ){
            if( n != classLabelColumnIndex ){
                data[i][j++] = grt_from_str< Float >( parser[i][n] );
            }
            n++;
        }
        
        //Update the class tracker
        if( classTracker.size() == 0 ){
            ClassTracker tracker(classLabel,1);
            classTracker.push_back(tracker);
        }else{
            bool labelFound = false;
            const size_t numClasses = classTracker.size();
            for(size_t i=0; i<numClasses; i++){
                if( classLabel == classTracker[i].classLabel ){
                    classTracker[i].counter++;
                    labelFound = true;
                    break;
                }
            }
            if( !labelFound ){
                ClassTracker tracker(classLabel,1);
                classTracker.push_back(tracker);
            }
        }
    }

    //Sort the class labels
    sortClassLabels();
    
    return true;
}
Example #21
	void GL3FrameBufferProvider::attach_depth_stencil(const TextureCube &texture, TextureSubtype subtype, int level)
	{
		FrameBufferStateTracker tracker(bind_target, handle, gc_provider);
		attach_object(GL_DEPTH_STENCIL_ATTACHMENT, texture, level, 0, decode_texture_subtype(subtype));
	}
Example #22
int main( int argc, char* argv[] )
{
	// ARGUMENT PARSING //
    osg::ArgumentParser args( &argc, argv );
    args.getApplicationUsage()->setApplicationName( args.getApplicationName() );
    args.getApplicationUsage()->setDescription(args.getApplicationName()+" demonstrates osgVRPN");
    args.getApplicationUsage()->setCommandLineUsage(args.getApplicationName()+" [options]");
    args.getApplicationUsage()->addCommandLineOption("-h or --help","Display this information");
    args.getApplicationUsage()->addKeyboardMouseBinding( "1", "Tracker manipulator" );
    args.getApplicationUsage()->addKeyboardMouseBinding( "2", "Trackball manipulator" );

	// INITIALIZE TRACKER //
	osgVRPN::Analog* analog( new osgVRPN::Analog( "Wanda@localhost" ) );
	osgVRPN::Button* button( new osgVRPN::Button( "Wanda@localhost" ) );
	osgVRPN::VRTracker* tracker( new osgVRPN::VRTracker( "Tracker0@localhost", 1 ) );	// ID for Flock HMD
    osgVRPN::VRManipulator* manip( new osgVRPN::VRManipulator( tracker ) );
	manip->setAutoComputeHomePosition( false );

	osgVRPN::DeviceBundle::Instance()->addAnalog( analog );
	osgVRPN::DeviceBundle::Instance()->addButton( button );
	osgVRPN::DeviceBundle::Instance()->addTracker( tracker );
	osgVRPN::DeviceBundle::Instance()->addManipulator( manip );
	osgVRPN::DevicesCallback* devices( new osgVRPN::DevicesCallback( osgVRPN::DeviceBundle::Instance() ) );

	// INITIALIZE CAMERA //
    osgViewer::Viewer viewer;
    viewer.getUsage( *args.getApplicationUsage() );
    if (args.read("-h") || args.read("--help"))
    {
        args.getApplicationUsage()->write( std::cout );
        return 1;
    }

    osg::StateSet* ss( viewer.getCamera()->getOrCreateStateSet() );
    viewer.addEventHandler( new osgGA::StateSetManipulator( ss ) );
    viewer.addEventHandler( new osgViewer::HelpHandler( args.getApplicationUsage() ) );
    viewer.addEventHandler( new osgViewer::ThreadingHandler );
    viewer.addEventHandler( new osgViewer::WindowSizeHandler );
    viewer.addEventHandler( new osgViewer::StatsHandler );
    viewer.addEventHandler( new osgViewer::LODScaleHandler );
    viewer.addEventHandler( new osgViewer::ScreenCaptureHandler );
	viewer.addEventHandler( new osgVRPN::WalledModeHandler( manip ) );

    osgGA::KeySwitchMatrixManipulator* ksm( new osgGA::KeySwitchMatrixManipulator );
    viewer.setCameraManipulator( ksm );

	// CREATE SCENE //
	osg::Group* scene( new osg::Group );
	osg::Node*  world( new osg::Node );

    if( args.argc() > 1 )
    {
        world = osgDB::readNodeFiles(args);
    }
	else
	{
		world = osgDB::readNodeFile( "Images/stadium.ive.[-90,180,0].rot.[-1500,-250,4200].trans" );
	}
	scene->addChild( world );
	scene->addUpdateCallback( devices );

	// SET HOME MATRIX AND ADD MANIPULATORS //
    ksm->addMatrixManipulator( '1', "Tracker", manip );
    ksm->addMatrixManipulator( '2', "Trackball", new osgGA::TrackballManipulator() );

	// Increase the field of view so we can see a good portion of the stadium.
	double fovy, aspectRatio, zNear, zFar;
	osg::ref_ptr<osg::Camera> camera = viewer.getCamera();

	camera->setComputeNearFarMode(osg::CullSettings::DO_NOT_COMPUTE_NEAR_FAR );
	camera->getProjectionMatrixAsPerspective(fovy, aspectRatio, zNear, zFar);
	fovy *= 2.5;  /*aspectRatio *= 2;*/  zNear *= 5;  zFar *= 5;
	camera->setProjectionMatrixAsPerspective(fovy, aspectRatio, zNear, zFar);

	// Set the model.  Lighting is terrible, but osg::View::NO_LIGHT 
	// seems to work.
    osgUtil::Optimizer optimizer;
    optimizer.optimize( scene );
    viewer.setSceneData( scene );
	viewer.setLightingMode( osg::View::NO_LIGHT );

	// VRManipulator navigation functions have Y up.
	viewer.frame(); 
	osg::Vec3 eye( 0, 0, -1 );
	osg::Vec3 center( 0, 0, 0 );
	osg::Vec3 up( 0, 1, 0 );
	osg::Matrix matrix;
	
	matrix.makeLookAt( eye, center, up );
	manip->setHomeMatrix( matrix );

	// MAIN LOOP //
	unsigned int numAnalogs = 0;
	float analogVal = 0.0f;
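	// Each frame, poll the first analog channel and turn joystick deflection beyond the 0.2 dead zone into a rotation of the manipulator about the Y (up) axis.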

	while(!viewer.done())
	{
		if ( numAnalogs >= 1 )
		{	
			// Allow for a certain tolerance in moving the joystick.
			analogVal = osgVRPN::DeviceBundle::Instance()->getValue( 0, 0 );

			if ( analogVal > 0.2f || analogVal < -0.2f )
			{
				manip->navRotate( osg::DegreesToRadians( -analogVal ), osg::Vec3( 0, 1, 0 ) );
			}
		}
		
		numAnalogs = osgVRPN::DeviceBundle::Instance()->getNumChannels( 0 );

		viewer.frame();
	}

	return 0;	

} 
void
ApplicationTypesWindow::_SetType(BMimeType* type, int32 forceUpdate)
{
	bool enabled = type != NULL;
	bool appFound = true;

	// update controls

	if (type != NULL) {
		if (fCurrentType == *type) {
			if (!forceUpdate)
				return;
		} else
			forceUpdate = B_EVERYTHING_CHANGED;

		if (&fCurrentType != type)
			fCurrentType.SetTo(type->Type());

		fSignatureView->SetText(type->Type());

		char description[B_MIME_TYPE_LENGTH];

		if ((forceUpdate & B_SHORT_DESCRIPTION_CHANGED) != 0) {
			if (type->GetShortDescription(description) != B_OK)
				description[0] = '\0';
			fNameView->SetText(description);
		}

		entry_ref ref;
		if ((forceUpdate & B_APP_HINT_CHANGED) != 0
			&& be_roster->FindApp(fCurrentType.Type(), &ref) == B_OK) {
			// Set launch message
			BMessenger tracker("application/x-vnd.Be-TRAK");
			BMessage* message = new BMessage(B_REFS_RECEIVED);
			message->AddRef("refs", &ref);

			fLaunchButton->SetMessage(message);
			fLaunchButton->SetTarget(tracker);

			// Set path
			BPath path(&ref);
			path.GetParent(&path);
			fPathView->SetText(path.Path());

			// Set "Show In Tracker" message
			BEntry entry(path.Path());
			entry_ref directoryRef;
			if (entry.GetRef(&directoryRef) == B_OK) {
				BMessenger tracker("application/x-vnd.Be-TRAK");
				message = new BMessage(B_REFS_RECEIVED);
				message->AddRef("refs", &directoryRef);

				fTrackerButton->SetMessage(message);
				fTrackerButton->SetTarget(tracker);
			} else {
				fTrackerButton->SetMessage(NULL);
				appFound = false;
			}
		}

		if (forceUpdate == B_EVERYTHING_CHANGED) {
			// update version information

			BFile file(&ref, B_READ_ONLY);
			if (file.InitCheck() == B_OK) {
				BAppFileInfo appInfo(&file);
				version_info versionInfo;
				if (appInfo.InitCheck() == B_OK
					&& appInfo.GetVersionInfo(&versionInfo, B_APP_VERSION_KIND)
						== B_OK) {
					char version[256];
					snprintf(version, sizeof(version),
						"%" B_PRIu32 ".%" B_PRIu32 ".%" B_PRIu32 ", %s/%" B_PRIu32,
						versionInfo.major, versionInfo.middle,
						versionInfo.minor,
						variety_to_text(versionInfo.variety),
						versionInfo.internal);
					fVersionView->SetText(version);
					fDescriptionView->SetText(versionInfo.long_info);
				} else {
					fVersionView->SetText(NULL);
					fDescriptionView->SetText(NULL);
				}
			}
		}
	} else {
		fNameView->SetText(NULL);
		fSignatureView->SetText(NULL);
		fPathView->SetText(NULL);

		fVersionView->SetText(NULL);
		fDescriptionView->SetText(NULL);
	}

	fNameView->SetEnabled(enabled);
	fSignatureView->SetEnabled(enabled);
	fPathView->SetEnabled(enabled);

	fVersionView->SetEnabled(enabled);
	fDescriptionLabel->SetEnabled(enabled);

	fTrackerButton->SetEnabled(enabled && appFound);
	fLaunchButton->SetEnabled(enabled && appFound);
	fEditButton->SetEnabled(enabled && appFound);
}
Example #24
 //
 // Some non-template functions
 //
 void doTrace(const char *file, unsigned line, const char *message) {
   tracker().trace(file, line, message);
 }
Example #25
int main(int argc, char** argv)
{
#if defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100) || defined(VISP_HAVE_FFMPEG)
  std::string videoname = "bruegel.mpg";

  for (int i=0; i<argc; i++) {
    if (std::string(argv[i]) == "--videoname")
      videoname = std::string(argv[i+1]);
    else if (std::string(argv[i]) == "--help") {
      std::cout << "\nUsage: " << argv[0] << " [--videoname <video name>] [--help]\n" << std::endl;
      return 0;
    }
  }

  std::cout << "Video name: " << videoname << std::endl;

  vpImage<unsigned char> I;

  vpVideoReader g;
  g.setFileName(videoname);
  g.open(I);

#if defined(VISP_HAVE_X11)
  vpDisplayX display;
#elif defined(VISP_HAVE_GDI)
  vpDisplayGDI display;
#elif defined(VISP_HAVE_OPENCV)
  vpDisplayOpenCV display;
#else
  std::cout << "No image viewer is available..." << std::endl;
#endif

  display.init(I, 100, 100, "Template tracker");
  vpDisplay::display(I);
  vpDisplay::flush(I);

  //! [Construction]
  vpTemplateTrackerWarpHomography warp;
  vpTemplateTrackerSSDInverseCompositional tracker(&warp);
  //! [Construction]

  tracker.setSampling(2, 2);
  tracker.setLambda(0.001);
  tracker.setIterationMax(200);
  tracker.setPyramidal(2, 1);

  //! [Init]
  tracker.initClick(I);
  //! [Init]

  while(1){
    g.acquire(I);
    vpDisplay::display(I);

    //! [Track]
    tracker.track(I);
    //! [Track]

    //! [Homography]
    vpColVector p = tracker.getp();
    vpHomography H = warp.getHomography(p);
    std::cout << "Homography: \n" << H << std::endl;
    //! [Homography]

    //! [Display]
    tracker.display(I, vpColor::red);
    //! [Display]

    if (vpDisplay::getClick(I, false))
      break;

    vpDisplay::flush(I);
  }
#else
  (void)argc;
  (void)argv;
#endif
}
Example #26
 void doWarn(const char *file, unsigned line, const char *message) {
   tracker().warning(file, line, message);
 }
Example No. 27
nsresult
nsAbsoluteContainingBlock::Reflow(nsContainerFrame*        aDelegatingFrame,
                                  nsPresContext*           aPresContext,
                                  const nsHTMLReflowState& aReflowState,
                                  nsReflowStatus&          aReflowStatus,
                                  nscoord                  aContainingBlockWidth,
                                  nscoord                  aContainingBlockHeight,
                                  PRBool                   aConstrainHeight,
                                  PRBool                   aCBWidthChanged,
                                  PRBool                   aCBHeightChanged,
                                  nsOverflowAreas*         aOverflowAreas)
{
  nsReflowStatus reflowStatus = NS_FRAME_COMPLETE;

  PRBool reflowAll = aReflowState.ShouldReflowAllKids();

  nsIFrame* kidFrame;
  nsOverflowContinuationTracker tracker(aPresContext, aDelegatingFrame, PR_TRUE);
  for (kidFrame = mAbsoluteFrames.FirstChild(); kidFrame; kidFrame = kidFrame->GetNextSibling()) {
    PRBool kidNeedsReflow = reflowAll || NS_SUBTREE_DIRTY(kidFrame) ||
      FrameDependsOnContainer(kidFrame, aCBWidthChanged, aCBHeightChanged);
    if (kidNeedsReflow && !aPresContext->HasPendingInterrupt()) {
      // Reflow the frame
      nsReflowStatus  kidStatus = NS_FRAME_COMPLETE;
      ReflowAbsoluteFrame(aDelegatingFrame, aPresContext, aReflowState,
                          aContainingBlockWidth, aContainingBlockHeight,
                          aConstrainHeight, kidFrame, kidStatus,
                          aOverflowAreas);
      nsIFrame* nextFrame = kidFrame->GetNextInFlow();
      if (!NS_FRAME_IS_FULLY_COMPLETE(kidStatus)) {
        // Need a continuation
        if (!nextFrame) {
          nsresult rv = aPresContext->PresShell()->FrameConstructor()->
            CreateContinuingFrame(aPresContext, kidFrame, aDelegatingFrame, &nextFrame);
          NS_ENSURE_SUCCESS(rv, rv);
        }
        // Add it as an overflow container.
        //XXXfr This is a hack to fix some of our printing dataloss.
        // See bug 154892. Not sure how to do it "right" yet; probably want
        // to keep continuations within an nsAbsoluteContainingBlock eventually.
        tracker.Insert(nextFrame, kidStatus);
        NS_MergeReflowStatusInto(&reflowStatus, kidStatus);
      }
      else {
        // Delete any continuations
        if (nextFrame) {
          tracker.Finish(kidFrame);
          static_cast<nsContainerFrame*>(nextFrame->GetParent())
            ->DeleteNextInFlowChild(aPresContext, nextFrame, PR_TRUE);
        }
      }
    }
    else {
      tracker.Skip(kidFrame, reflowStatus);
      if (aOverflowAreas) {
        aDelegatingFrame->ConsiderChildOverflow(*aOverflowAreas, kidFrame);
      }
    }

    // Make a CheckForInterrupt call, here, not just HasPendingInterrupt.  That
    // will make sure that we end up reflowing aDelegatingFrame in cases when
    // one of our kids interrupted.  Otherwise we'd set the dirty or
    // dirty-children bit on the kid in the condition below, and then when
    // reflow completes and we go to mark dirty bits on all ancestors of that
    // kid we'll immediately bail out, because the kid already has a dirty bit.
    // In particular, we won't set any dirty bits on aDelegatingFrame, so when
    // the following reflow happens we won't reflow the kid in question.  This
    // might be slightly suboptimal in cases where |kidFrame| itself did not
    // interrupt, since we'll trigger a reflow of it too when it's not strictly
    // needed.  But the logic to not do that is enough more complicated, and
    // the case enough of an edge case, that this is probably better.
    if (kidNeedsReflow && aPresContext->CheckForInterrupt(aDelegatingFrame)) {
      if (aDelegatingFrame->GetStateBits() & NS_FRAME_IS_DIRTY) {
        kidFrame->AddStateBits(NS_FRAME_IS_DIRTY);
      } else {
        kidFrame->AddStateBits(NS_FRAME_HAS_DIRTY_CHILDREN);
      }
    }
  }

  // Abspos frames can't cause their parent to be incomplete,
  // only overflow incomplete.
  if (NS_FRAME_IS_NOT_COMPLETE(reflowStatus))
    NS_FRAME_SET_OVERFLOW_INCOMPLETE(reflowStatus);

  NS_MergeReflowStatusInto(&aReflowStatus, reflowStatus);
  return NS_OK;
}
Example No. 28
 void doFailTest(const char *file, unsigned line, const char *message) {
   tracker().failedTest(file, line, message);
   TS_ABORT();
 }
Example No. 29
BPopUpMenu*
DeskbarView::_BuildMenu()
{
	BPopUpMenu* menu = new BPopUpMenu(B_EMPTY_STRING, false, false);
	menu->SetFont(be_plain_font);

	menu->AddItem(new BMenuItem(MDR_DIALECT_CHOICE (
		"Create new message", "N) 新規メッセージ作成")B_UTF8_ELLIPSIS,
		new BMessage(MD_OPEN_NEW)));
	menu->AddSeparatorItem();

	BMessenger tracker(kTrackerSignature);
	BNavMenu* navMenu;
	BMenuItem* item;
	BMessage* msg;
	entry_ref ref;

	BPath path;
	find_directory(B_USER_SETTINGS_DIRECTORY, &path);
	path.Append("Mail/Menu Links");

	BDirectory directory;
	if (_CreateMenuLinks(directory, path)) {
		int32 count = 0;

		while (directory.GetNextRef(&ref) == B_OK) {
			count++;

			path.SetTo(&ref);
			// the true here dereferences the symlinks all the way :)
			BEntry entry(&ref, true);

			// do we want to use the NavMenu, or just an ordinary BMenuItem?
			// we are using the NavMenu only for directories and queries
			bool useNavMenu = false;

			if (entry.InitCheck() == B_OK) {
				if (entry.IsDirectory())
					useNavMenu = true;
				else if (entry.IsFile()) {
					// Files should use the BMenuItem unless they are queries
					char mimeString[B_MIME_TYPE_LENGTH];
					BNode node(&entry);
					BNodeInfo info(&node);
					if (info.GetType(mimeString) == B_OK
						&& strcmp(mimeString, "application/x-vnd.Be-query")
							== 0)
						useNavMenu = true;
				}
				// clobber the existing ref only if the symlink dereferences
				// completely, otherwise we'll stick with what we have
				entry.GetRef(&ref);
			}

			msg = new BMessage(B_REFS_RECEIVED);
			msg->AddRef("refs", &ref);

			if (useNavMenu) {
				item = new BMenuItem(navMenu = new BNavMenu(path.Leaf(),
					B_REFS_RECEIVED, tracker), msg);
				navMenu->SetNavDir(&ref);
			} else
				item = new BMenuItem(path.Leaf(), msg);

			menu->AddItem(item);
			if (entry.InitCheck() != B_OK)
				item->SetEnabled(false);
		}
		if (count > 0)
			menu->AddSeparatorItem();
	}

	// Hack for R5's buggy Query Notification
	#ifdef HAIKU_TARGET_PLATFORM_BEOS
		menu->AddItem(new BMenuItem(
			MDR_DIALECT_CHOICE("Refresh New Mail Count",
				"未読メールカウントを更新"),
			new BMessage(MD_REFRESH_QUERY)));
	#endif

	// The New E-mail query

	if (fNewMessages > 0) {
		BString string;
		MDR_DIALECT_CHOICE(
			string << fNewMessages << " new message"
				<< (fNewMessages != 1 ? "s" : B_EMPTY_STRING),
			string << fNewMessages << " 通の未読メッセージ");

		_GetNewQueryRef(ref);

		item = new BMenuItem(navMenu = new BNavMenu(string.String(),
			B_REFS_RECEIVED, BMessenger(kTrackerSignature)),
			msg = new BMessage(B_REFS_RECEIVED));
		msg->AddRef("refs", &ref);
		navMenu->SetNavDir(&ref);

		menu->AddItem(item);
	} else {
		menu->AddItem(item = new BMenuItem(
			MDR_DIALECT_CHOICE ("No new messages","未読メッセージなし"), NULL));
		item->SetEnabled(false);
	}

	BMailAccounts accounts;
	if (modifiers() & B_SHIFT_KEY) {
		BMenu *accountMenu = new BMenu(
			MDR_DIALECT_CHOICE ("Check for mails only","R) メール受信のみ"));
		BFont font;
		menu->GetFont(&font);
		accountMenu->SetFont(&font);

		for (int32 i = 0; i < accounts.CountAccounts(); i++) {
			BMailAccountSettings* account = accounts.AccountAt(i);

			BMessage* message = new BMessage(MD_CHECK_FOR_MAILS);
			message->AddInt32("account", account->AccountID());

			accountMenu->AddItem(new BMenuItem(account->Name(), message));
		}
		if (accounts.CountAccounts() == 0) {
			item = new BMenuItem("<no accounts>", NULL);
			item->SetEnabled(false);
			accountMenu->AddItem(item);
		}
		accountMenu->SetTargetForItems(this);
		menu->AddItem(new BMenuItem(accountMenu,
			new BMessage(MD_CHECK_FOR_MAILS)));

		// Not used:
		// menu->AddItem(new BMenuItem(MDR_DIALECT_CHOICE (
		// "Check For Mails Only","メール受信のみ"), new BMessage(MD_CHECK_FOR_MAILS)));
		menu->AddItem(new BMenuItem(
			MDR_DIALECT_CHOICE ("Send pending mails", "M) 保留メールを送信"),
		new BMessage(MD_SEND_MAILS)));
	} else {
		menu->AddItem(item = new BMenuItem(
			MDR_DIALECT_CHOICE ("Check for mail now", "C) メールチェック"),
			new BMessage(MD_CHECK_SEND_NOW)));
		if (accounts.CountAccounts() == 0)
			item->SetEnabled(false);
	}

	menu->AddSeparatorItem();
	menu->AddItem(new BMenuItem(
		MDR_DIALECT_CHOICE ("Preferences", "P) メール環境設定") B_UTF8_ELLIPSIS,
		new BMessage(MD_OPEN_PREFS)));

	if (modifiers() & B_SHIFT_KEY) {
		menu->AddItem(new BMenuItem(
			MDR_DIALECT_CHOICE ("Shutdown mail services", "Q) 終了"),
			new BMessage(B_QUIT_REQUESTED)));
	}

	// Reset Item Targets (only those which aren't already set)

	for (int32 i = menu->CountItems(); i-- > 0;) {
		item = menu->ItemAt(i);
		if (item && (msg = item->Message()) != NULL) {
			if (msg->what == B_REFS_RECEIVED)
				item->SetTarget(tracker);
			else
				item->SetTarget(this);
		}
	}
	return menu;
}
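
A menu assembled by _BuildMenu() is typically popped up from the replicant's MouseDown() hook. The body below is only a hedged usage sketch built on the standard BPopUpMenu::Go() call; the actual DeskbarView implementation may differ:

void
DeskbarView::MouseDown(BPoint point)
{
	// Hypothetical usage: show the menu at the click position (screen
	// coordinates) and let it deliver the chosen item's message before
	// it is deleted.
	BPopUpMenu* menu = _BuildMenu();
	ConvertToScreen(&point);
	menu->Go(point, true, true);
	delete menu;
}
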
Example No. 30
void flexionFDSimulationWithHitMap(Model& model)
{
	addFlexionController(model);
	//addExtensionController(model);
	//addTibialLoads(model, 0);

    model.setUseVisualizer(true);

	// init system
	std::time_t result = std::time(nullptr);
	std::cout << "\nBefore initSystem() " << std::asctime(std::localtime(&result)) << endl;
	SimTK::State& si = model.initSystem();
	result = std::time(nullptr);
	std::cout << "\nAfter initSystem() " << std::asctime(std::localtime(&result)) << endl;
	
	// set gravity
	//model.updGravityForce().setGravityVector(si, Vec3(-9.80665,0,0));
	//model.updGravityForce().setGravityVector(si, Vec3(0,0,0));
	
	//setHipAngle(model, si, 90);
	//setKneeAngle(model, si, 0, false, false);
	//model.equilibrateMuscles( si);

	MultibodySystem& system = model.updMultibodySystem();
	SimbodyMatterSubsystem& matter = system.updMatterSubsystem();
	GeneralForceSubsystem forces(system);
	ContactTrackerSubsystem  tracker(system);
    CompliantContactSubsystem contactForces(system, tracker);
	contactForces.setTrackDissipatedEnergy(true);
    //contactForces.setTransitionVelocity(1e-3);

	for (int i=0; i < matter.getNumBodies(); ++i) {
		MobilizedBodyIndex mbx(i);
		if (i==19 || i==20 || i==22)// || i==15 || i==16)
		{
			MobilizedBody& mobod = matter.updMobilizedBody(mbx);
			std::filebuf fb;
			//cout << mobod.updBody().
			if (i==19)
				fb.open ( "../resources/femur_lat_r.obj",std::ios::in);
			else if (i==20)
				fb.open ( "../resources/femur_med_r.obj",std::ios::in);
			else if (i==22)
				fb.open ( "../resources/tibia_upper_r.obj",std::ios::in);
			//else if (i==15)
				//fb.open ( "../resources/meniscus_lat_r.obj",std::ios::in);
			//else if (i==16)
				//fb.open ( "../resources/meniscus_med_r.obj",std::ios::in);
			std::istream is(&fb);
			PolygonalMesh polMesh;
			polMesh.loadObjFile(is);
			fb.close();
			SimTK::ContactGeometry::TriangleMesh mesh(polMesh);
			ContactSurface contSurf;//(mesh, ContactMaterial(1.0e6, 1, 1, 0.03, 0.03), 0.001);
			if (i==19 || i==20 || i==22)
				contSurf = ContactSurface(mesh, ContactMaterial(10, 1, 1, 0.03, 0.03), 0.001);
			//else if (i==15 || i==16)
				//contSurf = ContactSurface(mesh, ContactMaterial(10, 3, 1, 0.03, 0.03), 0.001);
			DecorativeMesh showMesh(mesh.createPolygonalMesh());
			showMesh.setOpacity(0.5);
			mobod.updBody().addDecoration( showMesh);
			mobod.updBody().addContactSurface(contSurf);
		}
    }

	ModelVisualizer& viz(model.updVisualizer());
	//Visualizer viz(system);
	viz.updSimbodyVisualizer().addDecorationGenerator(new HitMapGenerator(system,contactForces));
    viz.updSimbodyVisualizer().setMode(Visualizer::RealTime);
    viz.updSimbodyVisualizer().setDesiredBufferLengthInSec(1);
    viz.updSimbodyVisualizer().setDesiredFrameRate(30);
    viz.updSimbodyVisualizer().setGroundHeight(-3);
    viz.updSimbodyVisualizer().setShowShadows(true);
	
    Visualizer::InputSilo* silo = new Visualizer::InputSilo();
	viz.updSimbodyVisualizer().addInputListener(silo);
    Array_<std::pair<String,int> > runMenuItems;
    runMenuItems.push_back(std::make_pair("Go", GoItem));
    runMenuItems.push_back(std::make_pair("Replay", ReplayItem));
    runMenuItems.push_back(std::make_pair("Quit", QuitItem));
    viz.updSimbodyVisualizer().addMenu("Run", RunMenuId, runMenuItems);

    Array_<std::pair<String,int> > helpMenuItems;
    helpMenuItems.push_back(std::make_pair("TBD - Sorry!", 1));
    viz.updSimbodyVisualizer().addMenu("Help", HelpMenuId, helpMenuItems);

    system.addEventReporter(new MyReporter(system,contactForces,ReportInterval));
	//system.addEventReporter(new Visualizer::Reporter(viz.updSimbodyVisualizer(), ReportInterval));
	
    // Check for a Run->Quit menu pick every <x> second.
	system.addEventHandler(new UserInputHandler(*silo, 0.001));

	system.realizeTopology();

	//Show ContactSurfaceIndex for each contact surface
  //  for (int i=0; i < matter.getNumBodies(); ++i) {
		//MobilizedBodyIndex mbx(i);
  //      const MobilizedBody& mobod = matter.getMobilizedBody(mbx);
  //      const int nsurfs = mobod.getBody().getNumContactSurfaces();
        //printf("mobod %d has %d contact surfaces\n", (int)mbx, nsurfs);
		////cout << "mobod with mass: " << (float)mobod.getBodyMass(si) << " has " << nsurfs << " contact surfaces" << endl;
		// //for (int i=0; i<nsurfs; ++i) {
  //          //printf("%2d: index %d\n", i, 
  //                 //(int)tracker.getContactSurfaceIndex(mbx,i)); 
  //      //}
  //  }

	//cout << "tracker num of surfaces: " << tracker.getNumSurfaces() << endl;

	//State state = system.getDefaultState();
	//viz.report(state);
	State& state = model.initializeState();
	viz.updSimbodyVisualizer().report(state);

	// Add reporters
    ForceReporter* forceReporter = new ForceReporter(&model);
    model.addAnalysis(forceReporter);

	CustomAnalysis* customReporter = new CustomAnalysis(&model, "r");
	model.addAnalysis(customReporter);

	// Create the integrator and manager for the simulation.
	SimTK::RungeKuttaMersonIntegrator integrator(model.getMultibodySystem());
	//SimTK::CPodesIntegrator integrator(model.getMultibodySystem());
	//integrator.setAccuracy(.01);
	//integrator.setAccuracy(1e-3);
	//integrator.setFixedStepSize(0.001);
	Manager manager(model, integrator);

	// Define the initial and final simulation times
	double initialTime = 0.0;
	double finalTime = 0.2;

	// Integrate from initial time to final time
	manager.setInitialTime(initialTime);
	manager.setFinalTime(finalTime);
	std::cout<<"\n\nIntegrating from "<<initialTime<<" to " <<finalTime<<std::endl;

	result = std::time(nullptr);
	std::cout << "\nBefore integrate(si) " << std::asctime(std::localtime(&result)) << endl;

	manager.integrate(state);

	result = std::time(nullptr);
	std::cout << "\nAfter integrate(si) " << std::asctime(std::localtime(&result)) << endl;

	// Save the simulation results
	Storage statesDegrees(manager.getStateStorage());
	statesDegrees.print("../outputs/states_flex.sto");
	model.updSimbodyEngine().convertRadiansToDegrees(statesDegrees);
	statesDegrees.setWriteSIMMHeader(true);
	statesDegrees.print("../outputs/states_degrees_flex.mot");
	// force reporter results
	forceReporter->getForceStorage().print("../outputs/force_reporter_flex.mot");
	//customReporter->print( "../outputs/custom_reporter_flex.mot");

	//cout << "You can choose 'Replay'" << endl;
	int menuId, item;
	unsigned int frameRate = 5;
	do { 
		cout << "Please choose 'Replay' or 'Quit'" << endl;
		viz.updInputSilo().waitForMenuPick(menuId, item);

        if (item != ReplayItem && item != QuitItem) 
            cout << "\aDude... follow instructions!\n";
		if (item == ReplayItem)
		{
			cout << "Type desired frame rate (integer) for playback and press Enter (default = 1) : ";
			//frameRate = cin.get();		
			cin >> frameRate;
			if (cin.fail()) 
			{
				cout << "Not an int. Setting default frame rate." << endl;
				cin.clear();
				cin.ignore(std::numeric_limits<std::streamsize>::max(), '\n');
				frameRate = 1;
			}

			//cout << "saveEm size: " << saveEm.size() << endl;
			for (unsigned int i=0; i<saveEm.size(); i++)
			{
				viz.updSimbodyVisualizer().drawFrameNow(saveEm.getElt(i));
				if (frameRate == 0)
					frameRate = 1;
				usleep(1000000/frameRate);
			}
		}
	} while (menuId != RunMenuId || item != QuitItem);
}