void DecisionTree::train
(const vector<TrainingExample*>& examples,
 const vector<double>& weights) {
  // cerr << "Training tree with " << examples.size() << " training examples\n";
  // cerr << "Tree max-depth used: " << getMaxDepth() << endl;
  // cerr << "Classification threshold used: " << getClassificationThreshold() << endl;
  if(m_root) {
    delete m_root;
    m_root = 0;
  }
  m_feature_thresholds.clear();
  assert(examples.size()>0); 
  size_t num_features = examples[0]->getNumberOfFeatures();
  double sum_weights = sumWeights(weights);

  // compute feature averages that we shall use as thresholds...
  vector<double> feature_averages;
  // cerr << "Number of examples: " << num_examples << ", Number of features: " << num_features << endl;
  feature_averages.resize(num_features,0.0);
  for(size_t ie=0; ie<examples.size(); ++ie) {
    // cerr << "Processing example: " << ie << endl;
    const TrainingExample* ex = examples[ie];
    // cerr << "Number of features: " << examples[ie]->getNumberOfFeatures() << endl;
    assert(ex->getNumberOfFeatures()==num_features);
    for(size_t ifeat=0; ifeat<num_features; ifeat++) {
      // cerr << "Feature: " << ifeat << ", value: " << ex->getFeatureDoubleValue(ifeat);
      double fv = ex->getFeatureDoubleValue(ifeat);
      feature_averages[ifeat]+=fv*weights[ie]; 
    }
  }
  // cerr << "Average thresholds:\n";
  for(size_t ifeat=0; ifeat<num_features; ifeat++) {
    feature_averages[ifeat]/=sum_weights;
    // cerr << ifeat << ": " << feature_averages[ifeat] << "\t";
  }
  m_feature_thresholds = feature_averages;
 
  // cerr << "Recursive train tree:\n";
  set<size_t> empty_used_features;
  m_root = recursiveTrainTree(examples,weights,sum_weights,empty_used_features);
  // printTree();
}
Esempio n. 2
0
list<ElementData*> * Game::testWind(Map & map, Wind & wind)
{
	// Determine which blocks the wind destroys.
	// Returns a newly allocated list of the hit elements; the caller
	// takes ownership of the returned list.
	list<ElementData*> * toDestroy = new list<ElementData*>;
	TYP sum = sumWeights(map);
	TYP dir, force;
	wind.getWind(dir, force);

	// Absorb the wind level by level until its force is spent.
	int i = 1;
	for(; i<_highestBlock+1 && sum-force>0; i++)
		sum -= force;

	if(i<_highestBlock) // The wind was not fully absorbed.
	{
		for(int k = i; k<=_highestBlock; k++) // walk the remaining heights
		{
			bool breaker = false;
			if(dir==WIND_RIGHT_DIR)
			{
				// Wind blows to the right: the first occupied cell from the
				// left at height k takes the hit.
				// BUG FIX: the occupancy test previously read row 'i' (the
				// fixed absorption level) instead of the row 'k' being
				// processed, so the wrong cells were selected for destruction.
				for(int j=0; j<map.getWidth() && !breaker; j++)
					if(map(j,k,DIM_WHO_TAB)!=EMPTY)
					{
						toDestroy->push_back(&map(j,k));
						breaker = true;
					}
			}
			else
			{
				// Wind blows to the left: scan columns right-to-left.
				for(int j=map.getWidth()-1; j>=0 && !breaker; j--)
					if(map(j,k,DIM_WHO_TAB)!=EMPTY)
					{
						toDestroy->push_back(&map(j,k));
						breaker = true;
					}
			}
		}
	}
	return toDestroy;
}
Esempio n. 3
0
l1menu::ReducedSamplePrivateMembers::ReducedSamplePrivateMembers( const l1menu::ReducedSample& thisObject, const std::string& filename )
	: event(thisObject), triggerMenu(mutableTriggerMenu_), eventRate(1), sumOfWeights(0)
{
	// Initialise a ReducedSample from a protobuf file: verify the magic
	// number and format version, then stream in the (gzip-compressed)
	// header and all Run messages, and finally rebuild the TriggerMenu
	// from the header information.
	// Throws std::runtime_error on any open/parse failure.
	GOOGLE_PROTOBUF_VERIFY_VERSION;

	// Open the file with read ability
	int fileDescriptor = open( filename.c_str(), O_RDONLY );
	// BUG FIX: open() returns -1 on failure (0 is a valid file descriptor),
	// so the previous "==0" test missed every open failure and wrongly
	// rejected descriptor 0.
	if( fileDescriptor<0 ) throw std::runtime_error( "ReducedSample initialise from file - couldn't open file" );
	::UnixFileSentry fileSentry( fileDescriptor ); // Use this as an exception safe way of closing the input file
	google::protobuf::io::FileInputStream fileInput( fileDescriptor );

	// First read the magic number at the start of the file and make sure it
	// matches what I expect. This is uncompressed so I'll wrap it in a block
	// to make sure the CodedInputStream is destructed before creating a new
	// one with gzip input.
	{
		google::protobuf::io::CodedInputStream codedInput( &fileInput );

		// As a read buffer, I'll create a string the correct size (filled with an arbitrary
		// character) and read straight into that.
		std::string readMagicNumber;
		if( !codedInput.ReadString( &readMagicNumber, FILE_FORMAT_MAGIC_NUMBER.size() ) ) throw std::runtime_error( "ReducedSample initialise from file - error reading magic number" );
		if( readMagicNumber!=FILE_FORMAT_MAGIC_NUMBER ) throw std::runtime_error( "ReducedSample - tried to initialise with a file that is not the correct format" );

		google::protobuf::uint32 fileformatVersion;
		if( !codedInput.ReadVarint32( &fileformatVersion ) ) throw std::runtime_error( "ReducedSample initialise from file - error reading file format version" );
		// So far I only have (and ever expect to have) one version of the file
		// format, imaginatively versioned "1". You never know though...
		if( fileformatVersion>1 ) std::cerr << "Warning: Attempting to read a ReducedSample with version " << fileformatVersion << " with code that only knows up to version 1." << std::endl;
	}

	// Everything after the magic number and version is gzip compressed.
	google::protobuf::io::GzipInputStream gzipInput( &fileInput );
	google::protobuf::io::CodedInputStream codedInput( &gzipInput );

	// Disable warnings on this input stream (second parameter, -1). The
	// first parameter is the default. I'll change this if necessary in
	// the loop later.
	size_t totalBytesLimit=67108864;
	codedInput.SetTotalBytesLimit( totalBytesLimit, -1 );

	google::protobuf::uint64 messageSize;

	// Read the size of the header message
	if( !codedInput.ReadVarint64( &messageSize ) ) throw std::runtime_error( "ReducedSample initialise from file - error reading message size for header" );
	google::protobuf::io::CodedInputStream::Limit readLimit=codedInput.PushLimit(messageSize);
	if( !protobufSampleHeader.ParseFromCodedStream( &codedInput ) ) throw std::runtime_error( "ReducedSample initialise from file - some unknown error while reading header" );
	codedInput.PopLimit(readLimit);

	// Keep looping until there is nothing more to be read from the file.
	// Each iteration reads one length-prefixed Run message.
	while( codedInput.ReadVarint64( &messageSize ) )
	{
		readLimit=codedInput.PushLimit(messageSize);

		// Make sure the CodedInputStream doesn't refuse to read the message because it's
		// read too much already. I'll also add an arbitrary 50 on to always make sure
		// I can read the next messageSize if there is one.
		if( gzipInput.ByteCount()+messageSize+50 > totalBytesLimit )
		{
			totalBytesLimit+=messageSize*5; // Might as well set it a little higher than necessary while I'm at it.
			codedInput.SetTotalBytesLimit( totalBytesLimit, -1 );
		}
		std::unique_ptr<l1menuprotobuf::Run> pNewRun( new l1menuprotobuf::Run );
		if( !pNewRun->ParseFromCodedStream( &codedInput ) ) throw std::runtime_error( "ReducedSample initialise from file - some unknown error while reading run" );
		protobufRuns.push_back( std::move( pNewRun ) );

		codedInput.PopLimit(readLimit);
	}


	// Always make sure there is at least one Run ready to be added to. Later
	// code assumes there is already a run there.
	if( protobufRuns.empty() )
	{
		std::unique_ptr<l1menuprotobuf::Run> pNewRun( new l1menuprotobuf::Run );
		protobufRuns.push_back( std::move( pNewRun ) );
	}

	// Count up the sum of the weights of all events
	for( const auto& pRun : protobufRuns )
	{
		sumOfWeights+=sumWeights( *pRun );
	}

	// I have all of the information in the protobuf members, but I also need the trigger information
	// in the form of l1menu::TriggerMenu. Copy out the required information.
	for( int triggerNumber=0; triggerNumber<protobufSampleHeader.trigger_size(); ++triggerNumber )
	{
		const l1menuprotobuf::Trigger& inputTrigger=protobufSampleHeader.trigger(triggerNumber);

		mutableTriggerMenu_.addTrigger( inputTrigger.name(), inputTrigger.version() );
		// Get a reference to the trigger I just created
		l1menu::ITrigger& trigger=mutableTriggerMenu_.getTrigger(triggerNumber);

		// Run through all of the parameters and set them to what they were
		// when the sample was made.
		for( int parameterNumber=0; parameterNumber<inputTrigger.parameter_size(); ++parameterNumber )
		{
			const auto& inputParameter=inputTrigger.parameter(parameterNumber);
			trigger.parameter(inputParameter.name())=inputParameter.value();
		}

		// I should probably check the threshold names exist. I'll do it another time.
	}

}
Esempio n. 4
0
	void EulerianParticleVelocityForce
	(
		cfdemCloud& sm,			
		const fvMesh& mesh,
		volVectorField& Uf_,
		volVectorField&	Up_,
		volScalarField& rho_,
		volScalarField& alpf_,
		volScalarField& Pg_,
		volVectorField& MappedDragForce_,
		const labelListList& particleList_,
		const bool& weighting_
	)
	{		
		// Neighbouring cells
		CPCCellToCellStencil neighbourCells(mesh);
				
		// get viscosity field
		#ifdef comp
		    const volScalarField nufField = sm.turbulence().mu()/rho_;
		#else
		    const volScalarField& nufField = sm.turbulence().nu();
		#endif

		// Gas pressure gradient
		volVectorField gradPg_ = fvc::grad(Pg_);
		interpolationCellPoint<vector> gradPgInterpolator_(gradPg_);

		// Local variables	
		label  cellID(-1);
		vector drag(0,0,0);
		vector Ufluid(0,0,0);
		
		vector position(0,0,0);
		scalar voidfraction(1);
		
		vector Up(0,0,0);
		vector Ur(0,0,0);
		scalar ds(0);
		
		scalar nuf(0);
		scalar rhof(0);
		
		vector WenYuDrag(0,0,0);
		
		interpolationCellPoint<scalar> voidfractionInterpolator_(alpf_);
		interpolationCellPoint<vector> UInterpolator_(Uf_);	
		
		scalar dist_s(0);
		scalar sumWeights(0);
		
		scalarField               weightScalar(27,scalar(0.0));
		Field <Field <scalar> >   particleWeights(particleList_.size(),weightScalar);
		
		//Info << " particle size " << particleList_.size() << endl;
		
		// Number of particle in a cell
		scalarField np(mesh.cells().size(),scalar(0));
		
		// Particle volume
		scalar Volp(0);
		vector gradPg_int(0,0,0);
		
		for(int ii = 0; ii < particleList_.size(); ii++)
		{
			int index = particleList_[ii][0];
			
			cellID = sm.cellIDs()[index][0];
			position = sm.position(index);			    

                        Ufluid = UInterpolator_.interpolate(position,cellID); 
			Up = sm.velocity(index);
                        Ur = Ufluid-Up;

                        ds = 2*sm.radius(index);

                        // Calculate WenYu Drag 
                        voidfraction = voidfractionInterpolator_.interpolate(position,cellID);
                        nuf = nufField[cellID];
                        rhof = rho_[cellID];	
                        WenYuDragForce(Ur,ds,rhof,nuf,voidfraction,WenYuDrag);	
    
        		Volp = ds*ds*ds*M_PI/6;
			gradPg_int = gradPgInterpolator_.interpolate(position,cellID);
			
			//if (cellID > -1)  // particle centre is in domain
            		//{
				if(weighting_)
				{
					labelList& cellsNeigh = neighbourCells[cellID];
					sumWeights = 0;
					dist_s = 0;

					//Info << " index = " << index << " ii = " << ii << " cellID = " << cellID << endl;

					forAll(cellsNeigh,jj)
					{
						// Find distances between particle and neighbouring cells					
						dist_s = mag(sm.mesh().C()[cellsNeigh[jj]]-position)/pow(sm.mesh().V()[cellsNeigh[jj]],1./3.);

						if(dist_s <= 0.5)
						{		
							particleWeights[ii][jj] =  1./4.*pow(dist_s,4)-5./8.*pow(dist_s,2)+115./192.;
						}
						else if (dist_s > 0.5 && dist_s <= 1.5)
						{		
							particleWeights[ii][jj] = -1./6.*pow(dist_s,4)+5./6.*pow(dist_s,3)-5./4.*pow(dist_s,2)+5./24.*dist_s+55./96.;
						}
						else if (dist_s > 1.5 && dist_s <= 2.5)
						{		
							particleWeights[ii][jj] =  pow(2.5-dist_s,4)/24.;
						}
						else
						{		
							particleWeights[ii][jj] = 0;
						}

						sumWeights += particleWeights[ii][jj];

					}	

					forAll(cellsNeigh,jj)
					{	
						if ( sumWeights != 0 )
						{
							Up_[cellID] 	         +=  Up*particleWeights[ii][jj]/sumWeights;
							MappedDragForce_[cellID] += (WenYuDrag + Volp * gradPg_int) * particleWeights[ii][jj]/sumWeights;
						}
						else
						{
							Up_[cellID] 		 = vector(0,0,0);
							MappedDragForce_[cellID] = vector(0,0,0);	
						}
					}
				}
				else
				{