Example #1
const RectangleF* FieldController::FindClosestRectangles(RectangleF* pRectangle, Chromosome* pChromosome, bool bDirection)
{
	std::vector<const RectangleF*> pModels = FindIntersectedRectangles(pRectangle, pChromosome, bDirection);

	const RectangleF* pResultModel = NULL;

	Vector2F projectionAxis;
	if (!bDirection)
		projectionAxis = Vector2F(0, 1);
	else
		projectionAxis = Vector2F(1, 0);

	Vector2F min, max, modelMin, modelMax;
	float minLength = std::numeric_limits<float>::max(); // no candidate accepted yet

	GetProjection(pRectangle, projectionAxis, modelMin, modelMax);

	for (size_t i = 0, i_end = pModels.size(); i < i_end; ++i)
	{
		GetProjection(pModels[i], projectionAxis, min, max);
		// only consider models on one side of the source rectangle
		if (min > modelMin)
		{
			Vector2F lengthVec = (modelMax - min);
			float tempLength = lengthVec.X() * lengthVec.X() + lengthVec.Y() * lengthVec.Y();
			if (tempLength < minLength)
			{
				pResultModel = pModels[i];
				minLength = tempLength;
			}
		}
	}

	return pResultModel;
}
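A side note on the distance comparison in this example: because only the nearest candidate matters, the squared length is compared directly and the square root is skipped. A minimal, self-contained check of that ordering property (plain floats, nothing from the example's codebase):

#include <cassert>

int main()
{
    float a = 3.0f, b = 4.0f;            // two candidate distances (non-negative)
    assert((a * a < b * b) == (a < b));  // squaring preserves the ordering
    return 0;
}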
Example #2
Matrix4 Camera::GetGPUProjection() const
{
#ifndef URHO3D_OPENGL
    return GetProjection(); // Already matches API-specific format
#else
    // See formulation for depth range conversion at http://www.ogre3d.org/forums/viewtopic.php?f=4&t=13357
    Matrix4 ret = GetProjection();

    ret.m20_ = 2.0f * ret.m20_ - ret.m30_;
    ret.m21_ = 2.0f * ret.m21_ - ret.m31_;
    ret.m22_ = 2.0f * ret.m22_ - ret.m32_;
    ret.m23_ = 2.0f * ret.m23_ - ret.m33_;

    return ret;
#endif
}
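A brief note on the row rewrite above: GetProjection() here appears to produce clip-space depth in the [0, 1] convention, while OpenGL expects [-1, 1], so the third row is remapped as z' = 2z - w, which turns z/w in [0, 1] into z'/w in [-1, 1]. A minimal, self-contained check of that remap on a single depth value (not Urho3D code):

#include <cassert>

int main()
{
    // One clip-space vertex: normalized depth z/w = 0.25 under the [0, 1] convention.
    float z = 0.25f, w = 1.0f;

    // The same remap the third-row rewrite performs, applied to the z component.
    float zGL = 2.0f * z - w;

    // 0.25 in [0, 1] corresponds to -0.5 in [-1, 1].
    assert(zGL / w == 2.0f * (z / w) - 1.0f);
    return 0;
}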
Example #3
    //------------------------------------------------------------------------------
    //------------------------------------------------------------------------------
    Ray CameraComponent::Unproject(const Vector2 &invScreenPos)
    {
        Matrix4 matProj = Matrix4::Inverse((GetView() * GetProjection()));
        
        Vector2 vScreenSize = m_screen->GetResolution();
        //Normalise the screen space co-ordinates into clip space
        f32 nx = ((2.0f * (invScreenPos.x/vScreenSize.x)) - 1.0f);
        f32 ny = ((2.0f * (invScreenPos.y/vScreenSize.y)) - 1.0f);
        
        Vector4 vNear(nx, ny, -1.0f, 1.0f);
        Vector4 vFar(nx,ny, 1.0f, 1.0f);
        vNear = vNear * matProj;
        vFar = vFar * matProj;
        
        Ray cRay;
        
        vNear /= vNear.w;
        cRay.vOrigin = Vector3(vNear.x, vNear.y, vNear.z);

        vFar /= vFar.w;
        cRay.vDirection = Vector3(vFar.x, vFar.y, vFar.z) - cRay.vOrigin;
        
        cRay.fLength = cRay.vDirection.Length();
        cRay.vDirection /= cRay.fLength;
        
        return cRay;
    }
Example #4
void Camera::UpdateMatrices()
{
	projection = GetProjection();
	view = gameObject->transform->GetMatrix();
	//view.ViewInverse();
	view.Invert();
}
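A short note on the Invert() call above: the view matrix transforms world coordinates into camera space, which is the inverse of the transform that places the camera in the world. A minimal 1-D sketch of that relationship, using a plain translation instead of a full matrix (illustrative only, not part of this engine):

#include <cassert>

int main()
{
    float cameraWorldX = 5.0f;                      // camera placed at x = 5 in the world
    float worldPointX = 7.0f;                       // a point in the world
    float viewSpaceX = worldPointX - cameraWorldX;  // inverse of the camera's +5 translation
    assert(viewSpaceX == 2.0f);                     // the point sits 2 units from the camera along x
    return 0;
}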
Example #5
const Matrix4& Camera::GetProjection() const
{
    if (projectionDirty_)
    {
        projection_ = GetProjection(true);
        projectionDirty_ = false;
    }

    return projection_;
}
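This getter follows the usual lazy-caching pattern: the cached matrix and its dirty flag are refreshed inside a const method, which works because they are presumably declared mutable (an assumption; the member declarations are not shown here). A minimal, self-contained sketch of the same idiom:

class CachedValue
{
public:
    int Get() const
    {
        if (dirty_)
        {
            value_ = Recompute(); // allowed in a const method because the members are mutable
            dirty_ = false;
        }
        return value_;
    }

private:
    int Recompute() const { return 42; } // stand-in for the expensive computation
    mutable int value_ = 0;
    mutable bool dirty_ = true;
};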
Example #6
    void Process(RF_Collect::Queue<Comp::RendererMessage>& Out)
    {
        m_SharedTransformUniforms.ModelView = m_Camera.GetMatrix();
        m_SharedTransformUniforms.ModelViewProjection = GetProjection() * m_SharedTransformUniforms.ModelView;

//        Comp::RendererMessage msg;
//        msg.What = Comp::RendererMessage::Type::OpenGLCommand;
//        msg.GLMachineCommandBuffer = cmdBuffer.Data().Clone();
//        Out.Enqueue(msg);
    }
Example #7
bool FieldController::Intersect(const RectangleF* m0, const RectangleF* m1, bool bDirection)
{
	Vector2F dir;

	if (bDirection && (m0->GetBottomRight().Y() > m1->GetTopLeft().Y()))
		return false;


	if (bDirection)
		dir = Vector2F(0, 1);
	else
		dir = Vector2F(1, 0);

	Vector2F min0, max0, min1, max1;
	GetProjection(m0, dir, min0, max0);
	GetProjection(m1, dir, min1, max1);

	//if (max0.X() > min1.X() || max0.Y() > min1.Y())
	//	return true;

	if (bDirection)
	{
		if ((max0.X() > min1.X() && max0.X() < max1.X()) ||
			(min0.X() > min1.X() && min0.X() < max1.X()) ||
			(max1.X() > min0.X() && max1.X() < max0.X()) ||
			(min1.X() > min0.X() && min1.X() < max0.X()) ||
			min0.X() == min1.X() || max0.X() == max1.X())
			return true;
	}
	else
	{
		if ((max0.Y() > min1.Y() && max0.Y() < max1.Y()) ||
			(min0.Y() > min1.Y() && min0.Y() < max1.Y()) ||
			(max1.Y() > min0.Y() && max1.Y() < max0.Y()) ||
			(min1.Y() > min0.Y() && min1.Y() < max0.Y()) ||
			min0.Y() == min1.Y() || max0.Y() == max1.Y())
			return true;
	}

	return false;
}
Example #8
bool 
AirspaceWarningManager::UpdateInside(const AircraftState& state,
                                     const GlidePolar &glide_polar)
{
  if (!glide_polar.IsValid())
    return false;

  bool found = false;

  AirspacePredicateAircraftInside condition(state);

  Airspaces::AirspaceVector results = airspaces.FindInside(state, condition);
  for (const auto &i : results) {
    const AbstractAirspace &airspace = i.GetAirspace();

    if (!airspace.IsActive())
      continue; // ignore inactive airspaces

    if (!config.IsClassEnabled(airspace.GetType()))
      continue;

    AirspaceWarning *warning = GetWarningPtr(airspace);

    if (warning == nullptr ||
        warning->IsStateAccepted(AirspaceWarning::WARNING_INSIDE)) {
      GeoPoint c = airspace.ClosestPoint(state.location, GetProjection());
      const AirspaceAircraftPerformance perf_glide(glide_polar);
      AirspaceInterceptSolution solution;
      airspace.Intercept(state, c, GetProjection(), perf_glide, solution);

      if (warning == nullptr)
        warning = GetNewWarningPtr(airspace);

      warning->UpdateSolution(AirspaceWarning::WARNING_INSIDE, solution);
      found = true;
    }
  }

  return found;
}
Example #9
bool Collision::SeparatingAxisTheorem::TestTwoObjects(Rendering::SceneObject *obj1, Rendering::SceneObject *obj2)
{
	std::vector<glm::vec3> axes = GetTestingAxes(obj1, obj2);
//	std::vector<glm::vec3> axes2 = GetTestingAxes(obj2, TODO);

	//axes.insert(axes.end(), axes2.begin(), axes2.end());

	for (auto axis : axes)
	{
		if (!TestProjectionOverlap(GetProjection(obj1, axis), GetProjection(obj2, axis)))
		{
			return false;
		}
	}

// 	for (auto axis : axes2)
// 	{
// 		if (!TestProjectionOverlap(GetProjection(obj1, axis), GetProjection(obj2, axis)))
// 		{
// 			return false;
// 		}
// 	}
	return true;
}
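TestProjectionOverlap() itself is not shown in this example. Under the separating axis theorem, two convex shapes intersect only if their projections overlap on every tested axis, and a projection reduces to a [min, max] interval along that axis. A hypothetical sketch of such an overlap test (the struct and signature here are assumptions, not this codebase's actual types):

struct Projection
{
    float min; // smallest dot(vertex, axis) over the shape's vertices
    float max; // largest dot(vertex, axis)
};

// Two intervals overlap unless one ends before the other begins.
bool TestProjectionOverlapSketch(const Projection& a, const Projection& b)
{
    return a.max >= b.min && b.max >= a.min;
}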
Example #10
void
Camera::Bind(const mat4& model) {
    // XXX: could cache this, but waiting until the need arises
    mat4 projection = GetProjection();
    mat4 view = GetView();
    mat4 modelView = view * model;
    glUniformMatrix4fv(u("Projection"), 1, 0, ptr(projection));
    glUniformMatrix4fv(u("ViewMatrix"), 1, 0, ptr(view));
    glUniformMatrix4fv(u("ModelMatrix"), 1, 0, ptr(model));
    glUniformMatrix4fv(u("Modelview"), 1, 0, ptr(modelView));

    // Assume uniform scale for now to avoid expensive inverse:
    mat3 normalMatrix = mat3(modelView);
    glUniformMatrix3fv(u("NormalMatrix"), 1, 0, ptr(normalMatrix));
}
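Regarding the "assume uniform scale" shortcut above: with non-uniform scaling, normals transformed by the upper-left 3x3 of the model-view matrix get skewed, and the usual fix is the inverse-transpose. A minimal sketch of that fix, assuming GLM is available (the mat3/mat4/ptr names above look GLM-like, but that is an assumption):

#include <glm/glm.hpp>
#include <glm/gtc/matrix_inverse.hpp>

// General-case normal matrix: inverse-transpose of the upper-left 3x3 of model-view.
glm::mat3 NormalMatrix(const glm::mat4& modelView)
{
    return glm::inverseTranspose(glm::mat3(modelView));
}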
Example #11
bool ScrollCamera::VOnMouseWheel( const Vector3& vPosition, const Vector3& vDelta )
{
	Matrix matScale;
	if ( vDelta.y > 0.0f )
	{
		matScale.BuildScale( 2.0f, 2.0f, 2.0f );
		m_fZoomModifier *= 0.5f;
	}
	else
	{
		m_fZoomModifier *= 2.0f;
		matScale.BuildScale( 0.5f, 0.5f, 0.5f );
	}

	SetProjection( GetProjection() * matScale );

	return false;
}
Example #12
void OperatorToPlanTransformer::Visit(const PhysicalProject *) {
  auto project_prop = requirements_->GetPropertyOfType(PropertyType::PROJECT)
                          ->As<PropertyProjection>();
  (void)project_prop;

  size_t project_list_size = project_prop->GetProjectionListSize();

  // expressions to evaluate
  TargetList tl = TargetList();
  // columns which can be returned directly
  DirectMapList dml = DirectMapList();
  // schema of the projection's output
  std::vector<catalog::Column> columns;

  for (size_t project_idx = 0; project_idx < project_list_size; project_idx++) {
    auto expr = project_prop->GetProjection(project_idx);
    std::string column_name;

    // if the root of the expression is a column value we can
    // just do a direct mapping
    if (expr->GetExpressionType() == ExpressionType::VALUE_TUPLE) {
      auto tup_expr = (expression::TupleValueExpression *)expr;
      column_name = tup_expr->GetColumnName();
      dml.push_back(
          DirectMap(project_idx, std::make_pair(0, tup_expr->GetColumnId())));
    }
    // otherwise we need to evaluate the expression
    else {
      column_name = "expr" + std::to_string(project_idx);
      tl.push_back(Target(project_idx, expr->Copy()));
    }
    columns.push_back(catalog::Column(
        expr->GetValueType(), type::Type::GetTypeSize(expr->GetValueType()),
        column_name));
  }
  // build the projection plan node and insert it above the scan
  std::unique_ptr<planner::ProjectInfo> proj_info(
      new planner::ProjectInfo(std::move(tl), std::move(dml)));
  std::shared_ptr<catalog::Schema> schema_ptr(new catalog::Schema(columns));
  std::unique_ptr<planner::AbstractPlan> project_plan(
      new planner::ProjectionPlan(std::move(proj_info), schema_ptr));

  output_plan_ = std::move(project_plan);
}
Example #13
    //------------------------------------------------------------------------------
    //------------------------------------------------------------------------------
    Vector2 CameraComponent::Project(const Vector3 &invWorldPos)
    {
        //Convert the world space position to clip space
        Matrix4 matToClip = (GetView() * GetProjection());
        Vector4 vScreenPos = Vector4(invWorldPos, 1.0f) * matToClip;
        
        Vector2 vScreenSize = m_screen->GetResolution();
        
        // Normalize co-ordinates
        vScreenPos.x = vScreenPos.x / vScreenPos.w;
        vScreenPos.y = vScreenPos.y / vScreenPos.w;

        //Convert from clip space to screen space
        vScreenPos.x = (vScreenSize.x * 0.5f)* vScreenPos.x + vScreenSize.x * 0.5f;
        vScreenPos.y = (vScreenSize.y * 0.5f)* vScreenPos.y + vScreenSize.y * 0.5f;

        //Return 2D screen space co-ordinates
        return Vector2(vScreenPos.x, vScreenPos.y);
    }
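The last two assignments implement the standard NDC-to-screen mapping: a coordinate in [-1, 1] is scaled by half the screen size and shifted by the same half, so -1 lands on one edge, +1 on the other, and 0 on the centre. A minimal worked check (values are illustrative only):

#include <cassert>

int main()
{
    const float screenWidth = 1280.0f;
    const float ndcX = 0.0f;  // centre of the view in normalized device coordinates
    const float screenX = screenWidth * 0.5f * ndcX + screenWidth * 0.5f;
    assert(screenX == 640.0f); // centre of a 1280-pixel-wide screen
    return 0;
}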
Example #14
//xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
//
void Next()
{
  GetProjection(event, event);
  
  FindEdges(); 

  if( (start[1]-start[0])> MinStartDist ) // limit the burst distance
    {
      GetAmpSignals();
      
      GetPMTSignals();
      
      GetShpSignals();

      if( !(event%100) )
	{
	  DrawAll();  
	  DrawDistributions();
	  PrintAll();
	  printf("event: %i\n", event);
	}  
    }
  event++;

  if( event == event_lastt )
    {
      ftimer.Stop();
                  
      FitProfiles();
      
      PrintAll();

      DrawAll();  

      DrawDistributions();

      WriteAll();
      
      gSystem->Exit(1);
    }
}
Example #15
Vector2 Camera::WorldToScreenPoint(const Vector3& worldPos) const
{
    Vector3 eyeSpacePos = GetView() * worldPos;
    Vector2 ret;

    if(eyeSpacePos.z_ > 0.0f)
    {
        Vector3 screenSpacePos = GetProjection(false) * eyeSpacePos;
        ret.x_ = screenSpacePos.x_;
        ret.y_ = screenSpacePos.y_;
    }
    else
    {
        ret.x_ = (-eyeSpacePos.x_ > 0.0f) ? -1.0f : 1.0f;
        ret.y_ = (-eyeSpacePos.y_ > 0.0f) ? -1.0f : 1.0f;
    }

    ret.x_ = (ret.x_ / 2.0f) + 0.5f;
    ret.y_ = 1.0f - ((ret.y_ / 2.0f) + 0.5f);
    return ret;
}
Example #16
Ray Camera::GetScreenRay(float x, float y) const
{
    Ray ret;

    // If projection is invalid, just return a ray pointing forward
    if (!IsProjectionValid())
    {
        ret.origin_ = node_ ? node_->GetWorldPosition() : Vector3::ZERO;
        ret.direction_ = node_ ? node_->GetWorldDirection() : Vector3::FORWARD;
        return ret;
    }

    Matrix4 viewProjInverse = (GetProjection(false) * GetView()).Inverse();

    // The parameters range from 0.0 to 1.0. Expand to normalized device coordinates (-1.0 to 1.0) & flip Y axis
    x = 2.0f * x - 1.0f;
    y = 1.0f - 2.0f * y;
    Vector3 near(x, y, 0.0f);
    Vector3 far(x, y, 1.0f);

    ret.origin_ = viewProjInverse * near;
    ret.direction_ = ((viewProjInverse * far) - ret.origin_).Normalized();
    return ret;
}
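The x and y remap above is the inverse of the usual screen mapping: viewport coordinates in [0, 1] with y pointing down become normalized device coordinates in [-1, 1] with y pointing up, so the viewport centre (0.5, 0.5) maps to the NDC origin. A minimal self-contained check (not Urho3D code):

#include <cassert>

int main()
{
    float x = 0.5f, y = 0.5f; // centre of the viewport
    x = 2.0f * x - 1.0f;      // [0, 1] -> [-1, 1]
    y = 1.0f - 2.0f * y;      // [0, 1] -> [-1, 1], flipping the y axis
    assert(x == 0.0f && y == 0.0f);
    return 0;
}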
Example #17
void KeyClassifier::ClassifyKnownTauE(std::vector<KeySeq> &keys, 
                                      ZeromerModelBulk<double> &bg,
                                      Mat<double> &wellFlows,
                                      Mat<double> &refFlows,
                                      Col<double> &time,
                                      const Col<double> &incorp,
                                      double minSnr,
                                      double tauE,
                                      KeyFit &fit,
                                      TraceStore<double> &store,
                                      Mat<double> &predicted) {

  param.set_size(2);// << 0 << 0;
  param[0] = 0;
  param[1] = 0;
  fit.keyIndex = -1;
  fit.snr = 0;
  fit.sd = 0;
  fit.mad = -1;
  signal.set_size(wellFlows.n_cols);
  projSignal.set_size(wellFlows.n_cols); 
  Col<double> weights = ones<vec>(store.GetNumFrames());
  if (fit.bestKey >= 0) {
    fit.bestKey = -1;
  }
  fit.ok = -1;
  size_t frameStart = min(FRAME_START,wellFlows.n_rows);
  size_t frameEnd = min(FRAME_END,wellFlows.n_rows);
  for (size_t keyIx = 0; keyIx < keys.size(); keyIx++) {
    double keyMinSnr = std::max(minSnr, keys[keyIx].minSnr);
    double tauB = 0;
    bg.FitWell(fit.wellIdx, store, keys[keyIx], weights, mDist, mValues);
    param.at(0) = tauB;
    param.at(1) = tauE;
    onemerIncorpMad.Clear();
    onemerProjMax.Init(10);
    onemerProjMax.Clear();
    onemerSig.Clear();
    zeromerSig.Clear();
    zeroStats.Clear();
    traceSd.Clear();
    sigVar.Clear();
    onemerProj.Clear();
    for (size_t flowIx = 0; flowIx < wellFlows.n_cols; flowIx++) {
      bg.ZeromerPrediction(fit.wellIdx, flowIx, store, refFlows.unsafe_col(flowIx),p);
      double sig = 0;
      SampleStats<double> mad;
      diff = wellFlows.unsafe_col(flowIx) - p;
      for (size_t frameIx = frameStart; frameIx < frameEnd; frameIx++) {
        sig += diff.at(frameIx);
      }

      signal.at(flowIx) = sig;
      /* uvec indices;  */
      double pSig = std::numeric_limits<double>::quiet_NaN();
      if (incorp.n_rows == diff.n_rows) {
        pSig =  GetProjection(diff, incorp);
      }
      projSignal.at(flowIx) = pSig;
      sigVar.AddValue(sig);

      if (keys[keyIx].flows[flowIx] == 0) {
        for (size_t frameIx = frameStart; frameIx < frameEnd; frameIx++) {
          mad.AddValue(fabs(diff.at(frameIx)));
        }
        zeroStats.AddValue(mad.GetMean());
        zeromerSig.AddValue(sig);
      }
      else if (keys[keyIx].flows[flowIx] == 1 && flowIx < keys[keyIx].usableKeyFlows) {
        onemerSig.AddValue(sig);
        // double maxValue = 0;
        // for (size_t fIx = frameStart; fIx < frameEnd-1; fIx++) {
        //   maxValue = max(maxValue, (diff.at(fIx)+diff.at(fIx+1))/2);
        // }
        // projSignal.at(flowIx) = maxValue;
        // onemerProjMax.AddValue(maxValue);
        if (isfinite(pSig) && incorp.n_rows == p.n_rows) {
          onemerProj.AddValue(pSig);
          double maxSig = 0;
          for (size_t frameIx = frameStart; frameIx < frameEnd; frameIx++) {
            double projVal =  pSig * incorp.at(frameIx);
            maxSig = max(maxSig, projVal);
            onemerIncorpMad.AddValue(fabs(projVal - (wellFlows.at(frameIx,flowIx) - p.at(frameIx))));
          }
          onemerProjMax.AddValue(maxSig);
        }
      }
    }
    double snr = (onemerSig.GetMedian() - zeromerSig.GetMedian()) / ((onemerSig.GetIqrSd() + zeromerSig.GetIqrSd() + SDFUDGE)/2);
    float sd = sigVar.GetSD();
    if (!isfinite(sd) || isnan(sd)) {
      sd = 0;
    }
    if ((snr >= fit.snr || (isfinite(snr) && !isfinite(fit.snr))) && snr >= keyMinSnr ) {
      fit.keyIndex = keyIx;
      fit.bestKey = keyIx;
      fit.mad = zeroStats.GetMean();
      fit.snr = snr;
      fit.param = param;
      fit.sd = sd;
      fit.onemerAvg = onemerSig.GetCount() > 0 ? onemerSig.GetMedian() : std::numeric_limits<double>::quiet_NaN();
      fit.peakSig = onemerProjMax.GetCount() > 0 ? onemerProjMax.GetMedian() : std::numeric_limits<double>::quiet_NaN();
      fit.onemerProjAvg = onemerProj.GetCount() > 0 ? onemerProj.GetMean() : std::numeric_limits<double>::quiet_NaN();
      fit.projResid = onemerIncorpMad.GetCount() > 0 ? onemerIncorpMad.GetMean() : std::numeric_limits<double>::quiet_NaN();
      fit.ok = true;

      for (size_t flowIx = 0; flowIx < wellFlows.n_cols; flowIx++) {
        bg.ZeromerPrediction(fit.wellIdx, flowIx, store, refFlows.unsafe_col(flowIx),p);
        copy(p.begin(), p.end(), predicted.begin_col(flowIx));
      }
    }
    else if (keyIx == 0) { // || snr > fit.snr) { // just set default...
      fit.bestKey = keyIx;
      fit.mad = zeroStats.GetMean();
      fit.snr = snr;
      fit.param = param;
      fit.sd = sd;
      fit.onemerAvg = onemerSig.GetCount() > 0 ? onemerSig.GetMedian() : std::numeric_limits<double>::quiet_NaN();
      fit.peakSig = onemerProjMax.GetCount() > 0 ? onemerProjMax.GetMedian() : std::numeric_limits<double>::quiet_NaN();
      fit.onemerProjAvg = onemerProj.GetCount() > 0 ? onemerProj.GetMean() : std::numeric_limits<double>::quiet_NaN();
      fit.projResid = onemerIncorpMad.GetCount() > 0 ? onemerIncorpMad.GetMean() : std::numeric_limits<double>::quiet_NaN();
      fit.ok = true;

      for (size_t flowIx = 0; flowIx < wellFlows.n_cols; flowIx++) {
        bg.ZeromerPrediction(fit.wellIdx, flowIx, store, refFlows.unsafe_col(flowIx),p);
        copy(p.begin(), p.end(), predicted.begin_col(flowIx));
      }
    }

  }
  // Reset the params to the right key
  if (fit.keyIndex < 0) {
    bg.FitWell(fit.wellIdx, store, keys[0], weights, mDist, mValues);
  }
  else {
    bg.FitWell(fit.wellIdx, store, keys[fit.keyIndex], weights, mDist, mValues);
  }
  if (!isfinite(fit.mad)) {
    fit.ok = 0;
    fit.mad = std::numeric_limits<float>::max();
  }
}
Example #18
bool vtElevLayer::TransformCoords(vtProjection &proj_new)
{
	VTLOG("vtElevLayer::TransformCoords\n");

	vtProjection proj_old;
	GetProjection(proj_old);

	if (proj_old == proj_new)
		return true;		// No conversion necessary

	bool success = false;
	if (m_pGrid)
	{
		// Check to see if the projections differ *only* by datum
		vtProjection test = proj_old;
		test.SetDatum(proj_new.GetDatum());
		if (test == proj_new)
		{
			success = m_pGrid->ReprojectExtents(proj_new);
		}
		else
		{
			bool bUpgradeToFloat = false;

			if (!m_pGrid->IsFloatMode())
			{
				if (g_Options.GetValueBool(TAG_REPRO_TO_FLOAT_NEVER))
					bUpgradeToFloat = false;
				else if (g_Options.GetValueBool(TAG_REPRO_TO_FLOAT_ALWAYS))
					bUpgradeToFloat = true;
				else if (!IsGUIApp())
				{
					// Be sure not to ask, if this is not a GUI app
					bUpgradeToFloat = false;
				}
				else
				{
					// Ask
					int res = wxMessageBox(_("Input grid is integer.  Use floating-point values in reprojected grid?"),
						_("query"), wxYES_NO);
					if (res == wxYES)
						bUpgradeToFloat = true;
				}
			}

			// actually re-project the grid elements
			vtElevationGrid *grid_new = new vtElevationGrid;

			vtElevError err;
			success = grid_new->ConvertProjection(m_pGrid, proj_new,
				bUpgradeToFloat, progress_callback, &err);

			if (success)
			{
				delete m_pGrid;
				m_pGrid = grid_new;
				ReImage();
			}
			else
			{
				wxString msg((const char *) err.message, wxConvUTF8);
				wxMessageBox(msg, _("Error"));
				delete grid_new;
			}
		}
	}
	if (m_pTin)
	{
		success = m_pTin->ConvertProjection(proj_new);
	}
	SetModified(true);

	return success;
}
Example #19
Matrix4f Camera::GetViewProjection()
{
    return GetProjection() * GetView();
}