RecognitionResult GeometricRecognizer::recognize(Path2D points)
	{
		//--- Reject strokes that are too short to recognize reliably
		if (points.size() < 5)
		{
			return RecognitionResult("Unknown", 0.0);
		}

		//--- Make sure we have some templates to compare this to
		//---  or else recognition will be impossible
		if (templates.empty())
		{
			std::cout << "No templates loaded so no symbols to match." << std::endl;
			return RecognitionResult("Unknown", 0.0);
		}

		points = normalizePath(points);
	
		//--- Initialize best distance to the largest possible number
		//--- That way everything will be better than that
		double bestDistance = MAX_DOUBLE;
		//--- We haven't found a good match yet
		int indexOfBestMatch = -1;

		//--- Check the shape passed in against every shape in our database
		for (int i = 0; i < (int)templates.size(); i++)
		{
			//--- Calculate the total distance of each point in the passed in
			//---  shape against the corresponding point in the template
			//--- We'll rotate the shape a few degrees in each direction to
			//---  see if that produces a better match
			double distance = distanceAtBestAngle(points, templates[i]);
			if (distance < bestDistance)
			{
				bestDistance     = distance;
				indexOfBestMatch = i;
			}
		}

		//--- Turn the distance into a percentage by dividing it by
		//---  half the maximum possible distance (across the diagonal
		//---  of the square we scaled everything to)
		//--- Distance = how different they are
		//--- Subtract that from 1 (100%) to get the similarity
		double score = 1.0 - (bestDistance / halfDiagonal);
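		//--- Worked example (assuming the usual 250x250 scaling square):
		//---  halfDiagonal = 0.5 * sqrt(250^2 + 250^2) ~= 176.8, so a best
		//---  distance of 35 gives a score of about 1 - 35/176.8 ~= 0.80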

		//--- Make sure we actually found a good match
		//--- Sometimes we don't, like when the user doesn't draw enough points
		if (-1 == indexOfBestMatch)
		{
			//cout << "Couldn't find a good match." << endl;
			return RecognitionResult("Unknown", 1);
		}

		std::cout << score << std::endl;
		RecognitionResult bestMatch(templates[indexOfBestMatch].name, score);
		return bestMatch;
	}
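
For context, a minimal call-site sketch for the recognizer above. It assumes the usual layout of the $1 unistroke recognizer C++ port (a DollarRecognizer namespace, a loadTemplates() helper, Point2D/Path2D types, and name/score members on RecognitionResult); none of those names are confirmed by the snippet itself.

// Hedged usage sketch; everything except recognize() is assumed from typical $1-recognizer ports.
#include <cmath>
#include <iostream>
#include "GeometricRecognizer.h"   // assumed header name

int main()
{
	DollarRecognizer::GeometricRecognizer recognizer;
	recognizer.loadTemplates();   // assumed helper that registers the built-in gesture templates

	// Sample a rough circle; Path2D is assumed to be a std::vector<Point2D>.
	DollarRecognizer::Path2D stroke;
	for (int i = 0; i < 32; ++i)
	{
		double angle = i * 2.0 * 3.14159265358979 / 32.0;
		stroke.push_back(DollarRecognizer::Point2D(100.0 + 50.0 * std::cos(angle),
		                                           100.0 + 50.0 * std::sin(angle)));
	}

	DollarRecognizer::RecognitionResult result = recognizer.recognize(stroke);
	std::cout << "Best match: " << result.name << " (score " << result.score << ")" << std::endl;
	return 0;
}
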
	RecognitionResult transform_results(const MultiClassViolaJonesRecognitionResult& source)
	{
		std::vector<PattCutLib::ResultPerClassifier> result_by_classif;
		result_by_classif.reserve(source.get_overall_results().size());
		for (auto& per_cls_res : source.get_overall_results())
		{
			std::vector<PattCutLib::CapturedArea> areas;
			areas.reserve(per_cls_res.get_areas_info().size());
			for (auto& area : per_cls_res.get_areas_info())
			{
				auto& inner_area_info = area.get_inner_area_info();
				auto& rect = inner_area_info.get_area();
				areas.push_back(PattCutLib::CapturedArea(rect.left(), rect.top(), rect.width(), rect.height(),
														area.get_accuracy(), area.get_is_good(),
														inner_area_info.get_wnd_count(), inner_area_info.get_comb_boosted_classifier_value_max()));
			}

			result_by_classif.push_back(PattCutLib::ResultPerClassifier(
									per_cls_res.get_class_id(), per_cls_res.get_rec_time_ms(), 
									per_cls_res.get_wnd_count(), per_cls_res.get_wnd_after_prefilter(),
									per_cls_res.get_mean_depth(), per_cls_res.get_mean_weak_count(),
									std::move(areas)));
		}

		int best_classif_id = source.get_best_classif_id();
		if (best_classif_id < 0)
			best_classif_id = 0;

		return RecognitionResult(best_classif_id, source.get_overall_time_ms(), std::move(result_by_classif));
	}
	RecognitionResult RecognitionController::perform_recognition(const PattCutLib::Image& image, const std::vector<int>& classifier_ids, int min_object_size, int max_object_size, double scale_factor, double step_factor) const
	{
		if (scale_factor >= 0 && scale_factor <= 1.0)
			throw RecognitionException("Argument 'scale_factor' should be greater than 1.0 (or less than 0 to use as default value)");

		auto* controller = static_cast<MultiClassViolaJonesController*>(_multi_class_viola_jones_controller);
		::Image* image_ptr = extract_image_ptr(image);

		try
		{
			if (image_ptr == nullptr)
				return RecognitionResult();

			auto rec_result = controller->recognize(*image_ptr, classifier_ids, min_object_size, max_object_size, scale_factor, step_factor);
			return transform_results(rec_result);
		}
		catch (LibraryException& l_exc)
		{
			throw RecognitionException(std::string("Library exception: ") + l_exc.what());
		}
	}
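
A hedged call-site sketch for perform_recognition(). Only the parameter list and the scale_factor contract (greater than 1.0, or negative to use the library default) come from the code above; the header name, the classifier ids, the namespace placement, and the assumption that RecognitionException derives from std::exception are illustrative guesses.

// Hedged sketch: only the perform_recognition() signature is taken from the code above.
#include <exception>
#include <iostream>
#include <vector>
#include "PattCutLib.h"   // assumed umbrella header

void run_detection(const PattCutLib::RecognitionController& controller,   // namespace assumed
                   const PattCutLib::Image& image)
{
	std::vector<int> classifier_ids = { 1, 2 };   // hypothetical classifier ids

	try
	{
		// Object size bounds are in pixels; scale_factor must be > 1.0 (negative picks the default).
		PattCutLib::RecognitionResult result = controller.perform_recognition(
			image, classifier_ids,
			/*min_object_size*/ 24, /*max_object_size*/ 512,
			/*scale_factor*/ 1.2, /*step_factor*/ -1.0);
		(void)result;   // result accessors are library-specific and not shown in the snippet
	}
	catch (const std::exception& exc)   // assuming RecognitionException derives from std::exception
	{
		std::cerr << "Recognition failed: " << exc.what() << std::endl;
	}
}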
Example #4
void SimondConnector::messageReceived()
{
    waitFor(sizeof(qint32));
    qint32 code;

    *response >> code;
    qDebug() << "Message received: " << code;
    switch (code) {
    case Simond::VersionIncompatible:
        emit error(tr("Server version incompatible with client."));
        disconnectFromServer();
        break;
    case Simond::AuthenticationFailed:
        emit error(tr("Authentication failed. Please check the configured user name and password in the configuration."));
        disconnectFromServer();
        break;
    case Simond::LoginSuccessful:
        emit status(tr("Logged in"));
        break;
    case Simond::RecognitionReady:
        emit status(tr("Recognition ready; Activating..."));
        sendRequest(Simond::StartRecognition);
        break;
    case Simond::RecognitionStarted:
        emit status("Recognition activated");
        emit connectionState(Connected);
        break;
    case Simond::RecognitionError: {
        parseLength();
        QByteArray errorMessage, protocol;
        *response >> errorMessage;
        *response >> protocol;
        emit error(tr("Recognition reported error: ").arg(QString::fromUtf8(errorMessage)));
        disconnectFromServer();
        break;
    }
    case Simond::RecognitionWarning: {
        parseLength();
        QByteArray warningMessage;
        *response >> warningMessage;
        emit error(tr("Recognition reported warning: %1").arg(QString::fromUtf8(warningMessage)));
        break;
    }
    case Simond::RecognitionStopped:
        break; //nothing to do
    case Simond::RecognitionResult: {
        parseLength();

        qint8 sentenceCount;
        *response >> sentenceCount;
        RecognitionResultList recognitionResults;

        for (int i=0; i < sentenceCount; i++) {
          QByteArray word, sampa, samparaw;
          QList<float> confidenceScores;
          *response >> word;
          *response >> sampa;
          *response >> samparaw;
          *response >> confidenceScores;
          recognitionResults.append(RecognitionResult(QString::fromUtf8(word),
            QString::fromUtf8(sampa),
            QString::fromUtf8(samparaw),
            confidenceScores));
        }
        emit recognized(recognitionResults);
        break;
    }

    default:
        qDebug() << "Unhandled request: " << code;
    }
    if (socket->bytesAvailable())
        messageReceived();
}
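
For reference, a hedged sketch of the write side implied by the RecognitionResult branch above: a server would frame the payload in exactly the order the client reads it back (sentence count, then word/sampa/samparaw/confidence scores per sentence). The concrete message-code constant and the type of the length prefix consumed by parseLength() are assumptions, not taken from the snippet.

// Hedged sketch of how a Simond-style server could frame the message parsed above.
// Only the field order mirrors the reading code; the length-prefix type (qint64) is assumed.
#include <QByteArray>
#include <QDataStream>
#include <QIODevice>
#include <QList>

QByteArray buildRecognitionResultMessage(qint32 messageCode,
                                         const QList<QByteArray>& words,
                                         const QList<QByteArray>& sampas,
                                         const QList<QByteArray>& sampasRaw,
                                         const QList<QList<float> >& confidenceScores)
{
    // Serialize the payload in the order the client reads it back.
    QByteArray payload;
    QDataStream payloadStream(&payload, QIODevice::WriteOnly);
    payloadStream << (qint8) words.count();
    for (int i = 0; i < words.count(); ++i)
        payloadStream << words[i] << sampas[i] << sampasRaw[i] << confidenceScores[i];

    // Prepend the message code and a length prefix for the client's parseLength().
    QByteArray message;
    QDataStream messageStream(&message, QIODevice::WriteOnly);
    messageStream << messageCode << (qint64) payload.count();
    message += payload;
    return message;
}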