TEST_P(tttL1L2RichardsonLucyDeconvolutionTest,General){
	// Parameterized round-trip test: deconvolve <input> with <kernel> using the
	// supplied alpha/beta/iteration settings, write the result to <output>, and
	// verify the filter ran exactly the requested number of iterations.
	std::string inputFileName(std::get<0>(GetParam()));
	std::string kernelFileName(std::get<1>(GetParam()));

	std::cerr << "Running for input " << inputFileName << std::endl;
	m_InputReader->SetFileName(inputFileName);
	m_KernelReader->SetFileName(kernelFileName);
	m_InputReader->Update();
	m_KernelReader->Update();
	// Force the input image onto the kernel's spacing so the deconvolution
	// filter sees consistent pixel sizes.
	m_InputReader->GetOutput()->SetSpacing(m_KernelReader->GetOutput()->GetSpacing());

	double alpha = std::get<2>(GetParam());
	double beta = std::get<3>(GetParam());
	int iterations = std::get<4>(GetParam());
	m_Deconvoluter->SetAlpha(alpha);
	m_Deconvoluter->SetBeta(beta);
	m_Deconvoluter->NormalizeOn();
	m_Deconvoluter->SetNumberOfIterations(iterations);
	itk::SimpleFilterWatcher deconvoluterWatch(m_Deconvoluter);
	m_Writer->SetFileName(std::get<5>(GetParam()));
	try{
		m_Writer->Update();
	}catch ( itk::ExceptionObject & e ){
		// BUG FIX: the exception was previously only logged, so the test could
		// still pass even though the pipeline failed. Fail explicitly instead.
		FAIL() << "Unexpected exception caught when writing deconvolution image: "
		       << e;
	}

	ASSERT_EQ(this->m_Deconvoluter->GetIteration(),iterations);

};
예제 #2
0
void KisExrTest::testRoundTrip()
{
    // Round-trip test: import a reference EXR file, export it to a temporary
    // .exr file, re-import that file, and compare the two images' paint
    // devices channel-by-channel (half precision, tolerance 0.01).
    QString inputFileName(TestUtil::fetchDataFileLazy("CandleGlass.exr"));

    KisDocument *doc1 = KisPart::instance()->createDocument();

    KisImportExportManager manager(doc1);
    manager.setBatchMode(true);

    KisImportExportFilter::ConversionStatus status;
    QString s = manager.importDocument(inputFileName, QString(),
                                       status);

    QCOMPARE(status, KisImportExportFilter::OK);
    QVERIFY(doc1->image());


    // Export target: a unique temp file. NOTE(review): setAutoRemove(false)
    // means the file is left behind after the test — presumably deliberate for
    // debugging; confirm, otherwise the temp dir accumulates files.
    QTemporaryFile savedFile(QDir::tempPath() + QLatin1String("/krita_XXXXXX") + QLatin1String(".exr"));
    savedFile.setAutoRemove(false);
    savedFile.open();

    QUrl savedFileURL("file://" + savedFile.fileName());
    QString savedFileName(savedFileURL.toLocalFile());

    QString typeName = KisMimeDatabase::mimeTypeForFile(savedFileURL.toLocalFile());

    QByteArray mimeType(typeName.toLatin1());
    status = manager.exportDocument(savedFileName, mimeType);
    QVERIFY(QFileInfo(savedFileName).exists());

    {
        // Re-import the exported file with a fresh document/manager pair
        // (inner `manager` intentionally shadows the outer one).
        KisDocument *doc2 = KisPart::instance()->createDocument();

        KisImportExportManager manager(doc2);
        manager.setBatchMode(true);

        s = manager.importDocument(savedFileName, QString(), status);

        QCOMPARE(status, KisImportExportFilter::OK);
        QVERIFY(doc2->image());

        QVERIFY(TestUtil::comparePaintDevicesClever<half>(
                    doc1->image()->root()->firstChild()->paintDevice(),
                    doc2->image()->root()->firstChild()->paintDevice(),
                    0.01 /* meaningless alpha */));

        delete doc2;
    }

    savedFile.close();

    delete doc1;

}
예제 #3
0
int main(int argc, char** argv)
{
	if(argc < 2 || argc > 3)
	{
		cerr << "Usage: " << argv[0] << " objfile.obj [texture directory]" << endl;
		return -1;
	}
	if(strlen(argv[1]) < 5)
	{
		cerr << "Must end in .obj" << endl;
		return -2;
	}
	string inputFileName(argv[1]);
	string outFileName(inputFileName.substr(0, strlen(argv[1]) - 4) + ".db");
	wavefrontToSQLite(argv[1], outFileName.c_str());
	return 0;
}
예제 #4
0
int main(int argc, char *argv[])
{
    // Pedestrian-tracking demo. argv[1] selects the algorithm, argv[2] the
    // input format (video file vs image sequence), argv[3] the input path.
    // Each algorithm branch below detects pedestrians, tracks them across
    // frames and draws its results into the "Video" window.

    // Check that the user supplied enough parameters.
    if(argc <= 3)
    {
        std::cout<<"---------------------------------------"<<std::endl<<
                   "Veuillez rentrer la methode choisie :  "<<std::endl<<
                   "- template_tracking"                    <<std::endl<<
                   "- LK_tracking"                          <<std::endl<<
                   "- farneback"                            <<std::endl<<
                   "- camshift_kalman"                      <<std::endl<<
                   "- background_lk"                        <<std::endl<<
                   "- background_kalman"         <<std::endl<<std::endl<<
                   "Le type de format : "                   <<std::endl<<
                   "- video"                                <<std::endl<<
                   "- image"                     <<std::endl<<std::endl<<
                   "Le nom du fichier d'input"              <<std::endl<<
                   "---------------------------------------"<<std::endl;
        std::exit(EXIT_FAILURE);
    }



    //------------------VARIABLES----------------------------------------------//


    // Variables shared by all tracking methods

    choiceAlgo algo;
    formatVideo format;
    std::string inputFileName(argv[3]);
    int nbTrames = 501;     // number of frames to process (fixed cap)
    double fps = 0;         // filled in by extractVideoData for video input

    std::vector<cv::Mat> sequence;

    cv::Mat previousSequence;

    int nbPedestrians = 0;

    // HOG person detector with the default OpenCV people model.
    cv::HOGDescriptor hog;
    hog.setSVMDetector(cv::HOGDescriptor::getDefaultPeopleDetector());

    std::vector<cv::Rect> detectedPedestrian;


    // HOG + Good features to track + LK
    std::vector<std::vector<cv::Point2f>> featuresDetected;
    std::vector<std::vector<cv::Point2f>> previousFeaturesDetected;



    // HOG + Template tracking
    std::vector<cv::Rect> boxes;
    std::vector<cv::Rect> previousBoxes;



    // Optical flow farneback
    cv::Mat flow;
    cv::Mat imGray;
    cv::Mat imGrayPrev;



    //camshift and kalman filter
    std::vector<cv::MatND> backProj;
    std::vector<cv::Rect> roiHogDetected;
    std::vector<cv::Rect> roiCamShift;
    std::vector<bool> detected;
    std::vector<cv::Mat> hist;
    std::vector<cv::RotatedRect> rectCamShift;
    cv::Point2f rect_points[4];


    //--------------------------------------------------------------------------------------//
    // Background subtraction; for LK / goodFeaturesToTrack tracking see above.
    trackingOption tracking;
    cv::Ptr<cv::BackgroundSubtractor> pMOG2;
    std::vector<cv::Rect> detectedPedestrianFiltered;

    // Kalman filter for the BACKGROUND_LK branch: state = (x, y, vx, vy),
    // measurement = (x, y).
    cv::KalmanFilter KF(4,2,0,CV_32F);
    cv::Mat_<float> measurement(2,1);
    cv::Mat prediction;
    cv::Mat estimated;

    pMOG2 = cv::createBackgroundSubtractorMOG2();


    // With the addition of a Haar cascade classifier
    std::vector<std::vector<cv::Rect>> rect_upper_body;
    cv::CascadeClassifier classifier;
    std::string upper_body_cascade_name = "haarcascade_fullbody.xml";

    if(!classifier.load(upper_body_cascade_name))
    {
        std::cout<<"le fichier "<<upper_body_cascade_name<<" ne peut etre charge"<<std::endl;
        return -1;
    }

    //--------------------------------------------------------------------------------------//


    //Background substraction and Kalman
    bool initKalman = true;
    cv::KalmanFilter Kalman(4,2,0,CV_32F);
    cv::Mat_<float> measurmt(2,1);
    cv::Mat predict;
    cv::Mat estim;

    // Constant-velocity transition model:
    // | 1 0 1 0 |
    // | 0 1 0 1 |
    // | 0 0 1 0 |
    // | 0 0 0 1 |
    Kalman.transitionMatrix.at<float>(0,0) = 1;
    Kalman.transitionMatrix.at<float>(0,1) = 0;
    Kalman.transitionMatrix.at<float>(0,2) = 1;
    Kalman.transitionMatrix.at<float>(0,3) = 0;
    Kalman.transitionMatrix.at<float>(1,0) = 0;
    Kalman.transitionMatrix.at<float>(1,1) = 1;
    Kalman.transitionMatrix.at<float>(1,2) = 0;
    Kalman.transitionMatrix.at<float>(1,3) = 1;
    Kalman.transitionMatrix.at<float>(2,0) = 0;
    Kalman.transitionMatrix.at<float>(2,1) = 0;
    Kalman.transitionMatrix.at<float>(2,2) = 1;
    Kalman.transitionMatrix.at<float>(2,3) = 0;
    Kalman.transitionMatrix.at<float>(3,0) = 0;
    Kalman.transitionMatrix.at<float>(3,1) = 0;
    Kalman.transitionMatrix.at<float>(3,2) = 0;
    Kalman.transitionMatrix.at<float>(3,3) = 1;

    measurmt.setTo(cv::Scalar(0));

    cv::setIdentity(Kalman.measurementMatrix);
    cv::setIdentity(Kalman.processNoiseCov, cv::Scalar::all(1e-4));
    cv::setIdentity(Kalman.measurementNoiseCov, cv::Scalar::all(1e-1));
    cv::setIdentity(Kalman.errorCovPost, cv::Scalar::all(.1));


    cv::Rect rectK;
    cv::Rect prevRectK;

    // Video acquisition: decode the algorithm and format choices.
    algo = detectAlgo(std::string(argv[1]));
    format = detectFormat(std::string(argv[2]));





    //------------------VIDEO--------------------------------------------------//

    if(format == SEQUENCE_IMAGE)
        sequence.resize(nbTrames);
    else if(format == VIDEO)
        std::cout<<"video"<<std::endl;

    extractVideoData(sequence, format, inputFileName, nbTrames, fps);

    cv::namedWindow("Video", cv::WINDOW_AUTOSIZE);



    //------------------VIDEO-PROCESSING---------------------------------------//

    for(int i=0;i<nbTrames;i++)
    {

        if(i>0)
            previousSequence = sequence[i-1];
        else
            previousSequence = sequence[i];




        ///------------------HOG + Good Features to track + LK-----------------//

        if(algo == HOG_GOODFEATURESTOTRACK_LK)
        {
            // Re-detect with HOG every 20 frames; in between, track the
            // detected corner features with Lucas-Kanade optical flow.
            if(i%20 == 0)
            {
                detectedPedestrian = hogDetection(sequence[i], hog);
                nbPedestrians = detectedPedestrian.size();

                if(nbPedestrians != 0)
                {
                    featuresDetected = featuresDetection(sequence[i], detectedPedestrian);
                    previousFeaturesDetected.resize(featuresDetected.size());
                    previousFeaturesDetected = featuresDetected;
                }
            }
            else if(previousFeaturesDetected.size() != 0)
            {
                featuresDetected = lucasKanadeTracking(previousSequence, sequence[i], previousFeaturesDetected);

                previousFeaturesDetected.clear();
                previousFeaturesDetected.resize(featuresDetected.size());
                previousFeaturesDetected = featuresDetected;
            }


            //--------Representation--------------------

            /*
            cv::Scalar myColor;

            for(size_t j=0;j<featuresDetected.size();j++)
            {
                if(j%3 == 0)
                    myColor = cv::Scalar(0,0,cv::RNG().uniform(200,255));

                else if(j%2 == 0)
                    myColor = cv::Scalar(0,cv::RNG().uniform(200,255),0);

                else
                    myColor = cv::Scalar(cv::RNG().uniform(200,255),0,0);

                for(size_t k=0;k<featuresDetected[j].size();k++)
                {
                    cv::circle(sequence[i], featuresDetected[j][k], 1, myColor,-1);
                }
            }
            */


            for(size_t j=0;j<featuresDetected.size();j++)
            {
                cv::rectangle(sequence[i], cv::boundingRect(featuresDetected[j]), cv::Scalar( 0, 0, 255), 2, 8, 0 );
            }


            // display the video frame
            cv::imshow("Video", sequence[i]);
        }





        ///------------------HOG + Template Tracking---------------------------//

        else if(algo == HOG_TEMPLATE_TRACKING)
        {
            // Re-detect with HOG every 20 frames; in between, follow each box
            // by normalized cross-correlation template matching.
            if(i%20 == 0)
            {
                detectedPedestrian = hogDetection(sequence[i], hog);
                nbPedestrians = detectedPedestrian.size();

                if(nbPedestrians != 0)
                {
                    boxes = templateTracking(sequence[i], detectedPedestrian, CV_TM_CCORR_NORMED);
                    previousBoxes.resize(boxes.size());
                    previousBoxes = boxes;
                }
            }
            else if(previousBoxes.size() != 0)
            {
                boxes = templateTracking(sequence[i], previousBoxes, CV_TM_CCORR_NORMED);

                previousBoxes.clear();
                previousBoxes.resize(boxes.size());
                previousBoxes = boxes;
            }

            //--------Representation--------------------

            for(size_t j=0;j<boxes.size();j++)
            {
                cv::rectangle(sequence[i], boxes[j], cv::Scalar( 0, 0, 255), 2, 8, 0 );
            }


            // display the video frame
            cv::imshow("Video", sequence[i]);
        }






        ///------------------HOG + Optical Flow Farneback----------------------//

        else if(algo == OPT_FLOW_FARNEBACK)
        {
            // Dense optical flow between consecutive grayscale frames
            // (needs a previous frame, hence the i != 0 guard).
            if(i!=0)
            {
                flow = cv::Mat::zeros(sequence[i].size(), CV_32FC2);
                cv::cvtColor(sequence[i], imGray, CV_BGR2GRAY);
                cv::cvtColor(sequence[i-1], imGrayPrev, CV_BGR2GRAY);

                cv::calcOpticalFlowFarneback(imGrayPrev, imGray, flow, 0.5, 3, 15, 3, 5, 1.2, 0);


                //-----------------Representation------------------------------//

                drawOptFlowMap(flow, imGrayPrev, 16, CV_RGB(0, 255, 0)); // test drawing

                // display the video frame
                cv::imshow("Video", imGrayPrev);
            }
        }





        ///--------------HOG+Camshift + Kalman Filter--------------------------//

        else if(algo == CAMSHIFT_KALMAN_FILTER)
        {

            //camshift
            // Bootstrap the camshift ROIs from HOG every 20 frames while no
            // ROI is being tracked yet.
            if(i%20 == 0 && roiCamShift.size() == 0)
            {
                roiHogDetected = hogDetection(sequence[i], hog);
                refineROI(roiCamShift, detected, roiHogDetected);

                //test
                if(roiCamShift.size() != 0)
                {
                    /*
                    roiCamShift[0].x += 30;
                    roiCamShift[0].width -= 60;
                    roiCamShift[0].y += 40;
                    roiCamShift[0].height -= 100;
                    */
                    // Shrink the ROI to roughly the torso to stabilise the
                    // histogram back-projection.
                    roiCamShift[0].x += roiCamShift[0].width/2;
                    roiCamShift[0].width = roiCamShift[0].width/3;
                    //roiCamShift[0].y += roiCamShift[0].height/2;
                    roiCamShift[0].height = roiCamShift[0].height/3;
                }
                //
            }

            backProj = computeProbImage(sequence[i], roiCamShift, hist, detected);

            if (roiCamShift.size() != 0)
                cv::imshow("temp", backProj[0]);
            ///-------Test-Camshift--------------------///

            rectCamShift.resize(roiCamShift.size());

            for(size_t j=0;j<roiCamShift.size();j++)
            {
                /*
                std::cout<<roiCamShift[j]<<std::endl;
                cv::rectangle(backProj[j], roiCamShift[j], cv::Scalar( 255, 0, 0), 2, 8, 0 ); //DEBUG
                cv::imshow("before camshift", backProj[j]);
                cv::waitKey(0);
                */
                // meanShift result drawn in green, CamShift result in red,
                // the raw ROI in blue below — lets the two be compared live.
                cv::Rect rectMeanShift;
                rectMeanShift = roiCamShift[j];
                cv::meanShift(backProj[j], rectMeanShift, cv::TermCriteria(cv::TermCriteria::EPS | cv::TermCriteria::COUNT, 10, 1));

                cv::rectangle(sequence[i], rectMeanShift, cv::Scalar( 0, 255, 0), 2, 8, 0 );

                rectCamShift[j] = cv::CamShift(backProj[j], roiCamShift[j], cv::TermCriteria(cv::TermCriteria::EPS | cv::TermCriteria::COUNT, 10, 1));
                rectCamShift[j].points(rect_points);


                for(int k = 0; k < 4; k++)
                    cv::line(sequence[i], rect_points[k], rect_points[(k+1)%4], cv::Scalar( 0, 0, 255), 2, 8);
            }
            ///----------------------------------------///

            //-----------------Representation----------------------------------//

            // draw the rectangle


            for(size_t j=0;j<roiCamShift.size();j++)
                cv::rectangle(sequence[i], roiCamShift[j], cv::Scalar( 255, 0, 0), 2, 8, 0 );

            // display the video frame
            cv::imshow("Video", sequence[i]);
        }






        ///------------------BACKGROUND-SUBSTRACTION---------------------------//

        else if(algo == BACKGROUND_LK)
        {
            // Re-detect via MOG2 background subtraction every 10 frames; the
            // helper also sets `tracking` to request feature (re)initialisation.
            if(i%10 == 0) // == 0 for testing
            {
                backgroundSubstractionDetection(sequence[i], detectedPedestrianFiltered, pMOG2, tracking);
            }

            if(tracking == GOOD_FEATURES_TO_TRACK)
            {
                // (Re)initialise corner features and the Kalman filter from
                // the freshly detected pedestrian boxes.
                featuresDetected.resize(detectedPedestrianFiltered.size());
                featuresDetected = featuresDetection(sequence[i], detectedPedestrianFiltered);
                previousFeaturesDetected.resize(featuresDetected.size());
                previousFeaturesDetected = featuresDetected;

                tracking = LUCAS_KANADE;

                // Constant-velocity transition model (same layout as `Kalman`).
                KF.transitionMatrix.at<float>(0,0) = 1;
                KF.transitionMatrix.at<float>(0,1) = 0;
                KF.transitionMatrix.at<float>(0,2) = 1;
                KF.transitionMatrix.at<float>(0,3) = 0;
                KF.transitionMatrix.at<float>(1,0) = 0;
                KF.transitionMatrix.at<float>(1,1) = 1;
                KF.transitionMatrix.at<float>(1,2) = 0;
                KF.transitionMatrix.at<float>(1,3) = 1;
                KF.transitionMatrix.at<float>(2,0) = 0;
                KF.transitionMatrix.at<float>(2,1) = 0;
                KF.transitionMatrix.at<float>(2,2) = 1;
                KF.transitionMatrix.at<float>(2,3) = 0;
                KF.transitionMatrix.at<float>(3,0) = 0;
                KF.transitionMatrix.at<float>(3,1) = 0;
                KF.transitionMatrix.at<float>(3,2) = 0;
                KF.transitionMatrix.at<float>(3,3) = 1;

                measurement.setTo(cv::Scalar(0));

                for(size_t j=0;j<featuresDetected.size();j++)
                {
                    detectedPedestrianFiltered[j] = cv::boundingRect(featuresDetected[j]);
                }

                // NOTE(review): indexes [0] without checking the vectors are
                // non-empty — presumably guaranteed by the helper; confirm.
                KF.statePre.at<float>(0) = rectCenter(detectedPedestrianFiltered[0]).x;
                KF.statePre.at<float>(1) = rectCenter(detectedPedestrianFiltered[0]).y;
                KF.statePre.at<float>(2) = 0;
                KF.statePre.at<float>(3) = 0;

                cv::setIdentity(KF.measurementMatrix);
                cv::setIdentity(KF.processNoiseCov, cv::Scalar::all(1e-4));
                cv::setIdentity(KF.measurementNoiseCov, cv::Scalar::all(1e-1));
                cv::setIdentity(KF.errorCovPost, cv::Scalar::all(.1));
            }

            else if(tracking == LUCAS_KANADE)
            {
                for(size_t j=0;j<featuresDetected.size();j++)
                {
                    detectedPedestrianFiltered[j] = cv::boundingRect(featuresDetected[j]);
                }

                featuresDetected = lucasKanadeTracking(previousSequence, sequence[i], previousFeaturesDetected);

                previousFeaturesDetected.clear();
                previousFeaturesDetected.resize(featuresDetected.size());
                previousFeaturesDetected = featuresDetected;

                prediction = KF.predict();
                cv::Point predictPt(prediction.at<float>(0),prediction.at<float>(1));

                // Get mouse point
                measurement(0) = rectCenter(detectedPedestrianFiltered[0]).x;
                measurement(1) = rectCenter(detectedPedestrianFiltered[0]).y;

                cv::Point measPt(measurement(0),measurement(1));

                // The "correct" phase that is going to use the predicted value and our measurement
                cv::Mat estimated = KF.correct(measurement);
                cv::Point statePt(estimated.at<float>(0),estimated.at<float>(1));

                //cv::circle(sequence[i], measPt, 1, cv::Scalar(0,255,0), 7, 24);
                //cv::circle(sequence[i], predictPt, 1, cv::Scalar(0,255,255), 7, 24);
            }


            //--------Representation--------------------

            if(tracking != NOTHING_TO_TRACK)
                findUpperBody(classifier, sequence[i], detectedPedestrianFiltered, rect_upper_body);

            for(size_t j=0;j<featuresDetected.size();j++)
            {

                for(size_t k=0;k<rect_upper_body[j].size();k++)
                {
                    cv::rectangle(sequence[i], rect_upper_body[j][k], cv::Scalar( 0, 255, 0), 2, 8, 0 );
                }
                //detectedPedestrianFiltered[j] = cv::boundingRect(featuresDetected[j]);
                cv::rectangle(sequence[i], cv::boundingRect(featuresDetected[j]), cv::Scalar( 0, 0, 255), 2, 8, 0 );
            }


            // display the video frame
            cv::imshow("Video", sequence[i]);
        }

        else if(algo == BACKGROUND_KALMAN)
        {
            // Track a single pedestrian's centre with a Kalman filter fed by
            // background-subtraction detections every `refresh` frames.
            int refresh = 6;

            if(i%refresh == 0)
            {
                backgroundSubstractionDetection(sequence[i], detectedPedestrianFiltered, pMOG2, tracking);
            }
            if(initKalman && detectedPedestrianFiltered.size() != 0)
            {
                // Seed the state with the first detection, zero velocity.
                Kalman.statePre.at<float>(0) = rectCenter(detectedPedestrianFiltered[0]).x;
                Kalman.statePre.at<float>(1) = rectCenter(detectedPedestrianFiltered[0]).y;
                Kalman.statePre.at<float>(2) = 0;
                Kalman.statePre.at<float>(3) = 0;

                initKalman = false;
            }

            if(detectedPedestrianFiltered.size() != 0)
            {
                predict = Kalman.predict();
                cv::Point predictPt(predict.at<float>(0),predict.at<float>(1));


                // The "correct" phase that is going to use the predicted value and our measurement
                if(i%refresh == 0)
                {
                    rectK = findROI(predictPt, detectedPedestrianFiltered);

                    // Only correct when the new ROI does not merge with the
                    // previous one (fusionRects == overlapping/duplicated box).
                    if(!fusionRects(prevRectK, rectK))
                    {
                        cv::Point refPoint = rectCenter(rectK);


                        // Get center point
                        measurmt(0) = refPoint.x;
                        measurmt(1) = refPoint.y;


                        cv::Point measPt(measurmt(0),measurmt(1));
                        cv::Mat estim = Kalman.correct(measurmt);
                        cv::Point statePt(estim.at<float>(0),estim.at<float>(1));
                    }

                    prevRectK = rectK;
                }

                // Speed magnitude from the velocity components, shown as text.
                std::string str = std::to_string(sqrt(pow(Kalman.statePre.at<float>(2), 2)+pow(Kalman.statePre.at<float>(3), 2)));

                //cv::circle(sequence[i], measPt, 1, cv::Scalar(0,255,0), 7, 24);
                cv::circle(sequence[i], predictPt, 1, cv::Scalar(0,255,255), 7, 24);
                cv::putText(sequence[i], str, predictPt, cv::FONT_HERSHEY_PLAIN, 1.0, CV_RGB(255,0,0), 2.0);
            }

            cv::imshow("Video", sequence[i]);

        }


        //------------------CLEAR-VARIABLES------------------------------------//

        detectedPedestrian.clear();
        featuresDetected.clear();
        boxes.clear();

        previousSequence.release();

        flow.release();
        imGray.release();
        imGrayPrev.release();

        roiHogDetected.clear();
        backProj.clear();

        //------------------STOP-CONDITIONS------------------------------------//

        // NOTE(review): fps stays 0 for image sequences unless extractVideoData
        // sets it, making 1000/fps a division by zero here — confirm.
        if (cv::waitKey((int)(1000/fps)) == 27) //wait for 'esc' key press for 30ms. If 'esc' key is pressed, break loop
        {
            std::cout << "esc key is pressed by user" << std::endl;
            return 0;
        }
    }

    cv::waitKey(0);
    return 0;
}
예제 #5
0
	static void dbscanCalculator(char * inputFile, int minPts, float removePercentage){
		// Run DBSCAN over the DIMENSION-dimensional points listed in `inputFile`
		// (one point per line: "<id> <coord_0> ... <coord_{DIMENSION-1}>",
		// whitespace separated). `minPts` is the usual DBSCAN density
		// threshold; `removePercentage` is the fraction of the largest
		// k-distances zeroed out as outliers before the elbow heuristic picks
		// eps automatically. Results go to
		// "<input>_MinPts_<m>_RemovePercent_<p>_EPS_<eps>.dat" as lines of
		// "<node> <community> <isSeed>" (community 0 == noise).

		clock_t start_time, end_time;	  // clock_t
		start_time = clock();				  // Start_Time

		int nodeNum = 0;
		std::string line;
		std::string del;

		std::ifstream nodeToCommunity1; 	std::ifstream nodeToCommunity2;
		std::ofstream ofs;		std::ofstream distanceOutputStream;
		std::string inputFileName(inputFile);

		std::stringstream out;
		out << minPts;

		std::ostringstream ss;
		ss << removePercentage * 100;
		std::string removeP(ss.str());

		nodeToCommunity1.open(inputFile);

		// First pass: count the points. (The original tokenised each line via
		// strcpy/strtok into a fixed 256-byte buffer, which could overflow on
		// long lines; counting lines needs no tokenising at all.)
		if (nodeToCommunity1.is_open()){
			while (getline(nodeToCommunity1, line)){
				nodeNum++;
			}
		}
		else{
			std::cout << "can't read file" << std::endl;
		}

		nodeToCommunity1.close();

		// Robustness: nothing to cluster (missing or empty file).
		if (nodeNum == 0){
			return;
		}

		nodeToCommunity2.open(inputFile);

		int* communitySelf = new int[nodeNum];

		std::cout<<"Dimension : "<<DIMENSION<<std::endl;
		float* points[DIMENSION];
		for(int i = 0; i < DIMENSION; i++){
			points[i] = new float[nodeNum];
		}

		bool* visited = new bool[nodeNum];
		int* countN = new int[nodeNum];
		int* communityInfo = new int[nodeNum];
		bool* isSeed = new bool[nodeNum];
		for (int ttt = 0; ttt < nodeNum; ttt++){
			for(int j = 0; j < DIMENSION; j++){
				points[j][ttt] = 0.0f;
			}
			visited[ttt] = false;
			countN[ttt] = 0;
			communityInfo[ttt] = -1;	// -1 == unclassified, 0 == noise
			isSeed[ttt] = false;
		}

		// Second pass: parse the coordinates. The first token of each line is
		// the node id and is discarded; malformed lines leave the remaining
		// coordinates at 0 instead of crashing (strtok(NULL,...) returned NULL
		// in the original, which was undefined behaviour on short lines).
		int counter = 0;
		std::string tempVar;
		if (nodeToCommunity2.is_open()){
			while (getline(nodeToCommunity2, line) && counter < nodeNum){
				std::istringstream iss(line);
				iss >> del;	// discard the leading id token
				for(int j = 0; j < DIMENSION; j++){
					if (iss >> tempVar){
						points[j][counter] = atof(tempVar.c_str());
					}
				}
				counter++;
			}
		}
		else{
			std::cout << "can't read file" << std::endl;
		}

		// k-distance computation: dist_vec[i] is the distance from point i to
		// its minPts-th nearest neighbour (clamped to the data size so
		// minPts > nodeNum cannot index out of bounds).
		float* dist_vec = new float[nodeNum];
		float* dist_sorted = new float[nodeNum];
		float eps;
		int kIndex = (minPts < nodeNum ? minPts : nodeNum) - 1;

		for (int i = 0; i < nodeNum; i++){
			for (int j = 0; j < nodeNum; j++){
				dist_sorted[j] = calcDist(points, i, j);		//precalc
			}
			std::sort(dist_sorted, dist_sorted + nodeNum);
			dist_vec[i] = dist_sorted[kIndex];
		}

		//##############################################################
		//To Select EPS
		//##############################################################

		std::sort(dist_vec, dist_vec + nodeNum, std::greater< float>());

		//NORMALIZATION ********************
		//Removing outlier to maximal value

		double trunc = (nodeNum * removePercentage);
		int tr = (int)trunc;
		std::cout << "Truncated # = " << tr << std::endl;

		for (int i = 0; i < tr; i++){
			dist_vec[i] = 0;
		}

		std::sort(dist_vec, dist_vec + nodeNum, std::greater< float>());

		//**********************************

//		std::string outputdistance = inputFileName + "_MinPts_" + out.str() + "_RemovePercent_" + removeP + "_distance.dat";
//		distanceOutputStream.open(outputdistance.c_str());
//		for (int ppo = 0; ppo < nodeNum; ppo++){
//			distanceOutputStream << ppo + 1 << "\t" << dist_vec[ppo] << std::endl;
//		}
//		distanceOutputStream.close();

		//Save to point array
		point* original = new point[nodeNum];
		for (int i = 0; i < nodeNum; i++){
			original[i].x = i;
			original[i].y = dist_vec[i];
		}

		//find minVal, maxVal of Y
		float maxVal = -1;
		float minVal = 999999;
		for (int i = 0; i < nodeNum; i++){
			if (original[i].y >= maxVal){
				maxVal = original[i].y;
			}
			if (original[i].y <= minVal){
				minVal = original[i].y;
			}
		}
		//min-max normalization
		for (int i = 0; i < nodeNum; i++){
			original[i].x = ((original[i].x - 0) / nodeNum) * 1;
			original[i].y = ((original[i].y - minVal) / (maxVal - minVal));
		}

		// Rotate the k-distance curve by -45 degrees so its knee becomes the
		// minimum. BUG FIX: the original overwrote x and then used the NEW x
		// when computing y; rotate through temporaries instead.
		for (int i = 0; i < nodeNum; i++){
			float rotX = cos(-PI / 4.0f) * original[i].x + sin(-PI / 4.0f)*(original[i].y - 1.0f);
			float rotY = -sin(-PI / 4.0f) * original[i].x + cos(-PI / 4.0f)*(original[i].y - 1.0f);
			original[i].x = rotX;
			original[i].y = rotY;
		}

		minVal = 999999;
		int minValueIdx = -1;
		for (int i = 0; i < nodeNum; i++){
			if (original[i].y <= minVal){
				minVal = original[i].y;
				minValueIdx = i;
			}
		}

		std::cout << "Approximated Value for DBSCAN = " << dist_vec[minValueIdx] << std::endl;
		eps = dist_vec[minValueIdx];

		// BUG FIX: array new[] must be paired with delete[] (was plain delete).
		delete[] dist_sorted;
		delete[] dist_vec;

		//##############################################################
		//Algorithm Start
		//##############################################################

		// Precompute each point's eps-neighbourhood size.
		for (int i = 0; i < nodeNum; i++){
			for (int j = 0; j < nodeNum; j++){
				if (calcDist(points, i, j) <= eps){
					countN[i]++;
				}
			}
		}


		int currentCmty = 0;
		int icmty;
		std::set< int> setN;

		for (int i = 0; i < nodeNum; i++){
			visited[i] = true;   //Mark P as visited

			setN.clear();

			if (countN[i] >= minPts){   //NeighborPts = regionQuery(P, eps)
				isSeed[i] = true;

				if (communityInfo[i] == -1){
					communityInfo[i] = ++currentCmty;
				}
				icmty = communityInfo[i];

				for (int j = 0; j < nodeNum; j++){  //insert one hop
					if (i == j)
						continue;

					if (calcDist(points, i, j) <= eps){
						setN.insert(j);
						if (countN[j] >= minPts){
							isSeed[j] = true;
						}
					}
				}

				// Expand the cluster: breadth-first growth through every
				// density-reachable point in setN.
				for (std::set<int >::iterator IterPos = setN.begin(); IterPos != setN.end();){
					IterPos = setN.begin();

					int cur = *IterPos;
					setN.erase(IterPos++);
					if (visited[cur] == false){
						visited[cur] = true;
						for (int k = 0; k < nodeNum; k++){
							if (cur == k)
								continue;

							if (calcDist(points, cur, k) <= eps){
								setN.insert(k);
								if (countN[k] >= minPts){
									isSeed[k] = true;
								}
							}
						}
					}

					if (communityInfo[cur] == -1 || communityInfo[cur] == 0)
						communityInfo[cur] = icmty;
				}

				for (int j = 0; j < nodeNum; j++){
					if (i == j)
						continue;

					if (calcDist(points, i, j) <= eps){
						if (visited[j] == false){   //unvisited
							visited[j] = true;
							communityInfo[j] = communityInfo[i];
						}
					}
				}
			}

			else {  //mark P as noise
				if (communityInfo[i] == -1)
					communityInfo[i] = 0;
			}
		}
		end_time = clock();				   // End_Time


		std::ostringstream oout;
		oout << eps;
		std::string varAs = oout.str();
		std::string outputname = inputFileName + "_MinPts_" + out.str() + "_RemovePercent_" + removeP + "_EPS_" + varAs + ".dat";
		ofs.open(outputname.c_str());
		for (int z = 0; z< nodeNum; z++){
			ofs << z + 1 << "\t" << communityInfo[z] << "\t" << isSeed[z] << std::endl;
		}

		printf("Time : %f\n", ((double)(end_time - start_time)) / CLOCKS_PER_SEC);
		printf("######################################\nDBSCAN IS FINISHED!");

		delete[] original;
		ofs.close();
		nodeToCommunity2.close();

		for(int z = 0; z < DIMENSION; z++){
			delete[] points[z];
		}

		// BUG FIX: countN was never released in the original.
		delete[] countN;
		delete[] communityInfo;
		delete[] visited;
		delete[] isSeed;
		delete[] communitySelf;
	}
예제 #6
0
  // Compress a single file into a new zip archive. The entry name inside the
  // archive is the input's base name (OEM codepage). Returns the last minizip
  // status code (0 == ZIP_OK on success, non-zero/-1 on failure).
  int ZipFile( const WCHAR* inputFile, const WCHAR* outputFile, int method, int compressionLevel )
  {
	int err = -1;

    if ( ( inputFile != NULL ) && ( outputFile != NULL ) )
    {
		NSFile::CFileBinary oFile;
		if(oFile.OpenFile(inputFile))
		{
			DWORD dwSizeRead;
			BYTE* pData = new BYTE[oFile.GetFileSize()];
			if(oFile.ReadFile(pData, oFile.GetFileSize(), dwSizeRead))
			{
				zipFile zf = zipOpenHelp(outputFile);
				// Guard against a failed archive open (was dereferenced
				// unconditionally before).
				if ( zf != NULL )
				{
					zip_fileinfo zi;

					zi.tmz_date.tm_sec = zi.tmz_date.tm_min = zi.tmz_date.tm_hour =
						zi.tmz_date.tm_mday = zi.tmz_date.tm_mon = zi.tmz_date.tm_year = 0;
					zi.dosDate = 0;
					zi.internal_fa = 0;
					zi.external_fa = 0;

#if defined(_WIN32) || defined (_WIN64)
					// Stamp the entry with the current local time on Windows.
					SYSTEMTIME currTime;

					GetLocalTime( &currTime );

					zi.tmz_date.tm_sec = currTime.wSecond;
					zi.tmz_date.tm_min = currTime.wMinute;
					zi.tmz_date.tm_hour = currTime.wHour;
					zi.tmz_date.tm_mday = currTime.wDay;
					zi.tmz_date.tm_mon = currTime.wMonth;
					zi.tmz_date.tm_year = currTime.wYear;
#endif

					// The entry name is everything after the last backslash
					// (or the whole path if there is none).
					wstring inputFileName( inputFile );

					wstring::size_type pos = inputFileName.find_last_of( L'\\' );

					wstring zipFileName;

					if ( pos != wstring::npos )
					{
						zipFileName = wstring( ( inputFileName.begin() + pos + 1 ), inputFileName.end() );
					}
					else
					{
						zipFileName = wstring( inputFileName.begin(), inputFileName.end() );
					}
					std::string zipFileNameA = codepage_issue_fixToOEM(zipFileName);
					err = zipOpenNewFileInZip( zf, zipFileNameA.c_str(), &zi, NULL, 0, NULL, 0, NULL, method, compressionLevel );
					err = zipWriteInFileInZip( zf, pData, dwSizeRead );
					err = zipCloseFileInZip( zf );
					err = zipClose( zf, NULL );
				}
			}
			RELEASEARRAYOBJECTS(pData);
		}
	}

    // BUG FIX: the original always returned `false` (0), discarding `err`, so
    // callers could never detect a failure.
    return err;
  }
예제 #7
0
// Converts the TwoPaCo binary junction format into a human readable dump.
// Without -g, positions are streamed one per line as "chr pos id".
// With -g/--group, all positions sharing a junction id are sorted by
// position and printed together on one line.
int main(int argc, char * argv[])
{
	try
	{
		TCLAP::CmdLine cmd("This utility converts the binary format into human readable one", ' ', "0");

		TCLAP::SwitchArg group("g", "group", "Group together positions of the same junctions", cmd, false);

		TCLAP::UnlabeledValueArg<std::string> inputFileName("infile",
			"input file name",
			true,
			"",
			"file name",
			cmd);
		
		cmd.parse(argc, argv);
		TwoPaCo::JunctionPosition pos;
		TwoPaCo::JunctionPositionReader reader(inputFileName.getValue().c_str());

		if (group.isSet())
		{
			// Read everything up front, then bucket positions by junction id.
			std::vector<EqClass> eqClass;
			std::vector<TwoPaCo::JunctionPosition> junction;
			while (reader.NextJunctionPosition(pos))
			{
				junction.push_back(pos);
			}

			std::sort(junction.begin(), junction.end(), CompareJunctionsById);
			for (size_t i = 0; i < junction.size();)
			{
				// [i, j) is the run of entries sharing the same junction id.
				size_t j = i;
				for (; j < junction.size() && junction[i].GetId() == junction[j].GetId(); j++);
				std::sort(junction.begin() + i, junction.begin() + j, CompareJunctionsByPos);
				eqClass.push_back(EqClass());
				eqClass.back().label = junction[i].GetId();
				for (size_t k = i; k < j; k++)
				{
					eqClass.back().position.push_back(junction[k]);
				}

				i = j;
			}

			std::sort(eqClass.begin(), eqClass.end(), CompareJunctionClasses);
			// FIX: iterate by const reference; the old `for (auto junctionClass : eqClass)`
			// copied each EqClass (label + position vector) every iteration.
			for (const auto & junctionClass : eqClass)
			{
				for (const auto & j : junctionClass.position)
				{
					std::cout << j.GetChr() << ' ' << j.GetPos() << "; ";
				}

				std::cout << std::endl;
			}
		}
		else
		{
			// Ungrouped: stream positions straight to stdout.
			while (reader.NextJunctionPosition(pos))
			{
				std::cout << pos.GetChr() << ' ' << pos.GetPos() << ' ' << pos.GetId() << std::endl;
			}
		}
	}
	catch (TCLAP::ArgException &e)
	{
		std::cerr << "error: " << e.error() << " for arg " << e.argId() << std::endl;
		return 1;
	}

	return 0;
}
예제 #8
0
// Detects circles and squares in a binary BMP image: labels each white
// connected component, classifies it by area, writes a randomly colored
// labeled image to the output file, and prints perimeter estimates.
int main( int argc, char* argv[] )
{
    // get input/output image file names from command line
    if (argc != 3)
    {
        std::cout << "Usage instructions: " << std::endl;
        std::cout << "> hw5.exe inputFileName.bmp coloredOutputFileName.bmp" << std::endl;
        return -1;
    }
    std::string inputFileName(argv[1]);
    std::string coloredOutputFileName(argv[2]);

    // read image from input file
    std::cout << "Reading input image: " << inputFileName << std::endl;
    Image myImage;
    bool success = myImage.readFromBMPFile(inputFileName);
    if (! success)
    {
        std::cout << "Error reading input image." << std::endl;
        return -1;
    }

    int seedpixel = 0;  // component area (value returned by markConnectedComponent)
    int numCircles = 0; // number of circles found
    int numSquares = 0; // number of squares found
    int ccLabel = 0;    // connected component counter, capped at 100

    // location of the next unlabeled foreground pixel
    pixelLocation foundLoc;
    foundLoc.r = 0;
    foundLoc.c = 0;

    Circle circleObj;  // reused to convert area -> radius -> perimeter
    cSquare squareObj; // reused to convert area -> side length -> perimeter

    int cVect2[100];   // circle perimeters (capacity matches the ccLabel cap)
    int sqVect2[100];  // square perimeters

    // Label each white (255) connected component and classify it by area.
    while(findPixelLocationWithGivenValue(myImage, 255, foundLoc.r, foundLoc.c) && ccLabel<100)
    {
        ccLabel++;
        seedpixel = markConnectedComponent(myImage, foundLoc.r, foundLoc.c, 100);

        if( seedpixel >= CIRCLE)
        {
            circleObj.setRadiusFromArea(seedpixel);
            cVect2[numCircles] = circleObj.getPerimeter();
            myImage.setAllPixelsWithOldValToNewVal(100, LABELCIRCLE);
            numCircles++;
        }
        else
        {
            squareObj.setSideLengthFromArea(seedpixel);
            sqVect2[numSquares] = squareObj.getPerimeter();
            myImage.setAllPixelsWithOldValToNewVal(100, LABELSQUARE);
            numSquares++;
        }
    }

    // Write the labeled-shape image, assigning each label a random color.
    // FIX: write to the file named on the command line; the old code wrote
    // to the hard-coded name "My Mona Lisa" and never used argv[2].
    myImage.switchToRandomColorMapping();
    myImage.writeToBMPFile(coloredOutputFileName);

    // Report each circle's estimated perimeter and radius (perimeter / 2*pi).
    std::cout << "Number of CIRCLES: " << numCircles << std::endl;
    std::cout << "Circle Perimeters (Radius): " << std::endl;
    for(int i = 0; i < numCircles ; i++)
    {
        std::cout << cVect2[i] << " " << cVect2[i]/(2*3.14159) << std::endl;
    }

    // Report each square's estimated perimeter and side length (perimeter / 4).
    std::cout << "Number of SQUARES: " << numSquares << std::endl;
    std::cout << "Square Perimeters (Side Length):" << std::endl;
    for(int j = 0; j < numSquares ; j++)
    {
        std::cout << sqVect2[j] << " " << sqVect2[j]/4 << std::endl;
    }

    return 0;
}
예제 #9
0
//==============================================================================
// Thread body: drains the job queue, converting each raw float temp data file
// into one audio file (wav/aiff/flac) per processor output channel.
//
// FIXES versus the original:
//  * The per-channel setup loop's error paths used `continue`, which continued
//    the FOR loop (not the intended outer while) after fileInputStream had
//    already been deleted -> use-after-free on the next channel. Error paths
//    now set a flag and break, with one centralized cleanup below the loop.
//  * tmpAudioFile was leaked in the "unable to create file" branch (it was
//    not yet owned by audioFiles).
//  * The stream/writer failure branches explicitly deleted tmpAudioFile even
//    though audioFiles already owned it, causing a double delete via
//    audioFiles.clear(true).
//  * The unadopted output stream leaked when createWriterFor failed.
//  * The transfer buffer is now a stack MemoryBlock (no manual new/delete).
void AudioFileConverter::run()
{
	while ( getQueueSize() > 0 )
	{
		{   // lock jobQueue before retrieving a task
			const ScopedLock lock (queueLock);
			task  = jobQueue[0];
		}

		/* try opening the file */
		File	inputDataFile( task->getFileName() );
		String  inputFileName( inputDataFile.getFullPathName() );

		if ( !inputDataFile.existsAsFile() || (inputDataFile.getSize() == 0) )
		{
			dbgOut(L"** AudioFileConverter ** Invalid or corrupted temporary file:\t" + inputFileName);
			removeFromQueue();
			continue;
		}

		/* try creating the input stream */
		FileInputStream*	fileInputStream	=	inputDataFile.createInputStream();
		if (fileInputStream == NULL)
		{
			dbgOut(L"** AudioFileConverter ** Unable to create input stream for file:\t" + inputFileName);
			removeFromQueue();
			continue;
		}

		dbgOut(L"");
		dbgOut(L" ***  AudioFileConverter ***");
		dbgOut(L"** AudioFileConverter ** Converting file:\t" + inputFileName 
               + L" (" + String( inputDataFile.getSize() ) + L" b)");

		int		processorOutputs = task->getChannelNumber();
		const int bytesPerSample = processorOutputs * sizeof(float);
		int		bufferSize		= task->getBufferSize();
		double	samplingRate	= task->getSamplingRate();
		int		bitDepth		= task->getBitDepth();
		String	audioFormatName = task->getFormat();

		AudioSampleBuffer tempBuffer(1, bufferSize);

		// declare classes needed to save the format
		OwnedArray<AudioFormat>			someAudioFormats;
		OwnedArray<AudioFormatWriter>	audioFormatWriters;
		OwnedArray<File>				audioFiles;
		Array<FileOutputStream*>		outStreams;
		String							audioFileName;

		AudioFormatWriter*	tmpWriter;
		FileOutputStream*	tmpStream;
		File*				tmpAudioFile;

		String outputDir = inputDataFile.getParentDirectory().getFullPathName();

		// Set when any per-channel setup step fails; checked after the loop.
		bool setupFailed = false;

		for (int i=0; i < processorOutputs ; i++)
		{
			// Delete temporary files
			File tmpDataFile(outputDir + File::separatorString + L"channel" + String::formatted("%.2d", i ) + ".dat");

			if ( tmpDataFile != File::nonexistent)
			{
				dbgOut( L"** AudioFileConverter ** \tDeleting temporary file:\t" + tmpDataFile.getFullPathName() );
				tmpDataFile.deleteFile();
			}
			else
			{
				dbgOut( "** AudioFileConverter ** Unable to delete temporary file:\t\t" + tmpDataFile.getFullPathName() );
			}

			// Define the format (wav is default)
			if (audioFormatName == "wav")
				someAudioFormats.add( new WavAudioFormat() );

			else if (audioFormatName == "aiff")
				someAudioFormats.add( new AiffAudioFormat() );

			else if (audioFormatName == "flac")
				someAudioFormats.add( new FlacAudioFormat() );

//			else if (audioFormatName == "ogg")
//				someAudioFormats.add( new OggVorbisAudioFormat() );

			else
				someAudioFormats.add( new WavAudioFormat() );

			audioFileName = outputDir + File::separatorString + "channel" + String::formatted("%.2d",i) + someAudioFormats[i]->getFileExtensions()[0];

			tmpAudioFile = new File (audioFileName);
			if (*tmpAudioFile == File::nonexistent)
			{
				dbgOut( L"** AudioFileConverter ** Unable to create file:\t" + audioFileName );
				delete tmpAudioFile;	// not yet owned by audioFiles: free it here
				setupFailed = true;
				break;
			}

			audioFiles.add( tmpAudioFile );	// audioFiles now owns tmpAudioFile

			// Delete existing files
			if (audioFiles[i]->existsAsFile())
			{
				dbgOut( "** AudioFileConverter ** \tDeleting existing audio file:\t\t" + audioFileName );			
				if	(!audioFiles[i]->deleteFile())
				{
					dbgOut( L"** AudioFileConverter ** Unable to delete existing file:\t" + audioFileName );
					setupFailed = true;
					break;
				}
			}

			dbgOut( "** AudioFileConverter ** \tSaving audio file:\t\t" + audioFileName );

			/* Create output stream for this file */
			tmpStream = audioFiles[i]->createOutputStream();
			if (tmpStream == NULL)
			{
				dbgOut( L"** AudioFileConverter ** Unable to create output stream for file:\t" + audioFileName );
				// tmpAudioFile is owned by audioFiles; no explicit delete here.
				setupFailed = true;
				break;
			}

			outStreams.add( tmpStream );

			/* Create Audio Format Writer */
			tmpWriter = someAudioFormats[i]->createWriterFor(	outStreams[i],		// streamToWriteTo,
																samplingRate,		// sampleRateToUse,
																1,					// numberOfChannels,
																someAudioFormats[i]->getPossibleBitDepths().getLast(),	// bitsPerSample - Get the maximum possible bit depth for this format
																NULL,				// metadataValues,
																0 );

			if (tmpWriter == NULL)
			{
				dbgOut( L"** AudioFileConverter ** Unable to create audio format writer for:\t" + audioFileName );
				delete tmpStream;		// the writer never adopted this stream
				outStreams.removeLast();
				setupFailed = true;
				break;
			}
			audioFormatWriters.add( tmpWriter );
		}

		if (setupFailed)
		{
			// Deleting the writers also deletes the streams they adopted,
			// so outStreams is cleared without deleting.
			audioFormatWriters.clear(true);
			someAudioFormats.clear(true);
			audioFiles.clear(true);
			outStreams.clear();

			delete fileInputStream;
			removeFromQueue();
			continue;	// correctly moves on to the next task in the while loop
		}

		// Write data to the audio files. The temp file stores, per block,
		// bufferSize samples for each channel back to back (planar layout).
		// NOTE(review): dataBlockSize uses bitDepth/8 while bytesPerSample uses
		// sizeof(float) - presumably the temp data is always 32-bit float; verify.
		int dataBlockSize = processorOutputs * bufferSize * bitDepth/8 ;
		MemoryBlock	buffer( dataBlockSize, true );

		int64 bytesSaved = inputDataFile.getSize();

		while ( !fileInputStream->isExhausted() && (fileInputStream->getPosition() <  bytesSaved) )
		{
			float* x = (float *) buffer.getData() ;

			int bytesRead = fileInputStream->read( (void *)x, dataBlockSize );
			int numSamples = (int)( bytesRead / bytesPerSample );

			for (int ch=0; ch < processorOutputs; ch++)
			{
				tempBuffer.copyFrom(	0,					//  const int   	 destChannel,
										0,					//	const int  	destStartSample,
										x+ch*numSamples,	//	const float *  	source,
										numSamples			//	int  	numSamples	 
									);

				audioFormatWriters[ch]->write(	(const int**)(tempBuffer.getArrayOfChannels()),	//AudioFormatWriter *  writer,  
												numSamples				//const int  numSamples   
											  );
			}
		}

		//	this should delete 'owned' objects 
		audioFormatWriters.clear(true);
		someAudioFormats.clear(true);
		audioFiles.clear(true);
		// clear the outStreams without deleting objects (already deleted)
		outStreams.clear();

		// Delete and close the stream
		delete fileInputStream;	

		// Delete the data.dat file
		dbgOut( L"** AudioFileConverter ** \tDeleting temporary file:\t" + inputFileName );
		inputDataFile.deleteFile();

		// Delete the task
		removeFromQueue();		

		dbgOut( "** AudioFileConverter ** Files saved." );

	}

	dbgOut( "** AudioFileConverter ** Thread terminates." );

}