Example #1
	void watch(string vertName, string fragName)
	{
		vertShader.open(vertName);
		fragShader.open(fragName);
		
		ofAddListener(ofEvents().update, this, &AutoLoadShader::onUpdate);
	}
Example #2
	void onUpdate(ofEventArgs &e)
	{
		if (lastVertTimestamp != vertShader.getPocoFile().getLastModified()
			|| lastFragTimestamp != fragShader.getPocoFile().getLastModified())
		{
			reload();
		}
	}
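Taken together, watch() registers the files and a per-frame listener, while onUpdate() compares the stored timestamps against the files on disk and triggers a reload. A minimal sketch of how the pieces could be assembled into one helper class follows, assuming an older openFrameworks where ofFile still exposes getPocoFile(); the ofShader member, the load() helper and the timestamp bookkeeping are assumptions added for illustration.

class AutoLoadShader
{
public:
	void watch(string vertName, string fragName)
	{
		vertShader.open(vertName);
		fragShader.open(fragName);
		load(vertName, fragName);
		ofAddListener(ofEvents().update, this, &AutoLoadShader::onUpdate);
	}

	void load(string vertName, string fragName)
	{
		shader.load(vertName, fragName);
		// Remember the current modification times so onUpdate() can detect changes.
		lastVertTimestamp = vertShader.getPocoFile().getLastModified();
		lastFragTimestamp = fragShader.getPocoFile().getLastModified();
	}

	void reload()
	{
		load(vertShader.path(), fragShader.path());
	}

	void onUpdate(ofEventArgs &e)
	{
		if (lastVertTimestamp != vertShader.getPocoFile().getLastModified()
			|| lastFragTimestamp != fragShader.getPocoFile().getLastModified())
		{
			reload();
		}
	}

	ofShader shader;                                   // assumed: the shader being hot-reloaded
	ofFile vertShader, fragShader;
	Poco::Timestamp lastVertTimestamp, lastFragTimestamp;
};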
// ---------------------------------------------------------------------------------------------------------------------------------------------------
//
std::time_t ofxAutoReloadedShader::getLastModified( ofFile& _file )
{
	if( _file.exists() )
	{
        return std::filesystem::last_write_time(_file.path());
	}
	else
	{
		return 0;
	}
}
std::time_t UIShader::getLastModified( ofFile& _file ){
	if( _file.exists() ){
		Poco::File& pocoFile		= _file.getPocoFile();
		Poco::Timestamp timestamp	= pocoFile.getLastModified();
		std::time_t fileChangedTime = timestamp.epochTime();
		
		return fileChangedTime;
	} else {
		return 0;
	}
}
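The two getLastModified() variants above fetch the same information through different backends: the first via std::filesystem (which, in openFrameworks builds where that alias points at boost::filesystem, returns a std::time_t directly), the second via the Poco::File that older ofFile versions expose. With a genuine C++17 std::filesystem, last_write_time() returns a file_time_type instead of a time_t, so a conversion step is needed; a sketch under that assumption:

#include <chrono>
#include <ctime>
#include <filesystem>

// Convert std::filesystem's file_time_type to std::time_t (C++17).
// The clock arithmetic below is the common approximation; C++20 offers
// std::chrono::clock_cast for an exact conversion.
std::time_t lastWriteTimeAsTimeT(const std::filesystem::path& path)
{
	using namespace std::chrono;
	auto ftime = std::filesystem::last_write_time(path);
	auto sctp  = time_point_cast<system_clock::duration>(
		ftime - std::filesystem::file_time_type::clock::now() + system_clock::now());
	return system_clock::to_time_t(sctp);
}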
//--------------------------------------------------------------
void ofApp::processFile(ofFile & file) {
	cout << file.getAbsolutePath() << endl;
	auto extension = ofToLower(file.getExtension());
	cout << extension << endl;
	if (extension == "jpg" || extension == "jpeg" || extension == "png") {
		ofImage loader;
		ofLogNotice("ofApp::dragEvent") << "loading " << file.getFileName() << " #" << decoder.getFrame();
		loader.loadImage(file);
		ofLogNotice("ofApp::dragEvent") << "adding " << file.getFileName();
		decoder << loader;
	} else if (extension == "sl") {
		this->decoder.loadDataSet(file.getAbsolutePath());
	}
}
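The log channel names in processFile() suggest it is driven from a drag-and-drop handler. A minimal sketch of such a caller (processFile() and decoder come from the example above; the handler itself is an assumption):

void ofApp::dragEvent(ofDragInfo dragInfo){
	// Forward every dropped file to processFile(), which dispatches on the extension.
	for(auto & path : dragInfo.files){
		ofFile file(path);
		processFile(file);
	}
}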
Example #6
#include <sys/stat.h>   // for stat() and struct stat

// Returns the file's last-modification time (st_mtime), or 0 if stat() fails.
long getUpdateTime(ofFile &file) {
	struct stat fileStat;
	if (stat(file.path().c_str(), &fileStat) < 0) {
		printf("Couldn't stat file\n");
		return 0;
	}

	return fileStat.st_mtime;
}
//--------------------------------------------------------------
void ArtvertiserApp::artvertSelected(ofFile & artvertimg){

	subs_img.setup( artvertimg.getAbsolutePath() );

	state = Tracking;
	artvertInfo.stop();
	Artvert & artvert = artvertInfo.getCurrentArtvert();

	if(artvert.hasAlias()){
		ofLogVerbose("ArtvertiserApp", "artvert has an alias");
		artvertiser.setup(artvert.getAlias().getModel().getAbsolutePath(), grabber, subs_img.getImgQuad(), false, detectW, detectH );
	}else{
		ofLogVerbose("ArtvertiserApp", "artvert has no alias");
		artvertiser.setup(artvert.getModel().getAbsolutePath(), grabber, subs_img.getImgQuad(), false, detectW, detectH );
	}
}
void testApp::writeLog(){
    //-------------------------
    // Write machine-learning training data
    if(bTracking && bCalibrated){
        state = 0; // state is used for machine-learning mark-up
        if(keys['1']) state = 1;
        if(keys['2']) state = 2;
        // if(keys['3']) state = 3;

        logFile << trackers[0].lerpedPos.x << " " << trackers[0].lerpedPos.z << " "
                << trackers[1].lerpedPos.x << " " << trackers[1].lerpedPos.z << " "
                << trackers[2].lerpedPos.x << " " << trackers[2].lerpedPos.z << " " << state << endl;

        cout << trackers[0].lerpedPos.x << " " << trackers[0].lerpedPos.z << " "
             << trackers[1].lerpedPos.x << " " << trackers[1].lerpedPos.z << " "
             << trackers[2].lerpedPos.x << " " << trackers[2].lerpedPos.z << " " << state << endl;
        logFile.flush();
    }
}
Example #9
	void reload()
	{
		load(vertShader.path(), fragShader.path());
	}
void ofImage_<PixelType>::saveImage(const ofFile & file, ofImageQualityType compressionLevel){
	ofSaveImage(pixels,file.getAbsolutePath(),compressionLevel);
}
bool ofImage_<PixelType>::loadImage(const ofFile & file){
	return loadImage(file.getAbsolutePath());
}
bool ofImage_<PixelType>::load(const ofFile & file, const ofImageLoadSettings &settings){
	return load(file.getAbsolutePath(), settings);
}
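These ofImage_ overloads simply forward the ofFile's absolute path to the path-based versions, so an ofFile and a plain path are interchangeable here. A small usage sketch (the file names are placeholders; older openFrameworks versions spell the calls loadImage()/saveImage()):

ofImage img;
ofFile input("input.png");                 // placeholder file name
if(img.load(input.getAbsolutePath())){     // equivalent to the ofFile overload above
	img.save("copy.png");
}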
//--------------------------------------------------------------
void testApp::setup(){
    
    ofBackground(0);
    ofSetFrameRate(60);
    
    int PORT = 12000;
    int REMOTE_PORT = 12000;
    string REMOTE_HOST = "169.254.0.1";
    
	receiver.setup(PORT);
    sender.setup(REMOTE_HOST, REMOTE_PORT);
    
    sprintf(oscStatus, "[LOCAL PORT] %d\n[REMOTE PORT] (%s, %5d)\n", PORT, REMOTE_HOST.c_str(), REMOTE_PORT);
    
    gui.setup();
	gui.addTitle("TRACKER \n[i] hide controls");
    gui.addToggle("TRACK", bTracking).setSize(200, 20);
    calibratedButton = &gui.addToggle("CALIBRATED", bCalibrated);
    calibratedButton->setSize(200, 20);
    gui.addToggle("SAVE", bSaving).setSize(200, 20);
    gui.addButton("Load MeshLab File", bLoadMLP).setSize(200, 20);
    gui.addButton("Reset Server Counter", bReset).setSize(200, 20);

    gui.addSlider("Zoom", camZoom, -5000, 5000).setSmoothing(0.9);
    gui.addSlider("camPosX", camPosX, -200, 200).setSmoothing(0.9);
    gui.addSlider("camPosY", camPosY, -200, 200).setSmoothing(0.9);
    gui.addSlider("camRotX", camRotX, 0, 360).setSmoothing(0.9);
    gui.addSlider("camRotY", camRotY, 0, 360).setSmoothing(0.9);
    gui.addToggle("Lock top view", bTop).setSize(200, 20);
    gui.addToggle("Enable ghost 1", bGhost0).setSize(200, 20);
    gui.addToggle("Enable ghost 2", bGhost1).setSize(200, 20);
    
    
    gui.addButton("Set Center", bSetCenter).setSize(200, 20);
    gui.addButton("Set Ref Point", bSetRefPoint).setSize(200, 20);
    gui.addButton("Set Ghost 1", bSetGhost0).setSize(200, 20);
    gui.addButton("Set Ghost 2", bSetGhost1).setSize(200, 20);
    status = &gui.addTitle("STATUS");
    status->setNewColumn(true);
    
	gui.loadFromXML();
    gui.show();
    
    matrixData.setup();
    
    // LOAD space references
    
    XML.loadFile(ofToDataPath("xmlSettings.xml"));
    int cX = XML.getValue("CENTER_X", 0);
    int cZ = XML.getValue("CENTER_Z", 0);
    int rX = XML.getValue("REF_X", 0);
    int rZ = XML.getValue("REF_Z", 0);
    
    center = ofVec3f(cX, 0, cZ);
    refPoint = ofVec3f(rX, 0,  rZ);
    refVector = center - refPoint;
    
    
    for(int i = 0; i < K; i++){
        kinects[i].setMatrix(matrixData.getMatrix(i));
        kinects[i].setCenter(center, refVector);
    }
    
    // LOAD GHOSTS
    int gX, gZ;
    
    gX = XML.getValue("GHOST_0_X", 0);
    gZ = XML.getValue("GHOST_0_Z", 0);
    ghost0 = ofVec3f(gX, 0, gZ);
    
    gX = XML.getValue("GHOST_1_X", 0);
    gZ = XML.getValue("GHOST_1_Z", 0);
    ghost1 = ofVec3f(gX, 0, gZ);
    
    //-----
    
    
    bTracking = false;
    bCalibrated = false;
    bSaving = false;
    pbSaving = false;
    bReset = false;
    bTop = false;
    
    
    // Purge old osc messages
    while(receiver.hasWaitingMessages()){
		ofxOscMessage m;
		receiver.getNextMessage(&m);
    }

  
    logFile.open("log.txt", ofFile::WriteOnly, false);
    /*
    bGhost0 = true;
    bGhost1 = true;
     */
}
Example #14
template <class T> void writeRaw(ofFile& out, T data) {
	out.write((char*) &data, sizeof(data)); 
}
Example #15
template <class T> void readRaw(ofFile& in, T& data) {
	in.read((char*) &data, sizeof(data)); 
}
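A round-trip sketch for the two templates above. The struct and file name are placeholders, and the pattern is only safe for trivially copyable types; the raw layout is not portable across compilers or endianness.

struct Header { int version; float scale; };             // placeholder POD type

void roundTripHeader() {
	ofFile out("header.bin", ofFile::WriteOnly, true);   // true = binary mode
	writeRaw(out, Header{1, 0.5f});
	out.close();

	Header loaded{};
	ofFile in("header.bin", ofFile::ReadOnly, true);
	readRaw(in, loaded);
	in.close();
}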
Example #16
// Create the clusters out of the image
ImgTexture ImgTextureFactory::CreateImage(const ofFile& inImageFile, const ofFile& inClusterFile)
{
	std::cout << "\nCreating Image: " << inImageFile.getFileName() << "\n";

	std::cout << "\nCreating Clustered Image: " << inClusterFile.getFileName() << "\n";

	// First create an image out of the incoming file, this can have any format
	ofImage image;
	image.loadImage(inImageFile);
	assert(image.isAllocated());

	// Load our clustered image in a pixel buffer
	ofPixels clustered_pix;
	ofLoadImage(clustered_pix, inClusterFile.getFileName());

	/*
	// Load our sample image in a pixel buffer
	ofPixels clustered_sample_pix;
	ofLoadImage(clustered_pix, inClusterFile.getFileName());
	*/

	// Now create our clustered image, this needs to be of a type (rgba, 8bc)
	assert(clustered_pix.isAllocated());
	assert(clustered_pix.getImageType() == OF_IMAGE_COLOR_ALPHA);
	assert(clustered_pix.getBitsPerPixel() == 32);

	// Will hold the final amount of clusters
	int cluster_counter = 0;

	// Used for storing the color value
	std::vector<BYTE>			colors;
	std::vector<int>			color_refs;

	// Now get the pixels and iterate over them
	// We sample how many clusters there are in the image,
	// The colors that belong to that cluster and how many times that color appears in the image
	BYTE* pixels = clustered_pix.getPixels();
	for (int x=0; x < clustered_pix.getWidth(); x++)
	{
		for(int y=0; y < clustered_pix.getHeight(); y++)
		{
			// Get the current read pos and sample color
			int read_pos = ((y*clustered_pix.getWidth())+ x) * 4;

			// Skip pixels whose alpha is (nearly) zero
			if(pixels[read_pos+3] < 2)
				continue;

			BYTE color = pixels[read_pos];

			// See if it's in the colors array, if not add
			bool	already_present = false;
			int		color_counter = 0;
			for(std::vector<BYTE>::iterator it = colors.begin(); it != colors.end(); ++it) 
			{
				// Get the stored cluster color and compare within a ±2 tolerance
				BYTE p_color = *it;
				if(!(color > p_color+2) && !(color < p_color-2))
				{
					already_present = true;
					(color_refs[color_counter])++;
					break;
				}

				// Increment color counter
				color_counter++;
			}

			if(!already_present)
			{
				colors.push_back(color);
				color_refs.push_back(1);
				cluster_counter++;
			}
		}
	}

	// Now delete the clusters that don't have enough pixels
	std::vector<BYTE> final_color_set;
	int counter = 0;
	int fclusters = 0;
	for(std::vector<int>::iterator it = color_refs.begin(); it != color_refs.end(); ++it)
	{
		if(*it >= MIN_CLUSTER_PIXEL_COUNT)
		{
			final_color_set.push_back(colors[counter]);
			fclusters++;
		}
		counter++;
	}

	// Sort the remaining ones based on color
	sort(final_color_set.begin(), final_color_set.end());

	// Return a new texture that holds the image and clusters
	return ImgTexture(image, clustered_pix, fclusters, final_color_set);
}
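A hedged call-site sketch for the factory above; the file names and the default-constructed factory are assumptions, and MIN_CLUSTER_PIXEL_COUNT is expected to be defined elsewhere in the project.

ofFile imageFile("artwork.png");                   // placeholder paths
ofFile clusterFile("artwork_clusters.png");
ImgTextureFactory factory;
ImgTexture texture = factory.CreateImage(imageFile, clusterFile);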