Example #1
File: main.c  Project: Earlz/ancientstuff
void DoArgs(int argcount,char **arg){
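    /* Walks the command-line switches: each must start with '?' and is split
       at the first ':' into a name and a value; a first argument that does
       not start with '?' is treated as a config file path and handed to
       ParseConfigFile() directly. */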
    char *current;
    char *tmp;
    unsigned int i,j=1;
    current=arg[1];
    argcount--;
    //printf(current);
    //printf("\n%i\n",argcount);
    if(*arg[1]!='?'){ParseConfigFile(arg[1]);argcount--;j=2;}
    while(argcount!=0){
        tmp=current;
        if(tmp[0]!='?'){panic("Arg does not start with \'?\'");}
        for(i=0;i<strlen(current);i++){if(tmp[i]==':'){tmp[i]=0; break;}}
        if(strcmp(tmp,"?f")==0){printf("\nTTT\n");ParseConfigFile(tmp+strlen(tmp)+1);}  //debugging purposes...
        if(strcmp(tmp,"?L_com")==0){LoadComFile(tmp+strlen(tmp)+1);}  //load a .com file
        if(strcmp(tmp,"?log")==0){SetLogFile(tmp+strlen(tmp)+1);}  //use a log file
        if(strcmp(tmp,"?L_bin")==0){LoadBinFile(tmp+strlen(tmp)+1);} //load binary file
        if(strcmp(tmp,"?L_dev")==0){printf("bah");InitExLib(tmp+strlen(tmp)+1);} //load device
       // if(strcmp(tmp,"?bios")==0){LoadBios(tmp+strlen(tmp)+1);}
        j++;
        current=arg[j];
        argcount--;
    }
}
Example #2
/**
*** ParseOptions()
**/
void ParseOptions(button_info *ub)
{
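    /* Reads module config lines from fd: "iconpath" and "pixmappath" values
       are copied, and "*MyName" lines go to ParseConfigLine() unless a config
       file was given, in which case ParseConfigFile(ub) handles the rest. */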
    char *s;
    char *items[]= {"iconpath","pixmappath",NULL,NULL};

    items[2]=mymalloc(strlen(MyName)+2);
    sprintf(items[2],"*%s",MyName);

    GetConfigLine(fd,&s);
    while(s && s[0])
    {
        switch(MatchSeveralLines(s,items,&s))
        {
        case -1:
            break;
        case 0:
            CopyString(&iconPath,s);
            break;
        case 1:
            CopyString(&pixmapPath,s);
            break;
        case 2:
            if(s && s[0] && !config_file)
                ParseConfigLine(&ub,s);
        }
        GetConfigLine(fd,&s);
    }

    if(config_file)
        ParseConfigFile(ub);

    return;
}
Example #3
File: sconfig.cpp  Project: yohoj/ITS
SConfig::SConfig()
{
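    // Builds the config node tree, then fills it from ConfigPath via
    // ParseConfigFile(); the log messages mark start ("初始化", "initializing")
    // and completion ("初始化完成", "initialization complete").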
    log.Log("SConfig:SConfig",COMMON,"初始化");
    CreateNode();
    ParseConfigFile(ConfigPath);
    log.Log("SConfig:SConfig",COMMON,"初始化完成");
    //display();
}
Example #4
File: parse.c  Project: ThomasAdam/fvwm-cvs
/**
*** ParseOptions()
**/
void ParseConfiguration(button_info *ub)
{
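	/* fvwm module setup: request "*MyName" config lines over fd, dispatch
	   imagepath/colorset/Xinerama settings as they arrive, and finally parse
	   an optional config_file with ParseConfigFile(ub). */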
	char *s;
	char *items[] =
	{
		NULL, /* filled out below */
		"imagepath",
		"colorset",
		XINERAMA_CONFIG_STRING,
		NULL
	};

	items[0] = mymalloc(strlen(MyName) + 2);
	sprintf(items[0], "*%s", MyName);

	/* send config lines with MyName */
	InitGetConfigLine(fd, items[0]);
	GetConfigLine(fd, &s);
	while (s && s[0])
	{
		char *rest;
		switch (GetTokenIndex(s,items,-1,&rest))
		{
		case -1:
			break;
		case 0:
			if (rest && rest[0] && !config_file)
			{
				ParseConfigLine(&ub, rest);
			}
			break;
		case 1:
			if (imagePath)
			{
				free(imagePath);
			}
			CopyString(&imagePath, rest);
			break;
		case 2:
			/* store colorset sent by fvwm */
			LoadColorset(rest);
			break;
		case 3:
			/* Xinerama state */
			FScreenConfigureModule(rest);
			break;
		}
		GetConfigLine(fd,&s);
	}

	if (config_file)
	{
		ParseConfigFile(ub);
	}

	free(items[0]);
	return;
}
Example #5
File: config.cpp  Project: kulseran/FISHY
Status ParseFlags(const int argc, const char **argv) {
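  // Parses "--flag=value" and "--flag value" pairs into the global flag
  // registry; once the command line is consumed, the remaining settings
  // come from ParseConfigFile().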
  Trace();

  for (int i = 0; i < argc; ++i) {
    Log(LL::Info) << "argv[" << i << "] = " << argv[i];
  }

  for (int i = 1; i < argc; ++i) {
    const char *arg = argv[i];
    if ((arg[0] != '-') || (arg[0] != '\0' && arg[1] != '-')) {
      Log(LL::Error) << "Unknown commandline input #" << i << ": " << arg;
      return Status::BAD_ARGUMENT;
    }

    std::vector< std::string > argstr =
        Splitter().on('=').trimWhitespace().split(std::string(arg + 2), 2);
    RET_SM(!argstr.empty(), Status::BAD_ARGUMENT, "Unknown flag: " << arg);

    tFlagIter itr = GetGlobalFlags().find(argstr[0]);
    RET_SM(
        itr != GetGlobalFlags().end(),
        Status::BAD_ARGUMENT,
        "Unknown flag: " << arg);

    iFlagBase *pFlag = itr->second;

    if (argstr.size() == 2) {
      // Parse case where we have [flag][=][value]
      RET_SM(
          pFlag->fromString(argstr.at(1)),
          Status::BAD_ARGUMENT,
          "Invalid flag value for " << argstr[0] << " can't parse "
                                    << argstr[1]);
    } else if (i + 1 < argc) {
      // Parse case where we have [flag][space][value]
      ++i;
      RET_SM(
          pFlag->fromString(std::string(argv[i])),
          Status::BAD_ARGUMENT,
          "Invalid flag value for " << argstr[0] << " can't parse " << argv[i]);
    } else {
      // Error on case where we have [flag] alone.  Booleans must be specified
      // as [flag][=][true]
      Log(LL::Error) << "Found flag with no value: " << argstr[0];
      return Status::BAD_ARGUMENT;
    }
  }

  return ParseConfigFile();
}
Example #6
    /**
        Restore configuration by rereading the configuration file.
        
    */
    void SCXLogFileConfigurator::RestoreConfiguration()
    {
        SCXThreadLock lock(m_lock);
        for (BackendList::iterator iter = m_Backends.begin();
             iter != m_Backends.end();
             ++iter)
        {
            m_Mediator->DeRegisterConsumer(*iter);
        }

        m_MinActiveSeverityThreshold = eSeverityMax;

        m_Backends.clear();
        
        ParseConfigFile();
    }
Example #7
int TaGpioInit( char *ConfigFile,       //  Config file for this board.
            int MonitorPeriodInMs   //  The period we want to poll IO at, 0 for default.
            )
{
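    // Clamp the IO polling period to a sane range, load the board
    // configuration with ParseConfigFile(), create priority-inheritance
    // mutexes for the input/output tables, and start the input monitor thread.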
    //----------------------------
    pthread_mutexattr_t attr;
    int rc;
    //----------------------------

    if (MonitorPeriodInMs <= 100){
        InputMonitorPeriod = DEFAULT_INPUT_MONITOR_PERIOD_MS;
    }
    else if (MonitorPeriodInMs > 10 * 1000){
        InputMonitorPeriod = DEFAULT_INPUT_MONITOR_PERIOD_MS;
    }
    else {
        InputMonitorPeriod = MonitorPeriodInMs;
    }

    if ((rc = ParseConfigFile(ConfigFile)) == -1){
        return rc;
    }

    rc = pthread_mutexattr_init(&attr);
    ASSERT(rc == 0);

    rc = pthread_mutexattr_setprotocol(&attr, PTHREAD_PRIO_INHERIT);
    ASSERT(rc == 0);
    
    rc = pthread_mutex_init(&InputLock, &attr);
    ASSERT(rc == 0);

    rc = pthread_mutex_init(&OutputLock, &attr);
    ASSERT(rc == 0);

    rc = pthread_mutexattr_destroy(&attr);
    ASSERT(rc == 0);
    
    rc = pthread_create(&ThreadId, NULL, InputMonitorThread, NULL);
    ASSERT(rc == 0);

    LOG_LIBRARY_VERSION();

    return rc;
}
Example #8
bool ParseFlags(const int argc, const char **argv) {
  Trace("Parsing " << argc << " flags from the cli");

  for (int i = 0; i < argc; ++i) {
    Log(LL::Info) << "argv[" << i << "] = " << argv[i] << std::endl;
  }

  for (int i = 1; i < argc; ++i) {
    const char *arg = argv[i];
    if ((arg[0] != '-') || (arg[0] != '\0' && arg[1] != '-')) {
      Log(LL::Error) << "Unknown commandline input #" << i
                     << ": " << arg << std::endl;
      return false;
    }

    std::vector<std::string> argstr = core::util::Splitter()
                                      .on('=')
                                      .trimWhitespace()
                                      .split(std::string(arg + 2), 2);
    RET_M(!argstr.empty(), "Unknown flag: " << arg);

    tFlagIter itr = GetGlobalFlags().find(argstr[0]);
    RET_M(itr != GetGlobalFlags().end(), "Unknown flag: " << arg);

    iFlagBase *pFlag = itr->second;

    if (argstr.size() == 2) {
      // Parse case where we have [flag][=][value]
      RET_M(pFlag->fromString(argstr.at(1)),
            "Invalid flag value for " << argstr[0] << " can't parse " << argstr[1]);
    } else if (i + 1 < argc) {
      // Parse case where we have [flag][space][value]
      ++i;
      RET_M(pFlag->fromString(std::string(argv[i])),
            "Invalid flag value for " << argstr[0] << " can't parse " << argv[i]);
    } else {
      // Error on case where we have [flag] alone.  Booleans must be specified as [flag][=][true]
      Log(LL::Error) << "Found flag with no value: " << argstr[0] << std::endl;
      return false;
    }
  }

  Trace("Finished parsing cli flags");
  return ParseConfigFile(g_configFile.get());
}
Example #9
    /**
        Constructor that takes a handle to the log mediator.
             
        \param[in] mediator Pointer to log mediator to configure.
        \param[in] configFilePath Path to configuration file to use.
        \param[in] lock Used to inject a thread lock handle. The default is to
        get a new thread lock handle from the factory.
        \param[in] configRefreshRate Used to inject a configuration refresh rate
        in milliseconds.
    */
    SCXLogFileConfigurator::SCXLogFileConfigurator(SCXHandle<SCXLogMediator> mediator,
                                                   const SCXFilePath& configFilePath,
                                                   const SCXThreadLockHandle& lock /*= ThreadLockHandleGet()*/,
                                                   scxulong configRefreshRate /* = 10000*/) :
        m_Mediator(mediator),
        m_ConfigFilePath(configFilePath),
        m_ConfigVersion(0),
        m_lock(lock),
        m_ConfigRefreshRate(configRefreshRate),
        m_ConfigUpdateThread(0),
        m_ConfFile(m_ConfigFilePath),
        m_MinActiveSeverityThreshold(eSeverityMax)
    {
        ParseConfigFile();

        SCXHandle<LogFileConfiguratorParam> p( new LogFileConfiguratorParam() );
        p->m_configurator = this;
        m_ConfigUpdateThread = new SCXCoreLib::SCXThread(ConfigUpdateThreadBody, p);
    }
Example #10
bool Parser::ParseConfigDir(const std::string& path) {
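    // Parses every regular file in the directory with ParseConfigFile();
    // failures on individual files are logged but do not abort the scan.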
    INFO("Parsing directory %s...\n", path.c_str());
    std::unique_ptr<DIR, int(*)(DIR*)> config_dir(opendir(path.c_str()), closedir);
    if (!config_dir) {
        ERROR("Could not import directory '%s'\n", path.c_str());
        return false;
    }
    dirent* current_file;
    while ((current_file = readdir(config_dir.get()))) {
        std::string current_path =
            android::base::StringPrintf("%s/%s", path.c_str(), current_file->d_name);
        // Ignore directories and only process regular files.
        if (current_file->d_type == DT_REG) {
            if (!ParseConfigFile(current_path)) {
                ERROR("could not import file '%s'\n", current_path.c_str());
            }
        }
    }
    return true;
}
Example #11
/*----------------------------------------------------------------------
|   NPT_LogManager::ParseConfigSource
+---------------------------------------------------------------------*/
NPT_Result
NPT_LogManager::ParseConfigSource(NPT_String& source) 
{
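    /* dispatch on the source prefix: "file:"  -> ParseConfigFile(),
       "plist:" -> ParseConfig(), "http:port=" -> NPT_HttpLoggerConfigurator */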
    if (source.StartsWith("file:")) {
        /* file source */
        ParseConfigFile(source.GetChars()+5);
    } else if (source.StartsWith("plist:")) {
        /* property list source */
        ParseConfig(source.GetChars()+6, source.GetLength()-6);
    } else if (source.StartsWith("http:port=")) {
        /* http configurator */
        unsigned int port = 0;
        NPT_Result result = NPT_ParseInteger(source.GetChars()+10, port, true);
        if (NPT_FAILED(result)) return result;
        new NPT_HttpLoggerConfigurator(port);
    } else {
        return NPT_ERROR_INVALID_SYNTAX;
    }

    return NPT_SUCCESS;
}
Example #12
bool Settings::ReloadConfigurationFile() {
	MS_TRACE();

	if (Settings::arguments.configFile.empty()) {
		MS_ERROR("no configuration file was given in command line options");
		return false;
	}

	libconfig::Config* config;

	try {
		config = ParseConfigFile();
	}
	catch (const MediaSoupError &error) {
		MS_ERROR("%s", error.what());
		return false;
	}

	std::string str_value;

	// Only some configuration settings can be reloaded.

	try {
		/* First level settings. */

		if (config->lookupValue("logLevel", str_value))
			SetLogLevel(str_value);
		else
			Settings::configuration.logLevel = LOG_DEBUG;
	}
	catch (const MediaSoupError &error) {
		MS_ERROR("error in configuration file: %s", error.what());
		delete config;
		return false;
	}

	delete config;
	return true;
}
Example #13
void vDos_LoadConfig(void)
	{
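	// Register the built-in configuration keys with their defaults, then
	// let ParseConfigFile() override them from the config file.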
#ifdef WITHIRQ1
	//Option to disable the full IRQ1 keyboard handling
	ConfAddBool("kbxy3", true);
#endif
	ConfAddInt("kbrepdel", 500); 
	ConfAddInt("kbrepinter", 10);
#ifdef BEEP
	//Option to disable the rudimentary sound support
	ConfAddBool("beepxy3", true);
#endif
#ifdef SFN83
	//Option to disable support for short (8.3) file names.
	ConfAddBool("sfn83", true);
#endif
	ConfAddInt("scale", 0);
	ConfAddString("window", "");

	// title and icon emendelson from rhenssel
	ConfAddString("title", "vDos");
	ConfAddString("icon", "vDos_ico");
	
	ConfAddBool("low", false);
	ConfAddString("xmem", "");
	ConfAddString("colors", "");
	ConfAddBool("mouse", false);
	ConfAddInt("lins", 25);
	ConfAddInt("cols", 80);
	ConfAddBool("frame", false);
	ConfAddBool("timeout", true);
	ConfAddString("font", "");
	ConfAddString("wp", "");
	ConfAddBool("blinkc", false);
	ConfAddInt("euro", -1);
	ParseConfigFile();
	}
Example #14
void TestParseConfigFile()
{
     ParseConfigFile( false, "qtss.conf", SampleConfigSetter, NULL );

}
Example #15
void Settings::ReadConfigurationFile() {
	MS_TRACE();

	if (Settings::arguments.configFile.empty())
		return;

	libconfig::Config* config;

	try {
		config = ParseConfigFile();
	}
	catch (const MediaSoupError &error) {
		MS_EXIT_FAILURE("%s", error.what());
	}

	std::string str_value;
	std::string str_value2;
	int int_value;
	int int_value2;
	bool bool_value;
	std::string empty_string;

	try {
		/* First level settings. */

		if (config->lookupValue("logLevel", str_value))
			SetLogLevel(str_value);

		if (config->lookupValue("syslogFacility", str_value))
			SetSyslogFacility(str_value);

		if (config->lookupValue("numWorkers", int_value))
			SetNumWorkers(int_value);

		/* ControlProtocol section. */

		if (config->lookupValue("ControlProtocol.listenIP", str_value))
			SetControlProtocolListenIP(str_value);

		if (config->lookupValue("ControlProtocol.listenPort", int_value))
			SetControlProtocolListenPort(int_value);

		/* RTC section. */

		if (config->lookupValue("RTC.listenIPv4", str_value))
			SetRTClistenIPv4(str_value);
		else if ((config->lookupValue("RTC.listenIPv4", bool_value)) && bool_value == false)
			SetRTClistenIPv4(empty_string);

		if (config->lookupValue("RTC.listenIPv6", str_value))
			SetRTClistenIPv6(str_value);
		else if ((config->lookupValue("RTC.listenIPv6", bool_value)) && bool_value == false)
			SetRTClistenIPv6(empty_string);

		if (config->lookupValue("RTC.minPort", int_value) && config->lookupValue("RTC.maxPort", int_value2))
			SetRTCports(int_value, int_value2);

		if (config->lookupValue("RTC.dtlsCertificateFile", str_value) && config->lookupValue("RTC.dtlsPrivateKeyFile", str_value2))
			SetDtlsCertificateAndPrivateKeyFiles(str_value, str_value2);
	}
	catch (const MediaSoupError &error) {
		delete config;
		MS_EXIT_FAILURE("error in configuration file: %s", error.what());
	}

	delete config;
}
Example #16
int main(int argc, char *argv[])
{
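    // Pushbroom stereo driver: loads stereoConfig with ParseConfigFile(),
    // opens the Firewire cameras (or plays back recorded video), runs the
    // stereo processing loop, and publishes point data over LCM.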
    // get input arguments

    string configFile = "";
    string video_file_left = "", video_file_right = "", video_directory = "";
    int starting_frame_number = 0;
    bool enable_gamma = false;
    float random_results = -1.0;

    int last_frame_number = -1;

    int last_playback_frame_number = -2;

    ConciseArgs parser(argc, argv);
    parser.add(configFile, "c", "config", "Configuration file containing camera GUIDs, etc.", true);
    parser.add(show_display, "d", "show-display", "Enable for visual debugging display. Will reduce framerate significantly.");
    parser.add(show_display_wait, "w", "show-display-wait", "Optional argument to decrease framerate for lower network traffic when forwarding the display.");
    parser.add(show_unrectified, "u", "show-unrectified", "When displaying images, do not apply rectification.");
    parser.add(disable_stereo, "s", "disable-stereo", "Disable online stereo processing.");
    parser.add(force_brightness, "b", "force-brightness", "Force a brightness setting.");
    parser.add(force_exposure, "e", "force-exposure", "Force an exposure setting.");
    parser.add(quiet_mode, "q", "quiet", "Reduce text output.");
    parser.add(video_file_left, "l", "video-file-left", "Do not use cameras, instead use this video file (also requires a right video file).");
    parser.add(video_file_right, "t", "video-file-right", "Right video file, only for use with the -l option.");
    parser.add(video_directory, "i", "video-directory", "Directory to search for videos in (for playback).");
    parser.add(starting_frame_number, "f", "starting-frame", "Frame to start at when playing back videos.");
    parser.add(display_hud, "v", "hud", "Overlay HUD on display images.");
    parser.add(record_hud, "x", "record-hud", "Record the HUD display.");
    parser.add(file_frame_skip, "p", "skip", "Number of frames skipped in recording (for playback).");
    parser.add(enable_gamma, "g", "enable-gamma", "Turn gamma on for both cameras.");
    parser.add(random_results, "R", "random-results", "Number of random points to produce per frame.  Can be a float in which case we'll take a random sample to decide if to produce the last one.  Disables real stereo processing.  Only for debugging / analysis!");
    parser.add(publish_all_images, "P", "publish-all-images", "Publish all images to LCM");
    parser.parse();

    // parse the config file
    if (ParseConfigFile(configFile, &stereoConfig) != true)
    {
        fprintf(stderr, "Failed to parse configuration file, quitting.\n");
        return -1;
    }

    if (video_file_left.length() > 0
        && video_file_right.length() <= 0) {

        fprintf(stderr, "Error: for playback you must specify both "
            "a right and left video file. (Only got a left one.)\n");

        return -1;
    }

     if (video_file_left.length() <= 0
        && video_file_right.length() > 0) {

        fprintf(stderr, "Error: for playback you must specify both "
            "a right and left video file. (Only got a right one.)\n");

        return -1;
    }

    recording_manager.Init(stereoConfig);

    // attempt to load video files / directories
    if (video_file_left.length() > 0) {
        if (recording_manager.LoadVideoFiles(video_file_left, video_file_right) != true) {
            // don't have videos, bail out.
            return -1;
        }
    }

    if (video_directory.length() > 0) {
        if (recording_manager.SetPlaybackVideoDirectory(video_directory) != true) {
            // bail
            return -1;
        }
    }

    recording_manager.SetQuietMode(quiet_mode);
    recording_manager.SetPlaybackFrameNumber(starting_frame_number);



    uint64 guid = stereoConfig.guidLeft;
    uint64 guid2 = stereoConfig.guidRight;

    // start up LCM
    lcm_t * lcm;
    lcm = lcm_create (stereoConfig.lcmUrl.c_str());


    unsigned long elapsed;

    Hud hud;


    // --- setup control-c handling ---
    struct sigaction sigIntHandler;

    sigIntHandler.sa_handler = control_c_handler;
    sigemptyset(&sigIntHandler.sa_mask);
    sigIntHandler.sa_flags = 0;

    sigaction(SIGINT, &sigIntHandler, NULL);
    // --- end ctrl-c handling code ---

    dc1394error_t   err;
    dc1394error_t   err2;


    // tell opencv to use only one core so that we can manage our
    // own threading without a fight
    setNumThreads(1);

    if (recording_manager.UsingLiveCameras()) {
        d = dc1394_new ();
        if (!d)
            cerr << "Could not create dc1394 context" << endl;

        d2 = dc1394_new ();
        if (!d2)
            cerr << "Could not create dc1394 context for camera 2" << endl;

        camera = dc1394_camera_new (d, guid);
        if (!camera)
        {
            cerr << "Could not create dc1394 camera... quitting." << endl;
            exit(1);
        }

        camera2 = dc1394_camera_new (d2, guid2);
        if (!camera2)
            cerr << "Could not create dc1394 camera for camera 2" << endl;
        // reset the bus
        dc1394_reset_bus(camera);
        dc1394_reset_bus(camera2);

        // setup
        err = setup_gray_capture(camera, DC1394_VIDEO_MODE_FORMAT7_1);
        DC1394_ERR_CLN_RTN(err, cleanup_and_exit(camera), "Could not setup camera");

        err2 = setup_gray_capture(camera2, DC1394_VIDEO_MODE_FORMAT7_1);
        DC1394_ERR_CLN_RTN(err2, cleanup_and_exit(camera2), "Could not setup camera number 2");

        // enable camera
        err = dc1394_video_set_transmission(camera, DC1394_ON);
        DC1394_ERR_CLN_RTN(err, cleanup_and_exit(camera), "Could not start camera iso transmission");
        err2 = dc1394_video_set_transmission(camera2, DC1394_ON);
        DC1394_ERR_CLN_RTN(err2, cleanup_and_exit(camera2), "Could not start camera iso transmission for camera number 2");

        InitBrightnessSettings(camera, camera2, enable_gamma);
    }

    if (show_display) {

        namedWindow("Input", CV_WINDOW_AUTOSIZE | CV_WINDOW_KEEPRATIO);
        namedWindow("Input2", CV_WINDOW_AUTOSIZE | CV_WINDOW_KEEPRATIO);
        namedWindow("Stereo", CV_WINDOW_AUTOSIZE | CV_WINDOW_KEEPRATIO);

        namedWindow("Left Block", CV_WINDOW_AUTOSIZE | CV_WINDOW_KEEPRATIO);
        namedWindow("Right Block", CV_WINDOW_AUTOSIZE | CV_WINDOW_KEEPRATIO);

        namedWindow("Debug 1", CV_WINDOW_AUTOSIZE | CV_WINDOW_KEEPRATIO);
        namedWindow("Debug 2", CV_WINDOW_AUTOSIZE | CV_WINDOW_KEEPRATIO);



        setMouseCallback("Input", onMouse); // for drawing disparity lines
        setMouseCallback("Stereo", onMouseStereo, &hud); // for drawing disparity lines

        moveWindow("Input", stereoConfig.displayOffsetX + 100, stereoConfig.displayOffsetY + 100);
        moveWindow("Stereo", stereoConfig.displayOffsetX + 100, stereoConfig.displayOffsetY + 370);
        moveWindow("Input2", stereoConfig.displayOffsetX + 478, stereoConfig.displayOffsetY + 100);
        moveWindow("Left Block", stereoConfig.displayOffsetX + 900, stereoConfig.displayOffsetY + 100);
        moveWindow("Right Block", stereoConfig.displayOffsetX + 1400, stereoConfig.displayOffsetY + 100);

        moveWindow("Debug 1", stereoConfig.displayOffsetX + 900, stereoConfig.displayOffsetY + 670);
        moveWindow("Debug 2", stereoConfig.displayOffsetX + 1400, stereoConfig.displayOffsetY + 670);

    } // show display

    if (show_display || publish_all_images) {
        // if a channel exists, subscribe to it
        if (stereoConfig.stereo_replay_channel.length() > 0) {
            stereo_replay_sub = lcmt_stereo_subscribe(lcm, stereoConfig.stereo_replay_channel.c_str(), &stereo_replay_handler, &hud);
        }

        if (stereoConfig.pose_channel.length() > 0) {
            mav_pose_t_sub = mav_pose_t_subscribe(lcm, stereoConfig.pose_channel.c_str(), &mav_pose_t_handler, &hud);
        }

        if (stereoConfig.gps_channel.length() > 0) {
            mav_gps_data_t_sub = mav_gps_data_t_subscribe(lcm, stereoConfig.gps_channel.c_str(), &mav_gps_data_t_handler, &hud);
        }

        if (stereoConfig.baro_airspeed_channel.length() > 0) {
            baro_airspeed_sub = lcmt_baro_airspeed_subscribe(lcm, stereoConfig.baro_airspeed_channel.c_str(), &baro_airspeed_handler, &hud);
        }

        if (stereoConfig.servo_out_channel.length() > 0) {
            servo_out_sub = lcmt_deltawing_u_subscribe(lcm, stereoConfig.servo_out_channel.c_str(), &servo_out_handler, &hud);
        }

        if (stereoConfig.battery_status_channel.length() > 0) {
            battery_status_sub = lcmt_battery_status_subscribe(lcm, stereoConfig.battery_status_channel.c_str(), &battery_status_handler, &hud);
        }

        if (stereoConfig.cpu_info_channel1.length() > 0) {
            cpu_info_sub1 = lcmt_cpu_info_subscribe(lcm, stereoConfig.cpu_info_channel1.c_str(), &cpu_info_handler, &recording_manager);
            cpu_info_sub2 = lcmt_cpu_info_subscribe(lcm, stereoConfig.cpu_info_channel2.c_str(), &cpu_info_handler, &recording_manager);
            cpu_info_sub3 = lcmt_cpu_info_subscribe(lcm, stereoConfig.cpu_info_channel3.c_str(), &cpu_info_handler, &recording_manager);
        }

        if (stereoConfig.log_size_channel1.length() > 0) {
            log_size_sub1 = lcmt_log_size_subscribe(lcm, stereoConfig.log_size_channel1.c_str(), &log_size_handler, &hud);
            log_size_sub2 = lcmt_log_size_subscribe(lcm, stereoConfig.log_size_channel2.c_str(), &log_size_handler, &hud);
            log_size_sub3 = lcmt_log_size_subscribe(lcm, stereoConfig.log_size_channel3.c_str(), &log_size_handler, &hud);
        }

    } // end show_display || publish_all_images

    // load calibration
    OpenCvStereoCalibration stereoCalibration;

    if (LoadCalibration(stereoConfig.calibrationDir, &stereoCalibration) != true)
    {
        cerr << "Error: failed to read calibration files. Quitting." << endl;
        return -1;
    }

    int inf_disparity_tester, disparity_tester;
    disparity_tester = GetDisparityForDistance(10, stereoCalibration, &inf_disparity_tester);

    std::cout << "computed disparity is = " << disparity_tester << ", inf disparity = " << inf_disparity_tester << std::endl;

    // subscribe to the stereo control channel
    stereo_control_sub = lcmt_stereo_control_subscribe(lcm, stereoConfig.stereoControlChannel.c_str(), &lcm_stereo_control_handler, NULL);


    Mat imgDisp;
    Mat imgDisp2;

    // initialize default parameters
    //PushbroomStereoState state; // HACK

    state.disparity = stereoConfig.disparity;
    state.zero_dist_disparity = stereoConfig.infiniteDisparity;
    state.sobelLimit = stereoConfig.interestOperatorLimit;
    state.horizontalInvarianceMultiplier = stereoConfig.horizontalInvarianceMultiplier;
    state.blockSize = stereoConfig.blockSize;
    state.random_results = random_results;
    state.check_horizontal_invariance = true;

    if (state.blockSize > 10 || state.blockSize < 1)
    {
        fprintf(stderr, "Warning: block size is very large "
            "or small (%d).  Expect trouble.\n", state.blockSize);
    }

    state.sadThreshold = stereoConfig.sadThreshold;

    state.mapxL = stereoCalibration.mx1fp;
    state.mapxR = stereoCalibration.mx2fp;
    state.Q = stereoCalibration.qMat;
    state.show_display = show_display;

    state.lastValidPixelRow = stereoConfig.lastValidPixelRow;

    Mat matL, matR;
    bool quit = false;

    if (recording_manager.UsingLiveCameras()) {
        matL = GetFrameFormat7(camera);
        matR = GetFrameFormat7(camera2);

        if (recording_manager.InitRecording(matL, matR) != true) {
            // failed to init recording, things are going bad.  bail.
            return -1;
        }

        // before we start, turn the cameras on and set the brightness and exposure
        MatchBrightnessSettings(camera, camera2, true, force_brightness, force_exposure);

        // grab a few frames and send them over LCM for the user
        // to verify that everything is working
        if (!show_display && !publish_all_images) {
            printf("Sending init images over LCM... ");
            fflush(stdout);

            for (int i = 0; i < 5; i++) {

                matL = GetFrameFormat7(camera);
                SendImageOverLcm(lcm, "stereo_image_left", matL, 50);

                matR = GetFrameFormat7(camera2);
                SendImageOverLcm(lcm, "stereo_image_right", matR, 50);

                // don't send these too fast, otherwise we'll flood the ethernet link
                // and not actually be helpful

                // wait one second
                printf(".");
                fflush(stdout);

                sleep(1);
            }
            printf(" done.\n");
        }

    } // recording_manager.UsingLiveCameras()

    // spool up worker threads
    PushbroomStereo pushbroom_stereo;

    // start the framerate clock
    struct timeval start, now;
    gettimeofday( &start, NULL );

    while (quit == false) {

        // get the frames from the camera
        if (recording_manager.UsingLiveCameras()) {
            // we would like to match brightness every frame
            // but that would really hurt our framerate
            // match brightness every 10 frames instead
            if (numFrames % MATCH_BRIGHTNESS_EVERY_N_FRAMES == 0)
            {
                MatchBrightnessSettings(camera, camera2);
            }

            // capture images from the cameras
            matL = GetFrameFormat7(camera);
            matR = GetFrameFormat7(camera2);

            // record video
            recording_manager.AddFrames(matL, matR);


        } else {
            // using a video file -- get the next frame
            recording_manager.GetFrames(matL, matR);
        }

        cv::vector<Point3f> pointVector3d;
        cv::vector<uchar> pointColors;
        cv::vector<Point3i> pointVector2d; // for display
        cv::vector<Point3i> pointVector2d_inf; // for display

        // do the main stereo processing
        if (disable_stereo != true) {

            gettimeofday( &now, NULL );
            double before = now.tv_usec + now.tv_sec * 1000 * 1000;

            pushbroom_stereo.ProcessImages(matL, matR, &pointVector3d, &pointColors, &pointVector2d, state);

            gettimeofday( &now, NULL );
            double after = now.tv_usec + now.tv_sec * 1000 * 1000;

            timer_sum += after-before;
            timer_count ++;

        }

        // build an LCM message for the stereo data
        lcmt_stereo msg;


        if (recording_manager.UsingLiveCameras() || stereo_lcm_msg == NULL) {
            msg.timestamp = getTimestampNow();
        } else {
            // if we are replaying videos, preserve the timestamp of the original video
            msg.timestamp = stereo_lcm_msg->timestamp;

        }


        msg.number_of_points = (int)pointVector3d.size();

        float x[msg.number_of_points];
        float y[msg.number_of_points];
        float z[msg.number_of_points];
        uchar grey[msg.number_of_points];

        for (unsigned int i=0;i<pointVector3d.size();i++) {

            x[i] = pointVector3d[i].x / stereoConfig.calibrationUnitConversion;
            y[i] = pointVector3d[i].y / stereoConfig.calibrationUnitConversion;
            z[i] = pointVector3d[i].z / stereoConfig.calibrationUnitConversion;
            grey[i] = pointColors[i];
        }

        msg.x = x;
        msg.y = y;
        msg.z = z;
        msg.grey = grey;
        msg.frame_number = recording_manager.GetFrameNumber();

        if (recording_manager.UsingLiveCameras()) {
            msg.frame_number = msg.frame_number - 1;  // minus one since recording manager has
                                                      // already recorded this frame (above in
                                                      // AddFrames) but we haven't made a message
                                                      // for it yet
        }


        msg.video_number = recording_manager.GetRecVideoNumber();

        // publish the LCM message
        if (last_frame_number != msg.frame_number) {
            lcmt_stereo_publish(lcm, "stereo", &msg);
            last_frame_number = msg.frame_number;
        }

        if (publish_all_images) {
            if (recording_manager.GetFrameNumber() != last_playback_frame_number) {
                SendImageOverLcm(lcm, "stereo_image_left", matL, 80);
                SendImageOverLcm(lcm, "stereo_image_right", matR, 80);

                last_playback_frame_number = recording_manager.GetFrameNumber();
            }

            //process LCM until there are no more messages
            // this allows us to drop frames if we are behind
            while (NonBlockingLcm(lcm)) {}
        }

        Mat matDisp, remapL, remapR;

        if (show_display) {
            // we remap again here because we're just in display
            Mat remapLtemp(matL.rows, matL.cols, matL.depth());
            Mat remapRtemp(matR.rows, matR.cols, matR.depth());

            remapL = remapLtemp;
            remapR = remapRtemp;

            remap(matL, remapL, stereoCalibration.mx1fp, Mat(), INTER_NEAREST);
            remap(matR, remapR, stereoCalibration.mx2fp, Mat(), INTER_NEAREST);

            remapL.copyTo(matDisp);

            //process LCM until there are no more messages
            // this allows us to drop frames if we are behind
            while (NonBlockingLcm(lcm)) {}
        } // end show_display


        if (show_display) {

            for (unsigned int i=0;i<pointVector2d.size();i++) {
                int x2 = pointVector2d[i].x;
                int y2 = pointVector2d[i].y;
                //int sad = pointVector2d[i].z;
                rectangle(matDisp, Point(x2,y2), Point(x2+state.blockSize, y2+state.blockSize), 0,  CV_FILLED);
                rectangle(matDisp, Point(x2+1,y2+1), Point(x2+state.blockSize-1, y2-1+state.blockSize), 255);

            }

            // draw pixel blocks
            if (lineLeftImgPosition >= 0 && lineLeftImgPositionY > 1) {
                DisplayPixelBlocks(remapL, remapR, lineLeftImgPosition - state.blockSize/2, lineLeftImgPositionY - state.blockSize/2, state, &pushbroom_stereo);
            }

            // draw a line for the user to show disparity
            DrawLines(remapL, remapR, matDisp, lineLeftImgPosition, lineLeftImgPositionY, state.disparity, state.zero_dist_disparity);


            if (visualize_stereo_hits == true && stereo_lcm_msg != NULL) {

                // transform the points from 3D space back onto the image's 2D space
                vector<Point3f> lcm_points;
                Get3DPointsFromStereoMsg(stereo_lcm_msg, &lcm_points);

                // draw the points on the unrectified image (to see these
                // you must pass the -u flag)
                Draw3DPointsOnImage(matL, &lcm_points, stereoCalibration.M1, stereoCalibration.D1, stereoCalibration.R1, 128);

            }

            if (show_unrectified == false) {

                imshow("Input", remapL);
                imshow("Input2", remapR);
            } else {
                imshow("Input", matL);
                imshow("Input2", matR);
            }


            if (display_hud) {
                Mat with_hud;

                recording_manager.SetHudNumbers(&hud);

                hud.DrawHud(matDisp, with_hud);

                if (record_hud) {
                    // put this frame into the HUD recording
                    recording_manager.RecFrameHud(with_hud);

                }

                imshow("Stereo", with_hud);
            } else {
                imshow("Stereo", matDisp);
            }


            char key = waitKey(show_display_wait);

            if (key != 255 && key != -1)
            {
                cout << endl << key << endl;
            }

            switch (key)
            {
                case 'T':
                    state.disparity --;
                    break;
                case 'R':
                    state.disparity ++;
                    break;

                case 'w':
                    state.sobelLimit += 10;
                    break;

                case 's':
                    state.sobelLimit -= 10;
                    break;

                case 'd':
                    state.horizontalInvarianceMultiplier -= 0.1;
                    break;

                case 'D':
                    state.horizontalInvarianceMultiplier += 0.1;
                    break;

                case 'g':
                    state.blockSize ++;
                    break;

                case 'b':
                    state.blockSize --;
                    if (state.blockSize < 1) {
                        state.blockSize = 1;
                    }
                    break;

                case 'Y':
                    state.sadThreshold += 50;
                    break;

                case 'y':
                    state.sadThreshold ++;
                    break;

                case 'h':
                    state.sadThreshold --;
                    break;

                case 'H':
                    state.sadThreshold -= 50;
                    break;

                case 'm':
                    if (recording_manager.UsingLiveCameras()) {
                        MatchBrightnessSettings(camera, camera2, true, force_brightness, force_exposure);
                    }
                    break;

                case '1':
                    force_brightness --;
                    if (recording_manager.UsingLiveCameras()) {
                        MatchBrightnessSettings(camera, camera2, true, force_brightness, force_exposure);
                    }
                    break;

                case '2':
                    force_brightness ++;
                    if (recording_manager.UsingLiveCameras()) {
                        MatchBrightnessSettings(camera, camera2, true, force_brightness, force_exposure);
                    }
                    break;

                case '3':
                    force_exposure --;
                    if (recording_manager.UsingLiveCameras()) {
                        MatchBrightnessSettings(camera, camera2, true, force_brightness, force_exposure);
                    }
                    break;

                case '4':
                    force_exposure ++;
                    if (recording_manager.UsingLiveCameras()) {
                        MatchBrightnessSettings(camera, camera2, true, force_brightness, force_exposure);
                    }
                    break;

                case '5':
                    // to show SAD boxes
                    state.sobelLimit = 0;
                    state.sadThreshold = 255;
                    break;

                case 'I':
                    state.check_horizontal_invariance = !state.check_horizontal_invariance;
                    break;

                case '.':
                    recording_manager.SetPlaybackFrameNumber(recording_manager.GetFrameNumber() + 1);
                    break;

                case ',':
                    recording_manager.SetPlaybackFrameNumber(recording_manager.GetFrameNumber() - 1);
                    break;

                case '>':
                    recording_manager.SetPlaybackFrameNumber(recording_manager.GetFrameNumber() + 50);
                    break;

                case '<':
                    recording_manager.SetPlaybackFrameNumber(recording_manager.GetFrameNumber() - 50);
                    break;

                //case 'k':
                //    state.zero_dist_disparity ++;
                 //   break;

                case 'l':
                    state.zero_dist_disparity --;
                    break;

                case 'o':
                    inf_sad_add --;
                    break;

                case 'p':
                    inf_sad_add ++;
                    break;

                case '[':
                    y_offset --;
                    if (y_offset < 0) {
                        y_offset = 0;
                    }
                    break;

                case ']':
                    y_offset ++;
                    break;

                case 'v':
                    display_hud = !display_hud;
                    break;

                case 'c':
                    hud.SetClutterLevel(hud.GetClutterLevel() + 1);
                    break;

                case 'C':
                    hud.SetClutterLevel(hud.GetClutterLevel() - 1);
                    break;

                case '}':
                    hud.SetPitchRangeOfLens(hud.GetPitchRangeOfLens() + 1);
                    break;
                case '{':
                    hud.SetPitchRangeOfLens(hud.GetPitchRangeOfLens() - 1);
                    break;

                case 'S':
                    // take a screen cap of the left and right images
                    // useful for putting into a stereo tuner
                    printf("\nWriting left.ppm...");
                    imwrite("left.ppm", remapL);

                    printf("\nWriting right.ppm...");
                    imwrite("right.ppm", remapR);

                    printf("\ndone.");
                    break;

                case 'V':
                    // record the HUD
                    record_hud = true;
                    recording_manager.RestartRecHud();
                    break;

                    /*
                case 'j':
                    state.debugJ --;
                    break;

                case 'J':
                    state.debugJ ++;
                    break;

                case 'i':
                    state.debugI --;
                    break;

                case 'I':
                    state.debugI ++;
                    break;

                case 'k':
                    state.debugDisparity --;
                    break;

                case 'K':
                    state.debugDisparity ++;
                    break;

                    */

                case 'q':
                    quit = true;
                    break;
            }

            if (key != 255 && key != -1)
            {
                cout << "sadThreshold = " << state.sadThreshold << endl;
                cout << "sobelLimit = " << state.sobelLimit << endl;
                cout << "horizontalInvarianceMultiplier = " << state.horizontalInvarianceMultiplier << endl;
                cout << "brightness: " << force_brightness << endl;
                cout << "exposure: " << force_exposure << endl;
                cout << "disparity = " << state.disparity << endl;
                cout << "inf_disparity = " << state.zero_dist_disparity << endl;
                cout << "inf_sad_add = " << inf_sad_add << endl;
                cout << "blockSize = " << state.blockSize << endl;
                cout << "frame_number = " << recording_manager.GetFrameNumber() << endl;
                cout << "y offset = " << y_offset << endl;
                cout << "PitchRangeOfLens = " << hud.GetPitchRangeOfLens() << endl;
            }
        } // end show_display

        numFrames ++;

        // check for new LCM messages
        NonBlockingLcm(lcm);

        if (quiet_mode == false || numFrames % 100 == 0) {
            // compute framerate
            gettimeofday( &now, NULL );

            elapsed = (now.tv_usec / 1000 + now.tv_sec * 1000) -
            (start.tv_usec / 1000 + start.tv_sec * 1000);

            printf("\r%d frames (%lu ms) - %4.1f fps | %4.1f ms/frame, stereo: %f", numFrames, elapsed, (float)numFrames/elapsed * 1000, elapsed/(float)numFrames, timer_sum/(double)timer_count);
            fflush(stdout);
        }


    } // end main while loop

    printf("\n\n");

    destroyWindow("Input");
    destroyWindow("Input2");
    destroyWindow("Stereo");

    // close camera
    if (recording_manager.UsingLiveCameras()) {
        StopCapture(d, camera);
        StopCapture(d2, camera2);
    }

    return 0;
}
Example #17
bool ConfigOptionsDialog::ParseConfigOptions(const ConfigToken &setProfile)
{
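  // Loads the main gliConfig_Definition.ini with ParseConfigFile(), then any
  // per-plugin config_Definition.ini files, and fills the property grid from
  // the selected profile.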
  wxStandardPaths stdPaths;

  // Reset any grid values
  gridValues.childHeaderArray.clear();
  gridValues.childSelectArray.clear();

  // Get the directory that contains the main options
  wxFileName mainConfigDef;
  mainConfigDef.AssignDir(stdPaths.GetDataDir());
  mainConfigDef.AppendDir("MainLib");
  mainConfigDef.SetFullName(wxT("gliConfig_Definition.ini"));

  // Parse the main config file
  if(!ParseConfigFile(mainConfigDef, false))
  {
    return false;
  }

  // Get the plugin directory
  wxFileName pluginDir;
  pluginDir.AssignDir(stdPaths.GetDataDir());
  pluginDir.AppendDir("Plugins");
  wxDir searchDir(pluginDir.GetFullPath());
  
  // Search for all directories under the "Plugins" directory
  wxArrayString foundDirs;
  DirListTraverse dirTraverse(foundDirs);
  searchDir.Traverse(dirTraverse);

  // Loop for all plugins directories and get any plugin options  
  for(uint i=0; i<foundDirs.size(); i++)
  {
    wxFileName pluginConfigDef;
    pluginConfigDef.AssignDir(foundDirs[i]);
    pluginConfigDef.SetFullName(wxT("config_Definition.ini"));

    // If the file exists
    if(pluginConfigDef.FileExists())
    {
      // Parse the main config file
      if(!ParseConfigFile(pluginConfigDef, true))
      {
        return false;
      }
    }
    else
    {
      wxLogWarning("Unable to find plugin config definition (%s) - file does not exist", pluginConfigDef.GetFullPath().c_str());
    }
  }

  // Set the defaults from the passed config options
  if(!SetProfileData(setProfile, gridValues))
  { 
    wxLogError("Unable to set selected config options");     
    return false;
  }
   
  // Lock and clear the property grid 
  propGrid->Freeze();

  // Add the options to the property grid
  string parentPath("");
  if(!AddPropGridHeader(gridValues, 0, true, parentPath))
  {
    propGrid->Thaw();
    return false;
  }
  
  // Release the property grid
  propGrid->Thaw();

  return true;
}
Example #18
void main(int argc, char *argv[])
{
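	// MPD launcher: an executable first argument (or -localonly) runs the
	// processes locally; with -np the job goes through mpd; otherwise the
	// first argument is a host config file read with ParseConfigFile().
	// Launch requests are sent over the per-user mpd named pipe.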
	int error, i;
	DWORD dwNumWritten;
	char pszUserName[100], pszPipeName[MAX_PATH];
	DWORD length;
	char pBuffer[4096];
	int nGroupId;
	int nNproc = 1;
	bool bGetHosts = false;
	bool bUseNP = false;
	//char pszCmdLine[1024];
	WSADATA wsaData;
	int err;

	//TCHAR pszJobID[100];
	//TCHAR pszEnv[MAX_PATH] = TEXT("");
	TCHAR pszDir[MAX_PATH] = TEXT(".");

	// Start the Winsock dll.
	if ((err = WSAStartup( MAKEWORD( 2, 0 ), &wsaData )) != 0)
	{
		printf("Winsock2 dll not initialized, error: %d\n", err);
		return;
	}

	/*
	bGetHosts = !GetOpt(argc, argv, "-np", &nNproc);

	if (argc == 1)
	{
		printf("No command line specified\n");
		return;
	}
	//*/
	GetOpt(argc, argv, "-env", g_pszEnv);
	if (!GetOpt(argc, argv, "-dir", pszDir))
		GetCurrentDirectory(MAX_PATH, pszDir);

	SetCurrentDirectory(pszDir);
	
	DWORD dwType;
	if (GetBinaryType(argv[1], &dwType))
	{
		// The first argument is an executable so set things up to run one process
		g_nHosts = 1;
		TCHAR pszTempExe[MAX_PATH], *namepart;
		_tcscpy(g_pszExe, argv[1]);
		GetFullPathName(g_pszExe, MAX_PATH, pszTempExe, &namepart);
		// Quote the executable in case there are spaces in the path
		_stprintf(g_pszExe, TEXT("\"%s\""), pszTempExe);
		g_pszArgs[0] = TEXT('\0');
		for (int i=2; i<argc; i++)
		{
			_tcscat(g_pszArgs, argv[i]);
			if (i < argc-1)
				_tcscat(g_pszArgs, TEXT(" "));
		}
		RunLocal(true);
		return;
	}
	else
	{
		if (GetOpt(argc, argv, "-np", &g_nHosts))
		{
			if (g_nHosts < 1)
			{
				printf("Error: must specify a number greater than 0 after the -np option\n");
				return;
			}
			if (argc < 2)
			{
				printf("Error: not enough arguments.\n");
				return;
			}
			_tcscpy(g_pszExe, argv[1]);
			g_pszArgs[0] = TEXT('\0');
			for (int i=2; i<argc; i++)
			{
				_tcscat(g_pszArgs, argv[i]);
				if (i < argc-1)
					_tcscat(g_pszArgs, TEXT(" "));
			}
			bUseNP = true;
		}
		else
		if (GetOpt(argc, argv, "-localonly", &g_nHosts))
		{
			bool bDoSMP = !GetOpt(argc, argv, "-tcp");
			if (g_nHosts < 1)
			{
				printf("Error: must specify a number greater than 0 after the -localonly option\n");
				return;
			}
			if (argc < 2)
			{
				printf("Error: not enough arguments.\n");
				return;
			}
			TCHAR pszTempExe[MAX_PATH], *namepart;
			_tcscpy(g_pszExe, argv[1]);
			GetFullPathName(g_pszExe, MAX_PATH, pszTempExe, &namepart);
			// Quote the executable in case there are spaces in the path
			_stprintf(g_pszExe, TEXT("\"%s\""), pszTempExe);
			g_pszArgs[0] = TEXT('\0');
			for (int i=2; i<argc; i++)
			{
				_tcscat(g_pszArgs, argv[i]);
				if (i < argc-1)
					_tcscat(g_pszArgs, TEXT(" "));
			}
			RunLocal(bDoSMP);
			return;
		}
		else
		{
			ParseConfigFile(argv[1]);
			if ((_tcslen(g_pszArgs) > 0) && (argc > 2))
				_tcscat(g_pszArgs, TEXT(" "));
			for (int i=2; i<argc; i++)
			{
				_tcscat(g_pszArgs, argv[i]);
				if (i < argc-1)
					_tcscat(g_pszArgs, TEXT(" "));
			}
		}
	}

	TCHAR pszTempExe[MAX_PATH], *namepart;
	GetFullPathName(g_pszExe, MAX_PATH, pszTempExe, &namepart);
	// Quote the executable in case there are spaces in the path
	_stprintf(g_pszExe, TEXT("\"%s\""), pszTempExe);


	// Figure out how many processes to launch
	nNproc = 0;
	if (bUseNP)
		nNproc = g_nHosts;
	else
	{
		HostNode *n = g_pHosts;
		while (n)
		{
			nNproc += n->nSMPProcs;
			n = n->next;
		}
	}

	length = 100;
	if (GetUserName(pszUserName, &length))
		sprintf(pszPipeName, "\\\\.\\pipe\\mpd%s", pszUserName);
	else
		strcpy(pszPipeName, "\\\\.\\pipe\\mpdpipe");
	
	//printf("MPIRunMPD connecting to pipe '%s'\n", pszPipeName);
	HANDLE hPipe = CreateFile(
		pszPipeName,
		GENERIC_READ | GENERIC_WRITE,
		0, NULL,
		OPEN_EXISTING,
		0, NULL);
	
	if (hPipe != INVALID_HANDLE_VALUE)
	{
		HANDLE hOutputPipe;
		HANDLE hIOThread, hReadyEvent;
		
		strcat(pszPipeName, "out");
		hOutputPipe = CreateNamedPipe(
			pszPipeName,
			PIPE_ACCESS_DUPLEX | FILE_FLAG_WRITE_THROUGH,
			PIPE_TYPE_MESSAGE | PIPE_READMODE_BYTE | PIPE_WAIT,
			PIPE_UNLIMITED_INSTANCES,
			0,0,0, 
			NULL
			);
		
		if (hOutputPipe == INVALID_HANDLE_VALUE)
		{
			error = GetLastError();
			printf("Unable to create pipe: error %d on pipe '%s'\n", error, pszPipeName);
			CloseHandle(hPipe);
			ExitProcess(error);
		}
		
		WriteFile(hPipe, pszPipeName, strlen(pszPipeName)+1, &dwNumWritten, NULL);
		//printf("MPIRunMPD waiting for connection back on pipe '%s'\n", pszPipeName);
		if (ConnectNamedPipe(hOutputPipe, NULL))
		{
			strcpy(pBuffer, "create group\n");
			WriteFile(hPipe, pBuffer, strlen(pBuffer), &dwNumWritten, NULL);
			GetString(hOutputPipe, pBuffer);
			nGroupId = atoi(pBuffer);

			//printf("group id acquired: %d\n", nGroupId);

			LaunchNode *pList = NULL, *p;

			if (bUseNP)
			{
				p = pList = new LaunchNode;
				pList->pNext = NULL;

				sprintf(pBuffer, "next %d\n", nNproc);
				WriteFile(hPipe, pBuffer, strlen(pBuffer), &dwNumWritten, NULL);
				for (i=0; i<nNproc; i++)
				{
					GetString(hOutputPipe, pBuffer);
					//printf("host%d: %s\n", i, pBuffer);
					//strcpy(p->pszCmdLine, pszCmdLine);
					strcpy(p->pszCmdLine, g_pszExe);
					strcpy(p->pszArgs, g_pszArgs);
					strcpy(p->pszIPPort, pBuffer);
					strcpy(p->pszDir, ".");
					p->pszEnv[0] = '\0';
					p->nIP = 0;
					p->nPort = 0;
					p->pNext = NULL;
					if (i<nNproc-1)
					{
						p->pNext = new LaunchNode;
						p = p->pNext;
					}
				}
			}
			else
			{
				int iproc = 0;
				int nShmLow = 0, nShmHigh = 0;
				unsigned long nCurIP;
				int nCurPort;
				
				while (g_pHosts)
				{
					nShmLow = iproc;
					nShmHigh = iproc + g_pHosts->nSMPProcs - 1;

					sprintf(pBuffer, "find %s\n", g_pHosts->host);
					WriteFile(hPipe, pBuffer, strlen(pBuffer), &dwNumWritten, NULL);
					GetString(hOutputPipe, pBuffer);
					nCurPort = atoi(pBuffer);
					NT_get_ip(g_pHosts->host, &nCurIP);

					for (int i=0; i<g_pHosts->nSMPProcs; i++)
					{
						if (pList == NULL)
						{
							pList = p = new LaunchNode;
							pList->pNext = NULL;
						}
						else
						{
							p->pNext = new LaunchNode;
							p = p->pNext;
							p->pNext = NULL;
						}
						if (strlen(g_pHosts->exe) > 0)
							strcpy(p->pszCmdLine, g_pHosts->exe);
						else
							strcpy(p->pszCmdLine, g_pszExe);
						strcpy(p->pszArgs, g_pszArgs);
						p->nIP = nCurIP;
						p->nPort = nCurPort;
						
						sprintf(p->pszEnv, 
							"MPICH_USE_MPD=1|MPICH_JOBID=mpi%d|MPICH_NPROC=%d|MPICH_IPROC=%d|MPICH_SHM_LOW=%d|MPICH_SHM_HIGH=%d", 
							nGroupId, nNproc, iproc, nShmLow, nShmHigh);
						
						if (strlen(g_pszEnv) > 0)
						{
							strcat(p->pszEnv, "|");
							strcat(p->pszEnv, g_pszEnv);
						}
						iproc++;
					}
					
					HostNode *n = g_pHosts;
					g_pHosts = g_pHosts->next;
					delete n;
				}
			}

			hReadyEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
			DWORD dwThreadID;
			hIOThread = CreateThread(NULL, 0, (LPTHREAD_START_ROUTINE)RedirectIOLoopThread, hReadyEvent, 0, &dwThreadID);
			if (WaitForSingleObject(hReadyEvent, 5000) != WAIT_OBJECT_0)
			{
				printf("Wait for hReadyEvent failed, error %d\n", GetLastError());
				ExitProcess(1);
			}

			//printf("IO loop waiting on socket: %s:%d\n", g_pszIOListenHost, g_nIOListenPort);

			// launch processes
			g_nConnectionsLeft = nNproc * 2; // 1 for stdout and 1 for stderr
			p = pList;
			for (i=0; i<nNproc; i++)
			{
				if (i == 0)
					sprintf(pBuffer, "launch h'%s'c'%s'a'%s'g'%d'r'%d'0'%s:%d'1'%s:%d'2'%s:%d'\n", 
						p->pszIPPort, p->pszCmdLine, p->pszArgs, nGroupId, i, 
						g_pszIOListenHost, g_nIOListenPort, 
						g_pszIOListenHost, g_nIOListenPort, 
						g_pszIOListenHost, g_nIOListenPort);
				else
					sprintf(pBuffer, "launch h'%s'c'%s'a'%s'g'%d'r'%d'1'%s:%d'2'%s:%d'\n", 
						p->pszIPPort, p->pszCmdLine, p->pszArgs, nGroupId, i, 
						g_pszIOListenHost, g_nIOListenPort, 
						g_pszIOListenHost, g_nIOListenPort);
				WriteFile(hPipe, pBuffer, strlen(pBuffer), &dwNumWritten, NULL);
				p = p->pNext;
				delete pList;
				pList = p;
			}

			strcpy(pBuffer, "done\n");
			WriteFile(hPipe, pBuffer, strlen(pBuffer), &dwNumWritten, NULL);

			CloseHandle(hPipe);
			CloseHandle(hOutputPipe);

			WaitForSingleObject(g_hNoMoreConnectionsEvent, INFINITE);
		}
		else
		{
			error = GetLastError();
			printf("unable to connect to client pipe: error %d\n", error);
			CloseHandle(hPipe);
			CloseHandle(hOutputPipe);
		}
	}
	WSACleanup();
}
Example #19
int main(int argc, char *argv[])
{
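    // Stereo calibration capture: loads camera GUIDs with ParseConfigFile(),
    // grabs checkerboard frames from one or both dc1394 cameras, and writes
    // the accepted frames to calibrationImages/*.ppm.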
    // parse configuration file
    // get input arguments

    OpenCvStereoConfig stereo_config;
    string config_file = "";

    ConciseArgs parser(argc, argv);
    parser.add(config_file, "c", "config", "Configuration file containing camera GUIDs, etc.", true);
    parser.add(left_camera_mode, "l", "left-camera", "Calibrate just the left camera.");
    parser.add(right_camera_mode, "r", "right-camera", "Calibrate just the right camera.");
    parser.add(force_brightness, "b", "brightness", "set brightness to this level");
    parser.add(force_exposure, "e", "exposure", "set exposure to this level");
    parser.parse();

    // parse the config file
    if (ParseConfigFile(config_file, &stereo_config) != true)
    {
        fprintf(stderr, "Failed to parse configuration file, quitting.\n");
        return -1;
    }

    if (left_camera_mode || right_camera_mode)
    {
        stereo_mode = false;
    }

    uint64 guid = stereo_config.guidLeft;
    uint64 guid2 = stereo_config.guidRight;


    dc1394_t        *d;
    dc1394camera_t  *camera;
    dc1394error_t   err;

    Mat frame_array_left[MAX_FRAMES];
    Mat frame_array_right[MAX_FRAMES];

    int numFrames = 0;

    // ----- cam 2 -----
    dc1394_t        *d2;
    dc1394camera_t  *camera2;
    dc1394error_t   err2;

    d = dc1394_new ();
    if (!d)
        g_critical("Could not create dc1394 context");

    d2 = dc1394_new ();
    if (!d2)
        g_critical("Could not create dc1394 context for camera 2");

    camera = dc1394_camera_new (d, guid);
    if (!camera)
        g_critical("Could not create dc1394 camera");

    camera2 = dc1394_camera_new (d2, guid2);
    if (!camera2)
        g_critical("Could not create dc1394 camera for camera 2");

    // setup
    err = setup_gray_capture(camera, DC1394_VIDEO_MODE_FORMAT7_1);
    DC1394_ERR_CLN_RTN(err, cleanup_and_exit(camera), "Could not setup camera");

    err2 = setup_gray_capture(camera2, DC1394_VIDEO_MODE_FORMAT7_1);
    DC1394_ERR_CLN_RTN(err2, cleanup_and_exit(camera2), "Could not setup camera number 2");

    // enable auto-exposure
    // turn on the auto exposure feature
    err = dc1394_feature_set_power(camera, DC1394_FEATURE_EXPOSURE, DC1394_ON);
    DC1394_ERR_RTN(err,"Could not turn on the exposure feature");

    err = dc1394_feature_set_mode(camera, DC1394_FEATURE_EXPOSURE, DC1394_FEATURE_MODE_ONE_PUSH_AUTO);
    DC1394_ERR_RTN(err,"Could not turn on Auto-exposure");

    // enable auto-exposure
    // turn on the auto exposure feature
    err = dc1394_feature_set_power(camera2, DC1394_FEATURE_EXPOSURE, DC1394_ON);
    DC1394_ERR_RTN(err,"Could not turn on the exposure feature for cam2");

    err = dc1394_feature_set_mode(camera2, DC1394_FEATURE_EXPOSURE, DC1394_FEATURE_MODE_ONE_PUSH_AUTO);
    DC1394_ERR_RTN(err,"Could not turn on Auto-exposure for cam2");

    // enable camera
    err = dc1394_video_set_transmission(camera, DC1394_ON);
    DC1394_ERR_CLN_RTN(err, cleanup_and_exit(camera), "Could not start camera iso transmission");
    err2 = dc1394_video_set_transmission(camera2, DC1394_ON);
    DC1394_ERR_CLN_RTN(err2, cleanup_and_exit(camera2), "Could not start camera iso transmission for camera number 2");

    if (left_camera_mode || stereo_mode)
    {
        InitBrightnessSettings(camera, camera2);
        MatchBrightnessSettings(camera, camera2, true, force_brightness, force_exposure);
    } else {
        // use the right camera as the master for brightness
        // since we're calibrating that one
        InitBrightnessSettings(camera2, camera);
        MatchBrightnessSettings(camera2, camera, true);
    }

    // make opencv windows
    if (left_camera_mode || stereo_mode)
    {
    	namedWindow("Input Left", CV_WINDOW_AUTOSIZE);
    	moveWindow("Input Left", 100, 100);
    }

    if (right_camera_mode || stereo_mode)
    {
    	namedWindow("Input Right", CV_WINDOW_AUTOSIZE);
    	moveWindow("Input Right", 478, 100);
    }


    CvSize size;

    Mat cornersL, cornersR;

    int i;

    while (numFrames < MAX_FRAMES) {

        Mat chessL, chessR;

        // each loop dump a bunch of frames to clear the buffer
        MatchBrightnessSettings(camera, camera2, true, force_brightness, force_exposure);
        for (i=0;i<10;i++)
        {
            if (left_camera_mode || stereo_mode)
            {
                chessL = GetFrameFormat7(camera);
            }

            if (right_camera_mode || stereo_mode)
            {
                chessR = GetFrameFormat7(camera2);
            }
        }

        // copy the images for drawing/display
        size = chessL.size();
        Mat chessLc;
        chessLc.create(size, CV_32FC3);
        Mat chessRc;
        chessRc.create(size, CV_32FC3);

        // attempt checkerboard matching
        bool foundPattern = true; // set to true so we can do an OR
                                  // later if we're only using one
                                  // camera

        if (left_camera_mode || stereo_mode)
        {
            foundPattern = findChessboardCorners(chessL, Size(CHESS_X, CHESS_Y), cornersL);
        }

        if (right_camera_mode || stereo_mode)
        {
            foundPattern = foundPattern & findChessboardCorners(chessR, Size(CHESS_X, CHESS_Y), cornersR);
        }

        if (left_camera_mode || stereo_mode)
        {
            cvtColor( chessL, chessLc, CV_GRAY2BGR );
            drawChessboardCorners(chessLc, Size(CHESS_X, CHESS_Y), cornersL, foundPattern);
            imshow("Input Left", chessLc);
        }

        if (right_camera_mode || stereo_mode)
        {
            cvtColor(chessR, chessRc, CV_GRAY2BGR);
            drawChessboardCorners(chessRc, Size(CHESS_X, CHESS_Y), cornersR, foundPattern);

            imshow("Input Right", chessRc);
        }


		// key codes:
		// page up: 654365
		// page down: 65366
		// b: 98
		char key = waitKey();
		//printf("%d\n", (int)key);
		if (key == 98)
		{
		    break;
		} else if (key == 86){
		    if (foundPattern)
		    {
		        // this was a good one -- save it
		        frame_array_left[numFrames] = chessL;
                frame_array_right[numFrames] = chessR;

                // give the user some guidance on the number
                // of frames they should be using
                if (stereo_mode)
                {
                    printf("Saved frame %d / about 10\n", numFrames);
                } else {
                    printf("Saved frame %d / about 20-30\n", numFrames);
                }

                numFrames ++;
            } else {
                printf("Not saving frame since did not find a checkboard.\n");
            }
		} else if (key == 'W') {
            force_brightness +=20;
            cout << "Brightness: " << force_brightness << "\n";
        } else if (key == 'w') {
            force_brightness -=20;
            cout << "Brightness: " << force_brightness << "\n";
        } else if (key == 'E') {
            force_exposure +=20;
            cout << "Exposure: " << force_exposure << "\n";
        } else if (key == 'e') {
            force_exposure -=20;
            cout << "Exposure: " << force_exposure << "\n";
        }
	}

    printf("\n\n");

    // clear out the calibration directory
    printf("Deleting old images...\nrm calibrationImages/*.ppm\n");
    int retval = system("rm calibrationImages/*.ppm");
    if (retval != 0) {
        printf("Warning: Deleting images may have failed.\n");
    }
    printf("done.\n");

    char filename[1000];

    for (i=0;i<numFrames;i++)
    {
        if (left_camera_mode || stereo_mode)
        {
            sprintf(filename, "calibrationImages/cam1-%05d.ppm", i+1);
            imwrite(filename, frame_array_left[i]);
        }

        if (right_camera_mode || stereo_mode)
        {
            sprintf(filename, "calibrationImages/cam2-%05d.ppm", i+1);
            imwrite(filename, frame_array_right[i]);
        }

        printf("Writing frame %d\n", i);
    }

    printf("\n\n");

    destroyWindow("Input Left");
    destroyWindow("Input Right");

    // stop data transmission
    err = dc1394_video_set_transmission(camera, DC1394_OFF);
    DC1394_ERR_CLN_RTN(err,cleanup_and_exit(camera),"Could not stop the camera");

    err2 = dc1394_video_set_transmission(camera2, DC1394_OFF);
    DC1394_ERR_CLN_RTN(err2,cleanup_and_exit(camera2),"Could not stop the camera 2");

    // close camera
    cleanup_and_exit(camera);
    cleanup_and_exit(camera2);
    dc1394_free (d);
    dc1394_free (d2);

    return 0;
}
Example #20
bool Parser::ParseConfig(const std::string& path) {
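    // Directories are handled by ParseConfigDir(), single files by
    // ParseConfigFile().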
    if (is_dir(path.c_str())) {
        return ParseConfigDir(path);
    }
    return ParseConfigFile(path);
}