コード例 #1
0
ファイル: canvas.c プロジェクト: AlfiyaZi/raytracing-render
/*
 * read_png - load a PNG file from disk into a freshly allocated Canvas.
 *
 * file_name: path of the PNG file to read.
 * Returns a new Canvas holding the image's RGB data.
 *
 * On any failure (unreadable file, bad signature, libpng errors) the
 * function calls abort_() with a diagnostic; abort_ is assumed not to
 * return, so no cleanup is attempted on those paths.
 *
 * NOTE(review): the pixel copy below indexes rows as 3 bytes per pixel,
 * i.e. it assumes an 8-bit RGB image with no alpha channel — TODO confirm
 * all inputs match, or add png_set_expand()/png_set_strip_alpha()
 * transforms before png_read_update_info() to normalize other formats.
 */
Canvas *
read_png(char * file_name) {
    
    unsigned char header[8];    // 8 is the maximum size that can be checked
    
    // open file and test for it being a png
    FILE *fp = fopen(file_name, "rb");
    if (!fp)
        abort_("[read_png_file] File %s could not be opened for reading", file_name);
    // A short read (truncated/empty file) must not reach png_sig_cmp with
    // uninitialized bytes; treat it the same as a bad signature.
    if (fread(header, 1, 8, fp) != 8)
        abort_("[read_png_file] File %s is not recognized as a PNG file", file_name);
    if (png_sig_cmp(header, 0, 8))
        abort_("[read_png_file] File %s is not recognized as a PNG file", file_name);
    
    
    // initialize stuff
    png_structp png_ptr = png_create_read_struct(PNG_LIBPNG_VER_STRING, NULL, NULL, NULL);
    
    if (!png_ptr)
        abort_("[read_png_file] png_create_read_struct failed");
    
    png_infop info_ptr = png_create_info_struct(png_ptr);
    if (!info_ptr)
        abort_("[read_png_file] png_create_info_struct failed");
    
    // libpng reports errors via longjmp back to this point.
    if (setjmp(png_jmpbuf(png_ptr)))
        abort_("[read_png_file] Error during init_io");
    
    png_init_io(png_ptr, fp);
    // We already consumed the 8 signature bytes above.
    png_set_sig_bytes(png_ptr, 8);
    
    png_read_info(png_ptr, info_ptr);
    
    int width = png_get_image_width(png_ptr, info_ptr);
    int height = png_get_image_height(png_ptr, info_ptr);
    
    png_set_interlace_handling(png_ptr);
    png_read_update_info(png_ptr, info_ptr);
    
    
    // read file
    if (setjmp(png_jmpbuf(png_ptr)))
        abort_("[read_png_file] Error during read_image");
    
    // One heap buffer per scanline, sized by libpng's own row stride.
    png_bytep * row_pointers = (png_bytep*) malloc(sizeof(png_bytep) * height);
    int y;
    for (y = 0; y < height; y++)
        row_pointers[y] = (png_byte*) malloc(png_get_rowbytes(png_ptr,info_ptr));
    
    png_read_image(png_ptr, row_pointers);
    
    // Copy scanlines into the canvas, assuming tightly packed 8-bit RGB
    // triples (see the NOTE above).
    Canvas * canvas = new_canvas(width, height);
    int x;
    for (y = 0; y < canvas->h; y++) {
        png_byte * row = row_pointers[y];
        for (x = 0; x < canvas->w; x++) {
            png_byte * ptr = &(row[x * 3]);
            
            set_pixel(x, y, rgb(ptr[0], ptr[1], ptr[2]), canvas);
        }
    }
    
    
    // cleanup heap allocation
    for (y = 0; y < canvas->h; y++)
        free(row_pointers[y]);
    free(row_pointers);
    
    fclose(fp);
    
    return canvas;
}
コード例 #2
0
ファイル: PixelBuffer.cpp プロジェクト: Materdaddy/xLights
void PixelBufferClass::Color2HSV(const wxColour& color, wxImage::HSVValue& hsv)
{
    wxImage::RGBValue rgb(color.Red(),color.Green(),color.Blue());
    hsv=wxImage::RGBtoHSV(rgb);
}
コード例 #3
0
ファイル: n3d_ex_rgb.cpp プロジェクト: 8BitPimp/nano3d
// Rasterize one triangle within a single screen-space bin, interpolating a
// per-vertex RGB colour across the covered pixels with perspective
// correction (attributes are stored premultiplied by 1/w and divided back
// out per fragment). Uses a "w-buffer": larger 1/w means closer, so the
// depth test is a greater-than comparison.
//
// s    - rasterizer state: render-target pointers, pitch, bin offset.
// t    - triangle setup: attribute values (v_) plus per-pixel x/y step
//        deltas (sx_, sy_) for incremental interpolation.
// user - unused callback context.
void n3d_raster_rgb_raster(
    const n3d_rasterizer_t::state_t& s,
    const n3d_rasterizer_t::triangle_t& t,
    void* user)
{
    // bin / triangle intersection boundary
    const aabb_t bound = get_bound(s, t);
    const float offsetx = s.offset_.x + bound.x0;
    const float offsety = s.offset_.y + bound.y0;
    const uint32_t pitch = s.pitch_;

    // ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ----
    // barycentric interpolants
          vec3f_t bc_vy = { t.v_ [e_attr_b0], t.v_ [e_attr_b1], t.v_ [e_attr_b2] };
    const vec3f_t bc_sx = { t.sx_[e_attr_b0], t.sx_[e_attr_b1], t.sx_[e_attr_b2] };
    const vec3f_t bc_sy = { t.sy_[e_attr_b0], t.sy_[e_attr_b1], t.sy_[e_attr_b2] };
    // shift to offset (advance the row start to the bin's top-left corner)
    bc_vy += bc_sx * offsetx;
    bc_vy += bc_sy * offsety;

    // ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ----
    // colour interpolants (attribute slots 6..8 hold r, g, b)
          vec3f_t cl_vy = { t.v_ [6], t.v_ [7], t.v_ [8] };
    const vec3f_t cl_sx = { t.sx_[6], t.sx_[7], t.sx_[8] };
    const vec3f_t cl_sy = { t.sy_[6], t.sy_[7], t.sy_[8] };
    // shift to offset
    cl_vy += cl_sx * offsetx;
    cl_vy += cl_sy * offsety;

    // ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ----
    // 1/w interpolants
          float w_vy = t.v_ [e_attr_w];
    const float w_sx = t.sx_[e_attr_w];
    const float w_sy = t.sy_[e_attr_w];
    // shift to offset
    w_vy += w_sx * offsetx;
    w_vy += w_sy * offsety;

    // ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ---- ----
    uint32_t* dst = s.target_[n3d_target_pixel].uint32_;
    float* depth = s.target_[n3d_target_depth].float_;

    // pre step the buffers to x/y location
    dst   += pitch * bound.y0;
    depth += pitch * bound.y0;

    // y axis
    for (int32_t y = bound.y0; y < bound.y1; ++y) {

        // *_vy holds the value at the start of the current row; *_vx walks
        // along the row. Row values are stepped only after the inner loop.
        vec3f_t bc_vx = bc_vy;
        vec3f_t cl_vx = cl_vy;
        float w_vx = w_vy;

        // x axis
        for (int32_t x = bound.x0; x < bound.x1; ++x) {

            // check if inside triangle (all barycentric weights non-negative)
            if (bc_vx.x >= 0.f && bc_vx.y >= 0.f && bc_vx.z >= 0.f) {

                // depth test (w buffering: larger 1/w is nearer)
                if (w_vx > depth[x]) {

                    // find fragment colour (undo the 1/w premultiplication)
                    const float r = cl_vx.x / w_vx;
                    const float g = cl_vx.y / w_vx;
                    const float b = cl_vx.z / w_vx;

                    // update colour buffer
                    dst[x] = rgb(r, g, b);

                    // update (w) depth buffer
                    depth[x] = w_vx;
                }
            }

            // step on x axis
            bc_vx += bc_sx;
            cl_vx += cl_sx;
            w_vx += w_sx;

        } // for (x axis)

        // step on y axis
        bc_vy += bc_sy;
        cl_vy += cl_sy;
        w_vy += w_sy;

        // step the buffers
        dst += pitch;
        depth += pitch;

    } // for (y axis)
}
コード例 #4
0
ファイル: DebugManager.cpp プロジェクト: jotak/shahnarman
// -----------------------------------------------------------------
// Name : autoStartGame
//
// Debug helper that boots a complete local game without going through
// the lobby UI: it loads the "standard.lua" map with its default
// parameters, starts a server with one local client, creates a neutral
// player plus two test players ("test1", "test2") from stored profiles,
// places their avatars, and applies every equipped spell and artifact
// effect before signalling the server that initialization is finished.
// -----------------------------------------------------------------
void DebugManager::autoStartGame()
{
    // Build client data (a single, local client)
    int nbClients = 1;
    ClientData * clients = new ClientData[nbClients];
    int iClient = 0;
    clients[iClient].bLocal = true;

    // Re-init map data
    MapReader * pMapReader = new MapReader(m_pLocalClient);
    pMapReader->init("standard.lua");
    ObjectList * pMapParameters = new ObjectList(true);
    pMapReader->getMapParameters(pMapParameters, LABEL_MAX_CHARS);

    int * pCustomParams = NULL;
    if (pMapParameters->size > 0)
        pCustomParams = new int[pMapParameters->size];

    // Map custom parameters: take the default value of every parameter the
    // map script declares. (If the list is empty the loop below never runs,
    // so pCustomParams == NULL is safe here.)
    int i = 0;
    MapReader::MapParameters * pParam = (MapReader::MapParameters*) pMapParameters->getFirst(0);
    while (pParam != NULL)
    {
        pCustomParams[i++] = pParam->defaultValueIndex;
        pParam = (MapReader::MapParameters*) pMapParameters->getNext(0);
    }

    // Init map generator (we will not delete it here, as the pointer now belong to Server object)
    // NOTE(review): pCustomParams is deleted right after this call, so
    // setMapParameters presumably copies the array — verify in MapReader.
    pMapReader->setMapParameters(pCustomParams, pMapParameters->size, 2);
    delete[] pCustomParams;
    MapReader::deleteMapParameters(pMapParameters);
    delete pMapParameters;

    // Init server (takes ownership of pMapReader; clients array is copied)
    Server * pServer = m_pLocalClient->initServer("", 1, clients, pMapReader, -1, -1);
    delete[] clients;
    if (pServer == NULL)
    {
        notifyErrorMessage("Error: server could not be initialized.");
        return;
    }

    // Build players data
    ObjectList * pServerPlayers = pServer->getSolver()->getPlayersList();
    // Create neutral player (id 0, grey colour, localized name)
    char sName[NAME_MAX_CHARS];
    i18n->getText("NEUTRA", sName, NAME_MAX_CHARS);
    Player * pPlayer = new Player(0, 0, pServer->getSolver()->getGlobalSpellsPtr());
    wsafecpy(pPlayer->m_sProfileName, NAME_MAX_CHARS, sName);
    pPlayer->m_Color = rgb(0.5, 0.5, 0.5);
    wsafecpy(pPlayer->m_sBanner, 64, "blason1");
    pServer->getSolver()->setNeutralPlayer(pPlayer);
    // Human players: two hard-coded test profiles "test1" and "test2".
    int playerId = 1;
    // NOTE(review): loop counter name "fdfdf" is clearly a placeholder.
    for (int fdfdf = 0; fdfdf < 2; fdfdf++)
    {
        // Create player object
        pPlayer = new Player(playerId, 0, pServer->getSolver()->getGlobalSpellsPtr());
        snprintf(pPlayer->m_sProfileName, NAME_MAX_CHARS, "test%d", playerId);
        // NOTE(review): pProfile / pAvatar are dereferenced without a NULL
        // check — this debug path assumes the "testN" profiles exist and
        // own at least one avatar. TODO confirm or guard.
        Profile * pProfile = m_pLocalClient->getDataFactory()->findProfile(pPlayer->m_sProfileName);
        AvatarData * pAvatar = (AvatarData*) pProfile->getAvatarsList()->getFirst(0);
        pPlayer->m_Color = rgb(1, 1, 1);
        pAvatar->getBanner(pPlayer->m_sBanner, 64);
        pServerPlayers->addLast(pPlayer);
        // Set Avatar at the map-defined start position for this player
        CoordsMap pos = pMapReader->getPlayerPosition(playerId-1);
        pServer->getSolver()->setInitialAvatar(pAvatar->clone(m_pLocalClient), pPlayer, pos);
        // Add spells that are equipped (owned by this avatar in the profile)
        Profile::SpellData * pSpellDesc = (Profile::SpellData*) pProfile->getSpellsList()->getFirst(0);
        while (pSpellDesc != NULL)
        {
            AvatarData * pOwner = pSpellDesc->m_pOwner;
            if (pOwner != NULL && strcmp(pAvatar->m_sEdition, pOwner->m_sEdition) == 0
                    && strcmp(pAvatar->m_sObjectId, pOwner->m_sObjectId) == 0)
                pServer->getSolver()->addInitialPlayerSpell(pPlayer, pSpellDesc->m_sEdition, pSpellDesc->m_sName);
            pSpellDesc = (Profile::SpellData*) pProfile->getSpellsList()->getNext(0);
        }
        // Add equipped artifacts: each artifact carries a list of effects
        // that either modify a characteristic, grant a spell, or grant a
        // skill to the avatar in game.
        Artifact * pArtifact = (Artifact*) pProfile->getArtifactsList()->getFirst(0);
        while (pArtifact != NULL)
        {
            AvatarData * pOwner = pArtifact->m_pOwner;
            if (pOwner != NULL && strcmp(pAvatar->m_sEdition, pOwner->m_sEdition) == 0
                    && strcmp(pAvatar->m_sObjectId, pOwner->m_sObjectId) == 0)
            {
                Unit * pAvatarInGame = pPlayer->getAvatar();
                assert(pAvatarInGame != NULL);
                ArtifactEffect * pEffect = (ArtifactEffect*) pArtifact->getArtifactEffects()->getFirst(0);
                while (pEffect != NULL)
                {
                    switch (pEffect->getType())
                    {
                    case ARTIFACT_EFFECT_CHARAC:
                    {
                        // Modify a named characteristic, clamped at 0.
                        bool bFound = true;
                        long val = pAvatarInGame->getValue(((ArtifactEffect_Charac*)pEffect)->m_sKey, false, &bFound);
                        if (bFound)
                            pAvatarInGame->setBaseValue(((ArtifactEffect_Charac*)pEffect)->m_sKey, max(0, val + ((ArtifactEffect_Charac*)pEffect)->m_iModifier));
                        else
                        {
                            char sError[1024];
                            snprintf(sError, 1024, "Warning: artifact %s tries to modify characteristic that doesn't exist (%s)", pArtifact->m_sObjectId, ((ArtifactEffect_Charac*)pEffect)->m_sKey);
                            m_pLocalClient->getDebug()->notifyErrorMessage(sError);
                        }
                        break;
                    }
                    case ARTIFACT_EFFECT_SPELL:
                    {
                        // Grant an extra spell if it exists in the data factory.
                        Spell * pSpell = m_pLocalClient->getDataFactory()->findSpell(((ArtifactEffect_Spell*)pEffect)->m_sSpellEdition, ((ArtifactEffect_Spell*)pEffect)->m_sSpellName);
                        if (pSpell != NULL)
                            pServer->getSolver()->addInitialPlayerSpell(pPlayer, ((ArtifactEffect_Spell*)pEffect)->m_sSpellEdition, ((ArtifactEffect_Spell*)pEffect)->m_sSpellName);
                        else
                        {
                            char sError[1024];
                            snprintf(sError, 1024, "Warning: artifact %s tries to add spell that doesn't exist (%s)", pArtifact->m_sObjectId, ((ArtifactEffect_Spell*)pEffect)->m_sSpellName);
                            m_pLocalClient->getDebug()->notifyErrorMessage(sError);
                        }
                        break;
                    }
                    case ARTIFACT_EFFECT_SKILL:
                    {
                        // Grant a skill loaded from its edition/name/parameters.
                        Skill * pSkill = new Skill(((ArtifactEffect_Skill*)pEffect)->m_sSkillEdition, ((ArtifactEffect_Skill*)pEffect)->m_sSkillName, ((ArtifactEffect_Skill*)pEffect)->m_sSkillParameters, pServer->getDebug());
                        if (pSkill != NULL && pSkill->isLoaded())
                            pAvatarInGame->addSkill(pSkill);
                        else
                        {
                            char sError[1024];
                            snprintf(sError, 1024, "Warning: artifact %s tries to add skill that doesn't exist or that can't be loaded (%s)", pArtifact->m_sObjectId, ((ArtifactEffect_Skill*)pEffect)->m_sSkillName);
                            m_pLocalClient->getDebug()->notifyErrorMessage(sError);
                        }
                        break;
                    }
                    }
                    pEffect = (ArtifactEffect*) pArtifact->getArtifactEffects()->getNext(0);
                }
            }
            pArtifact = (Artifact*) pProfile->getArtifactsList()->getNext(0);
        }
        playerId++;
    }
    delete pMapReader;
    pServer->onInitFinished();
}
コード例 #5
0
ファイル: main.cpp プロジェクト: giacomodabisias/R200_grabber
// Grab point clouds from an Intel R200 camera and display them in a PCL
// visualizer until the viewer window is closed (press 'q' to exit).
// Returns 0 on normal termination.
int main(int argc, char * argv[])
{
	std::cout << "press q to exit" << std::endl;
	R200Grabber grabber;
	const uint8_t nearColor[] = {255, 0, 0};
	const uint8_t farColor[] = {20, 40, 255};
	// Depth colourization buffer. Declared static: at 628*468*3 bytes
	// (~880 KB) it would otherwise risk overflowing the default stack.
	// NOTE(review): 628x468 presumably matches the depth image size — the
	// commented-out code below uses it with getDepthSize(); confirm.
	static uint8_t cdepth[628 * 468 * 3];
	std::cout << "start" << std::endl;

	pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud = grabber.allocCloud();
	grabber.getCloudT(cloud);

	// Flip the cloud (180° about x) so it appears upright in the viewer.
	cloud->sensor_orientation_.w() = 0.0;
	cloud->sensor_orientation_.x() = 1.0;
	cloud->sensor_orientation_.y() = 0.0;
	cloud->sensor_orientation_.z() = 0.0;

	boost::shared_ptr<pcl::visualization::PCLVisualizer> viewer(new pcl::visualization::PCLVisualizer ("3D Viewer"));
	viewer->setBackgroundColor(0, 0, 0);
	viewer->registerKeyboardCallback(KeyboardEventOccurred, (void*)&grabber);

	pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGB> rgb(cloud);
	viewer->addPointCloud<pcl::PointXYZRGB>(cloud, rgb, "sample cloud");
	viewer->setPointCloudRenderingProperties(pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 1, "sample cloud");

	cv::Mat limage(grabber.getLRSize(), CV_8UC1);
	cv::Mat rimage(grabber.getLRSize(), CV_8UC1);
	cv::Mat dimage(grabber.getDepthSize(), CV_8UC3);
	cv::Mat cimage(grabber.getColorSize(), CV_8UC3);

	double ftime;
	int frame;
	int dwidth = grabber.getDepthSize().width;
	int dheight = grabber.getDepthSize().height;
	while(!viewer->wasStopped())
	{
		if(grabber.hasData() ){

		    ftime = grabber.getFrameTime();
		    frame = grabber.getFrameNumber();
		    
		    //std::cout << "Frame " << frame << " @ " << std::fixed << ftime << std::endl;
		    
		    const uint16_t * pd = grabber.getDepthImage();
		    const void * pl = grabber.getLImage();
		    const void * pr = grabber.getRImage();
		    const void * pc = grabber.getColorImageRGB();
		    /*
		    ConvertDepthToRGBUsingHistogram(pd, dwidth, dheight, nearColor, farColor, cdepth);

		    limage.data = (uint8_t *)pl;
		    rimage.data = (uint8_t *)pr;
		    dimage.data = (uint8_t *)cdepth;
		    cimage.data = (uint8_t *)pc;

    		cv::imshow("limage", limage);
    		cv::imshow("rimage", rimage);
    		cv::imshow("cimage", cimage);
    		cv::imshow("dimage", dimage);
			cv::waitKey(1);
			*/

			// Time the cloud acquisition and report it in milliseconds.
    		std::chrono::high_resolution_clock::time_point tnow = std::chrono::high_resolution_clock::now();
    		grabber.getCloudT(cloud);
    		std::chrono::high_resolution_clock::time_point tpost = std::chrono::high_resolution_clock::now();
    		std::cout << "delta " << std::chrono::duration_cast<std::chrono::duration<double>>(tpost-tnow).count() * 1000 << std::endl;

    		//pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGB> rgb(cloud);
    		viewer->updatePointCloud<pcl::PointXYZRGB>(cloud, rgb, "sample cloud");
    		viewer->spinOnce();
		}
	}

	return 0;
}
コード例 #6
0
// Segment the query cloud between the two joints and refresh the soft
// per-point labels of the fixed home-cloud template.
//
// For each joint i, home_cloud[i] is the query cloud transformed by that
// joint's current weights. Every transformed point is matched against the
// home-cloud template with a kd-tree; each point votes for the joint whose
// transform brought it closest, and the votes accumulated per template
// point are normalized into probabilistic labels (home_cloud_label_).
// The labels then drive a hard segmentation of the query cloud, whose
// per-joint subsets are assembled for the (currently disabled) gradient
// step before Update() is called.
//
// home_cloud      - per-joint transformed copies of the query cloud.
// query_cloud     - raw input cloud (rows = points).
// feature         - feature matrix forwarded to the gradient computation.
// iteration_count - current iteration; drives periodic visualization.
//
// NOTE: the voting and segmentation logic is hard-coded for exactly two
// joints (indices 0 and 1), even though several containers are sized by
// num_joints_.
void GeneralTransform::SegmentationAndUpdateFixedHomePos(std::vector<cv::Mat>& home_cloud, cv::Mat& query_cloud, cv::Mat& feature, int iteration_count)
{
    cv::Mat target_cloud, transformed_cloud;
    int query_cloud_size = query_cloud.rows;
    int cloud_dim = home_cloud[0].cols;
    std::vector<cv::Mat> indices(num_joints_);
    std::vector<cv::Mat> min_dists(num_joints_);
    // Label smoothing rate for the commented-out update below.
    // Fixed: was declared int, which truncated 1e-1 to 0.
    const double p_rates = 1e-1;

    // match different clouds, transformed by different weights, with the home cloud template...
    cv::flann::Index kd_trees(home_cloud_template_float_, cv::flann::KDTreeIndexParams(4), cvflann::FLANN_DIST_EUCLIDEAN); // build kd tree
    for(int i = 0; i < num_joints_; i++)
    {
        indices[i] = cv::Mat::zeros(query_cloud_size, 1, CV_32S);
        min_dists[i] = cv::Mat::zeros(query_cloud_size, 1, CV_32F);
        home_cloud[i].convertTo(transformed_cloud, CV_32F);
        kd_trees.knnSearch(transformed_cloud, indices[i], min_dists[i], 1, cv::flann::SearchParams(64)); // kd tree search
    }

    /************* segmentation based on closest neighbor and update the probability according to distance *********************/

    // first accumulate the data: each query point votes for the joint whose
    // transform matched it best, on the template point it matched under
    // that joint's transform.
    for(int i = 0; i < query_cloud_size; i++)
    {
        int curr_idx_0 = indices[0].at<int>(i, 0);
        int curr_idx_1 = indices[1].at<int>(i, 0);
        // two joints here
        if(min_dists[0].at<float>(i, 0) < min_dists[1].at<float>(i, 0))
            vote_accumulation_.at<double>(curr_idx_0, 0) = vote_accumulation_.at<double>(curr_idx_0, 0) + 1;
        else
            // Fixed: joint-1 votes must land on the template point matched
            // under joint 1's transform (curr_idx_1); previously they were
            // written to curr_idx_0, leaving curr_idx_1 unused.
            vote_accumulation_.at<double>(curr_idx_1, 1) = vote_accumulation_.at<double>(curr_idx_1, 1) + 1;
    }

    // Normalize the accumulated votes into per-template-point labels;
    // points with no votes stay at the uninformative 0.5/0.5 prior.
    for(int i = 0; i < home_cloud_template_.rows; i++)
    {
        if(vote_accumulation_.at<double>(i, 0) == 0 && vote_accumulation_.at<double>(i, 1) == 0)
        {
            home_cloud_label_.at<double>(i, 0) = 0.5; home_cloud_label_.at<double>(i, 1) = 0.5;
        }
        else if(vote_accumulation_.at<double>(i, 0) == 0)
        {
            home_cloud_label_.at<double>(i, 0) = 0; home_cloud_label_.at<double>(i, 1) = 1;
        }
        else if(vote_accumulation_.at<double>(i, 1) == 0)
        {
            home_cloud_label_.at<double>(i, 0) = 1; home_cloud_label_.at<double>(i, 1) = 0;
        }
        else
        {
            double sum = vote_accumulation_.at<double>(i, 0) + vote_accumulation_.at<double>(i, 1);
            home_cloud_label_.at<double>(i, 0) = vote_accumulation_.at<double>(i, 0) / sum;
            home_cloud_label_.at<double>(i, 1) = vote_accumulation_.at<double>(i, 1) / sum;
        }
    }

    // home_cloud_label_ = home_cloud_label_ + p_rates * (curr_probability - home_cloud_label_);

    // Periodically visualize the template, coloured by the joint-0 label.
    if(iteration_count % 500 == 1)
    {
        pcl::PointCloud<pcl::PointXYZRGB>::Ptr colored_template;
        colored_template.reset(new pcl::PointCloud<pcl::PointXYZRGB>());
        // colored_template->resize(home_cloud_template_.rows);
        for(int i = 0; i < home_cloud_template_.rows; i++)
        {
            pcl::PointXYZRGB point;
            point.x = home_cloud_template_.at<double>(i, 0);
            point.y = home_cloud_template_.at<double>(i, 1);
            point.z = home_cloud_template_.at<double>(i, 2);
            COLOUR c = GetColour(home_cloud_label_.at<double>(i, 0), 0.0, 1.0);
            point.r = c.r * 255.0;
            point.g = c.g * 255.0;
            point.b = c.b * 255.0;

            colored_template->push_back(point);
        }
        pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGB> rgb(colored_template);
        if(iteration_count == 1)
            viewer_->addPointCloud<pcl::PointXYZRGB>(colored_template, rgb, "template");
        else
            viewer_->updatePointCloud<pcl::PointXYZRGB>(colored_template, rgb, "template");

        viewer_->spinOnce(10);
    }

    std::vector<int> segmentation_count(num_joints_);
    std::vector<cv::Mat> segmented_target_cloud(num_joints_);
    std::vector<cv::Mat> segmented_transformed_cloud(num_joints_);
    std::vector<cv::Mat> segmented_query_cloud(num_joints_);
    std::vector<cv::Mat> segmented_idx(num_joints_);
    // pre allocate
    std::cout << "pre-allocate..." << std::endl;
    for(int i = 0; i < num_joints_; i++)
    {
        segmentation_count[i] = 0; // query_cloud.rows;
        segmented_idx[i] = cv::Mat::zeros(query_cloud_size, 2, CV_64F); // first column original idx, second column matched idx
    }
    // Hard-assign every query point to the joint with the larger label at
    // its matched template point... only works for two joints here...
    std::cout << "segment..." << std::endl;
    for(int i = 0; i < query_cloud_size; i++)
    {
        int min_idx = 0;
        // NOTE(review): labels are indexed by the query-point index i here,
        // but home_cloud_label_ is stored per template point — the original
        // author flagged this line as buggy too; left as-is pending a
        // decision on the intended indexing.
        if(home_cloud_label_.at<double>(i, 0) > home_cloud_label_.at<double>(i, 1))
            min_idx = 0;
        else
            min_idx = 1;
        int pos = segmentation_count[min_idx];
        segmented_idx[min_idx].at<double>(pos, 0) = i;
        segmented_idx[min_idx].at<double>(pos, 1) = indices[min_idx].at<int>(i, 0);
        segmentation_count[min_idx]++;
    }
    // Gather the per-joint subsets of template / transformed / query points.
    std::cout << "separate data..." << std::endl;
    for(int i = 0; i < num_joints_; i++)
    {
        segmented_target_cloud[i] = cv::Mat::zeros(segmentation_count[i], cloud_dim, CV_64F);
        segmented_transformed_cloud[i] = cv::Mat::zeros(segmentation_count[i], cloud_dim, CV_64F);
        segmented_query_cloud[i] = cv::Mat::zeros(segmentation_count[i], cloud_dim, CV_64F);
        for(int j = 0; j < segmentation_count[i]; j++)
        {
            int query_pos = segmented_idx[i].at<double>(j, 0);
            int matched_pos = segmented_idx[i].at<double>(j, 1);
            if(matched_pos >= home_cloud_template_.rows || j >= segmented_transformed_cloud[i].rows || query_pos >= home_cloud[i].rows)
                std::cout << "matched pos not correct..." << std::endl;
            home_cloud[i].rowRange(query_pos, query_pos + 1).copyTo(segmented_transformed_cloud[i].rowRange(j, j + 1));
            query_cloud.rowRange(query_pos, query_pos + 1).copyTo(segmented_query_cloud[i].rowRange(j, j + 1));
            home_cloud_template_.rowRange(matched_pos, matched_pos + 1).copyTo(segmented_target_cloud[i].rowRange(j, j + 1));
        }
    }


    // CalcGradient(matched_template, home_cloud, query_cloud_list, feature, matched_probabilities);
    std::cout << "calculating gradient..." << std::endl;
    // CalcGradient(segmented_target_cloud, segmented_transformed_cloud, segmented_query_cloud, feature, segmentation_count);
    std::cout << "updating..." << std::endl;
    Update(iteration_count);
}
コード例 #7
0
ファイル: pcl_visualizer_crop.cpp プロジェクト: tfinley/ROS
// Crop the input cloud to the unit cube [0,1]^3, then open a PCL viewer
// showing the filtered points (RGB-coloured) together with a wireframe cube
// marking the crop region. Returns the viewer so the caller can spin it.
boost::shared_ptr<pcl::visualization::PCLVisualizer> customColourVis (pcl::PointCloud<pcl::PointXYZRGB>::ConstPtr cloudIn)
{

  //filter
  
  //Define your cube with two points in space:
  pcl::PointXYZ min_pt;
  min_pt.x=0.00;  // define minimum point x
  min_pt.y=0.00;  // define minimum point y
  min_pt.z=0.00;  // define minimum point z
  pcl::PointXYZ max_pt;
  max_pt.x=1.0;  // define max point x
  max_pt.y=1.0;  // define max point y
  max_pt.z=1.0;  // define max point z

  //Define translation and rotation ( this is optional)

        // Centre of the crop box, used for drawing the cube below.
        Eigen::Vector3f translation (min_pt.x+(max_pt.x-min_pt.x)/2,
                                                                        min_pt.y+(max_pt.y-min_pt.y)/2,
                                                                        min_pt.z+(max_pt.z-min_pt.z)/2);
        // Identity rotation, (w, x, y, z) = (1, 0, 0, 0).
        // Fixed: was (0,0,0,0) — the all-zero quaternion is not a valid
        // rotation and produces a degenerate transform in addCube().
        Eigen::Quaternionf rotation(1, 0, 0, 0);


  /*
  I would recommend to create
                       Eigen::Affine3f boxTransform;
                      and use
                       cropFilter.setTransform(boxTransform);
          insted of using translation and rotation but it gives same result so it is your choise.

  
  */


//  pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloudOut (new pcl::PointCloud<pcl::PointXYZRGB>);

  pcl::PointCloud<pcl::PointXYZRGB>::Ptr point_cloud_ptr_filter (new pcl::PointCloud<pcl::PointXYZRGB>);
  pcl::PointCloud<pcl::PointXYZRGB>& point_cloud_filter = *point_cloud_ptr_filter;

  pcl::CropBox<pcl::PointXYZRGB> cropFilter;
  cropFilter.setInputCloud (cloudIn);
  cropFilter.setMin(min_pt.getVector4fMap());
  cropFilter.setMax(max_pt.getVector4fMap());
  cropFilter.setTranslation(translation);
  //cropFilter.setRotation(rotation);

  cropFilter.filter (point_cloud_filter); 

  //end filter


  // --------------------------------------------
  // -----Open 3D viewer and add point cloud-----
  // --------------------------------------------

  boost::shared_ptr<pcl::visualization::PCLVisualizer> viewer (new pcl::visualization::PCLVisualizer ("3D Viewer"));
  viewer->setBackgroundColor (0, 0, 0);
  pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGB> rgb(point_cloud_ptr_filter);
  viewer->addPointCloud<pcl::PointXYZRGB> (point_cloud_ptr_filter, rgb, "sample cloud");
  viewer->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 5, "sample cloud");
  //viewer->addCoordinateSystem (1.0);
  viewer->initCameraParameters ();
  viewer->registerPointPickingCallback (pp_callback, (void*)&viewer);

  // Draw the crop region as a cube centred on `translation`.
  viewer->addCube (translation,rotation,
                         fabs(max_pt.x-min_pt.x),
                         fabs(max_pt.y-min_pt.y),
                         fabs(max_pt.z-min_pt.z),
                         "cube");
  return (viewer);
}
コード例 #8
0
ファイル: display.hpp プロジェクト: nguyenducnhaty/wesnoth
	// Combine two packed colours channel-wise, keeping the brighter red,
	// green and blue component of each.
	static Uint32 max_rgb(Uint32 first,Uint32 second)
	{
		const auto r = std::max(red(first), red(second));
		const auto g = std::max(green(first), green(second));
		const auto b = std::max(blue(first), blue(second));
		return rgb(r, g, b);
	}
コード例 #9
0
ファイル: palette.cpp プロジェクト: 1060460048/mapnik
// Parse a raw palette blob into the sorted palette, RGB table, alpha table
// and colour hashmap used by quantization.
//
// pal  - raw palette bytes: RGBA quads, RGB triples, or a 772-byte
//        Adobe ACT file (768 colour bytes + colour count + transparency).
// type - which of the three layouts `pal` uses.
//
// Throws config_error if the blob length does not match the layout.
//
// All byte accesses go through unsigned char: std::string exposes plain
// char, which is signed on most platforms, and sign-extending bytes
// >= 0x80 would corrupt both the ACT colour count and the channel values.
void rgba_palette::parse(std::string const& pal, palette_type type)
{
    unsigned length = pal.length();

    if ((type == PALETTE_RGBA && length % 4 > 0) ||
        (type == PALETTE_RGB && length % 3 > 0) ||
        (type == PALETTE_ACT && length != 772))
    {
        throw config_error("invalid palette length");
    }

    if (type == PALETTE_ACT)
    {
        // ACT trailer: big-endian colour count at offset 768.
        length = (static_cast<unsigned char>(pal[768]) << 8 |
                  static_cast<unsigned char>(pal[769])) * 3;
    }

    sorted_pal_.clear();
    rgb_pal_.clear();
    alpha_pal_.clear();

    if (type == PALETTE_RGBA)
    {
        for (unsigned i = 0; i < length; i += 4)
        {
            sorted_pal_.push_back(rgba(static_cast<unsigned char>(pal[i]),
                                       static_cast<unsigned char>(pal[i + 1]),
                                       static_cast<unsigned char>(pal[i + 2]),
                                       static_cast<unsigned char>(pal[i + 3])));
        }
    }
    else
    {
        // RGB and ACT entries carry no alpha; treat them as opaque.
        for (unsigned i = 0; i < length; i += 3)
        {
            sorted_pal_.push_back(rgba(static_cast<unsigned char>(pal[i]),
                                       static_cast<unsigned char>(pal[i + 1]),
                                       static_cast<unsigned char>(pal[i + 2]),
                                       0xFF));
        }
    }

    // Make sure we have at least one entry in the palette.
    if (sorted_pal_.size() == 0)
    {
        sorted_pal_.push_back(rgba(0, 0, 0, 0));
    }

    colors_ = sorted_pal_.size();

#ifdef USE_DENSE_HASH_MAP
    color_hashmap_.resize((colors_*2));
#endif
    color_hashmap_.clear();

    // Sort palette for binary searching in quantization
    std::sort(sorted_pal_.begin(), sorted_pal_.end(), rgba::mean_sort_cmp());

    // Insert all palette colors into the hashmap and into the palette vectors.
    for (unsigned i = 0; i < colors_; i++)
    {
        rgba c = sorted_pal_[i];
        unsigned val = c.r | (c.g << 8) | (c.b << 16) | (c.a << 24);
        // val == 0 (fully transparent black) is reserved as the hashmap's
        // "absent" value, so it is deliberately never inserted.
        if (val != 0)
        {
            color_hashmap_[val] = i;
        }
        rgb_pal_.push_back(rgb(c));
        if (c.a < 0xFF)
        {
            alpha_pal_.push_back(c.a);
        }
    }
}
コード例 #10
0
ファイル: colorspacemanimpl.cpp プロジェクト: Rogaven/jagpdf
//
//      *  - type ... calgray, calrgb, cielab, icc, [palette]
//      *  - white   = x, z
//      *  - black   = x, y, z
//      *  - range   = amin, amax, bmin, bmax (cielab)
//      *  - gamma   = val [,val, val]        (calgray [,calrgb])
//      *  - matrix  = 9 vals                 (calrgb)
//      *  - profile = file-path              (icc based)
//      *  - name    = sRGB  .. some predefined name
// Parse a textual colour-space specification (see the option list in the
// comment block above) and load the corresponding colour space.
//
// spec_str - specification string, e.g. "srgb", "calrgb; gamma=...", etc.
// Returns a pair of handles: .first is the loaded colour space; .second is
// set to the base space only when a palette is given, in which case .first
// becomes the indexed (palette) space built on top of it.
// Throws exception_invalid_value on a malformed or incomplete spec.
IColorSpaceMan::cs_handle_pair_t
ColorSpaceManImpl::color_space_load(Char const* spec_str)
{
    ParseArgs pargs(&g_cs_kwds, &g_cs_names);
    ParsedResult pres(parse_options(spec_str, pargs));

    // The explicit value is the colour-space type name (CSN_* constant).
    unsigned name = pres.explicit_value();
    if (name == ParsedResult::NO_EXPL_VALUE)
        throw exception_invalid_value(msg_invalid_cs_spec()) << JAGLOC;

    cs_str_spec_t spec(name, pres);
    cs_handle_pair_t result;

    switch (name)
    {
    // Predefined ICC profiles compiled into the binary.
    case CSN_SRGB:
        result.first = register_icc_from_memory(binres::icc_srgb,
                                                binres::icc_srgb_size,
                                                3);
        break;

    case CSN_ADOBE_RGB:
        result.first = register_icc_from_memory(binres::icc_adobe_rgb,
                                                binres::icc_adobe_rgb_size,
                                                3);
        break;

    // Reference an already-registered colour space by its id.
    case CSN_BYID:
        if (!spec.id)
            throw exception_invalid_value(msg_invalid_cs_spec()) << JAGLOC;

        result.first = ColorSpaceHandle(
            handle_from_id<RESOURCE_COLOR_SPACE>(spec.id));
        break;

    // Plain device colour spaces need no parameters.
    case CSN_DEVICE_RGB:
        result.first = ColorSpaceHandle(CS_DEVICE_RGB);
        break;

    case CSN_DEVICE_GRAY:
        result.first = ColorSpaceHandle(CS_DEVICE_GRAY);
        break;

    case CSN_DEVICE_CMYK:
        result.first = ColorSpaceHandle(CS_DEVICE_CMYK);
        break;

    // CIE-based spaces: common white/black point, then type-specific
    // options (gamma, matrix, range).
    case CSN_CALGRAY:
    {
        intrusive_ptr<ICIECalGray> gray(define_calgray());
        set_cie_base(spec, *gray);
        if (!spec.gamma.empty())
            gray->gamma(spec.gamma[0]);

        result.first = color_space_load(gray);
        break;
    }

    case CSN_CALRGB:
    {
        intrusive_ptr<ICIECalRGB> rgb(define_calrgb());
        set_cie_base(spec, *rgb);
        if (!spec.gamma.empty())
        {
            // calrgb takes one gamma per channel.
            JAG_ASSERT(spec.gamma.size()==3);
            rgb->gamma(spec.gamma[0], spec.gamma[1], spec.gamma[2]);
        }
        if (!spec.matrix.empty())
        {
            // 3x3 linear transformation matrix, row by row.
            JAG_ASSERT(spec.matrix.size()==9);
            rgb->matrix(spec.matrix[0], spec.matrix[1], spec.matrix[2],
                        spec.matrix[3], spec.matrix[4], spec.matrix[5],
                        spec.matrix[6], spec.matrix[7], spec.matrix[8]);
        }
        result.first = color_space_load(rgb);
        break;
    }

    case CSN_CIELAB:
    {
        intrusive_ptr<ICIELab> lab(define_cielab());
        set_cie_base(spec, *lab);

        if (!spec.range.empty())
        {
            // a*/b* component ranges: amin, amax, bmin, bmax.
            JAG_ASSERT(4 == spec.range.size());
            lab->range(spec.range[0], spec.range[1], spec.range[2], spec.range[3]);
        }
        result.first = color_space_load(lab);
        break;
    }

    // ICC profile loaded from a file; requires component count + path.
    case CSN_ICC:
    {
        if (-1==spec.components || spec.icc_profile.empty())
            throw exception_invalid_value(msg_invalid_cs_spec()) << JAGLOC;

        intrusive_ptr<IICCBased> icc(define_iccbased());
        icc->num_components(spec.components);
        icc->icc_profile(spec.icc_profile.c_str());
        result.first = color_space_load(icc);
        break;
    }


    default:
        ;
    } // switch


    JAG_ASSERT(is_valid(result.first));

    // is it palette? If so, wrap the space just built in an indexed
    // (palette) colour space and report the base space in result.second.
    if (!spec.palette.empty())
    {
        intrusive_ptr<IPalette> indexed(define_indexed());
        indexed->set(
            id_from_handle<ColorSpace>(result.first),
            &spec.palette[0],
            static_cast<UInt>(spec.palette.size()));
        result.second = result.first;
        result.first = color_space_load(indexed);
    }

    return result;
}
コード例 #11
0
ファイル: bitmap.c プロジェクト: hailongfeng/huiyin
// Fetch the pixel at (x, y) from the bitmap's planar channel arrays and
// pack its red, green and blue components into a single int.
int getPixelAsInt(Bitmap* bitmap, int x, int y) {
    unsigned int pos = (bitmap->width * y) + x;
    int r = (int) bitmap->red[pos];
    int g = (int) bitmap->green[pos];
    int b = (int) bitmap->blue[pos];
    return rgb(r, g, b);
}
コード例 #12
0
ファイル: imgviewer.c プロジェクト: klange/toaruos
void redraw() {
	static double r = 0.0;

	/* Paint a grey checkerboard backdrop of 10x10-pixel cells; the shade
	 * alternates with the combined parity of the cell coordinates. */
	for (int y = 0; y < height; ++y) {
		for (int x = 0; x < width; ++x) {
			int even_cell = ((x / 10) + (y / 10)) % 2 == 0;
			GFX(ctx, x + decor_left_width, y + decor_top_height) =
				even_cell ? rgb(147,147,147) : rgb(107,107,107);
		}
	}

	/* Centre the image over the checkerboard, inside the decorations. */
	draw_sprite(ctx, &img, decor_left_width + width/2 - img.width/2, decor_top_height + height/2 - img.height/2);
	decors();
	flip(ctx);
	r += 0.02;
}
コード例 #13
0
// Advance the pong game by one frame: read the buttons once, move both
// paddles (button-driven in two-player mode, ball-chasing in self-playing
// mode), bounce the ball off paddles and the side walls, step the ball,
// and award a point (with an RGB LED flash) when the ball leaves either
// end of the field.  Reaching 10 points returns to self-playing mode.
void Play(void)
{
  // Cache the value of all pad buttons in one call.  This is less readable than checking
  // the Button(x) function per button, but it's also MUCH faster.  The Button(x) function
  // does some work internally, and doing it for all buttons at once instead of per-button
  // makes the frame rate much faster.  The (1<<x) notation you'll see after this is testing
  // for individual bits in the But variable, each corresponding to a single button state.
  int But = buttons();

  // switch modes between two player and self-playing if the OSH (center) button is pressed
  if( But & (1<<6) ) {
    AutoMode ^= 1;
    GameMode = 1;
    pause(50);          // brief pause also debounces the mode button
    return;
  }

  // If in two-player mode...
  if( AutoMode == 0 )
  {
    // Turn off the two middle LEDs - off indicates two player mode
    led(1,0);
    led(4,0);

    // Player 0 up: light the button LED and move while still on-field
    if( But & (1<<3) )
    {
      led(3,1);
      if( (Paddle[0] + PadStep) < Height )
        Paddle[0] += PadStep;
    }
    else
    {
      led(3,0);
    }

    // Player 0 down
    if( But & (1<<5) )
    {
      led(5,1);
      if( (Paddle[0] - PadStep) > -Height )
        Paddle[0] -= PadStep;
    }
    else
    {
      led(5,0);
    }


    // Player 1 up
    if( But & (1<<2) )
    {
      led(2,1);
      if( (Paddle[1] + PadStep) < Height )
        Paddle[1] += PadStep;
    }
    else
    {
      led(2,0);
    }

    // Player 1 down
    if( But & (1<<0) )
    {
      led(0,1);
      if( (Paddle[1] - PadStep) > -Height )
        Paddle[1] -= PadStep;
    }
    else
    {
      led(0,0);
    }
  }

  // If in self-playing mode...
  if( AutoMode == 1 )
  {
    // Turn on the two middle LEDs - on indicates self-playing mode
    led(1,1);
    led(4,1);

    // Simple AI: step the paddle the ball is approaching toward BallY.
    if( BallDx < 0 ) {  // moving toward player 0
      if( BallY < Paddle[0] )
        Paddle[0] -= PadStep;
      if( BallY > Paddle[0] )
        Paddle[0] += PadStep;
    }

    if( BallDx > 0 ) {  // moving toward player 1
      if( BallY < Paddle[1] )
        Paddle[1] -= PadStep;
      if( BallY > Paddle[1] )
        Paddle[1] += PadStep;
    }
  }


  // Check to see if the ball is going to cross the left player paddle (Player 0)
  if( BallX > -PadRow && (BallX+BallDx) <= -PadRow )
  {
    // Reflect only if the ball is inside the paddle span both before
    // and after this frame's vertical move
    if( BallY >= (Paddle[0]-PadLen) &&
        BallY <= (Paddle[0]+PadLen) &&
       (BallY+BallDy) >= (Paddle[0]-PadLen) &&
       (BallY+BallDy) <= (Paddle[0]+PadLen) )
     {
       BallDx = -BallDx;
     }
  }


  // Check to see if the ball is going to cross the right player paddle (Player 1)
  if( BallX < PadRow && (BallX+BallDx) >= PadRow )
  {
    if( BallY >= (Paddle[1]-PadLen) &&
        BallY <= (Paddle[1]+PadLen) &&
       (BallY+BallDy) >= (Paddle[1]-PadLen) &&
       (BallY+BallDy) <= (Paddle[1]+PadLen) )
     {
       BallDx = -BallDx;
     }
  }

  // Check for collisions against the side walls
  if( BallY <= -Height )
  {
    BallDy = -BallDy;
  }
  else if( BallY >= Height )
  {
    BallDy = -BallDy;
  }

  // Advance the ball one step
  BallX += BallDx;
  BallY += BallDy;

  // Check to see if Player 0 has scored (ball has gone off the right)
  if( BallX > Width )
  {
    // Score player 0
    rgb(R,RED);         // flash the right RGB LED red while paused
    Score[0]++;
    BallX = BallY = 0;  // re-serve from the center of the field
    pause(100);
    rgb(R,OFF);

    // First to 10 ends the match; drop back to self-playing mode
    if( Score[0] == 10 )
    {
      AutoMode = 1;
      GameMode = 1;
    }
  }
  // Check to see if Player 1 has scored (ball has gone off the left)
  else if( BallX < -Width )
  {
    // Score player 1
    rgb(L,RED);         // flash the left RGB LED red while paused
    Score[1]++;
    BallX = BallY = 0;
    pause(100);
    rgb(L,OFF);

    if( Score[1] == 10 )
    {
      AutoMode = 1;
      GameMode = 1;
    }
  }
}
コード例 #14
0
 /// @brief Convenience overload: wraps raw r/g/b channel values in an
 ///        RGBColor and forwards to the RGB_ptr-based measure().
 /// @param r,g,b     color channel values for the patch to measure
 /// @param patchName name recorded with the resulting patch
 /// @return the measured patch from the underlying meter
 Patch_ptr MeterMeasurement::measure(int r, int g, int b,
                                     const string_ptr patchName)
 {
   RGB_ptr rgb(new RGBColor(r, g, b));
   return measure(rgb, patchName);
 }  // fixed: removed stray ';' after the body (ill-formed empty declaration)
コード例 #15
0
// Kinect/OpenNI viewer: captures RGB + depth, tracks users and skeletons,
// and shows the camera image with per-user tinting, skeleton overlay and
// pose detection in an OpenCV window.
// Keys: q = quit, i = toggle image, u = toggle user tint, s = toggle skeleton.
int main (int argc, char * argv[])
{
  IplImage* camera = 0;

  try {
    // Initialize the OpenNI context from the XML configuration file
    xn::Context context;
    XnStatus rc = context.InitFromXmlFile(CONFIG_XML_PATH);
    if (rc != XN_STATUS_OK) {
      throw std::runtime_error(xnGetStatusString(rc));
    }

    // Create the image generator
    xn::ImageGenerator image;
    rc = context.FindExistingNode(XN_NODE_TYPE_IMAGE, image);
    if (rc != XN_STATUS_OK) {
      throw std::runtime_error(xnGetStatusString(rc));
    }

    // Create the depth generator
    xn::DepthGenerator depth;
    rc = context.FindExistingNode(XN_NODE_TYPE_DEPTH, depth);
    if (rc != XN_STATUS_OK) {
      throw std::runtime_error(xnGetStatusString(rc));
    }

    // Align the depth map's viewpoint with the RGB image
    depth.GetAlternativeViewPointCap().SetViewPoint(image);

    // Create the user generator (or reuse an existing node)
    xn::UserGenerator user;
    rc = context.FindExistingNode( XN_NODE_TYPE_USER, user );
    if ( rc != XN_STATUS_OK ) {
      rc = user.Create(context);
      if ( rc != XN_STATUS_OK ) {
        throw std::runtime_error( xnGetStatusString( rc ) );
      }
    }

    // Verify that user (skeleton) detection is supported
    if (!user.IsCapabilitySupported(XN_CAPABILITY_SKELETON)) {
      throw std::runtime_error("ユーザー検出をサポートしてません");
    }

    XnCallbackHandle userCallbacks, calibrationCallbacks, poseCallbacks;
    XnChar pose[20] = "";

    // Some devices require a calibration pose before skeleton tracking
    xn::SkeletonCapability skelton = user.GetSkeletonCap();
    if (skelton.NeedPoseForCalibration()) {
      // Check that pose detection is supported
      if (!user.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION)) {
        throw std::runtime_error("ポーズ検出をサポートしてません");
      }

      // Fetch the calibration pose name into the `pose` array
      skelton.GetCalibrationPose(pose);

      // Register the pose-detection callbacks
      // NOTE(review): this local `pose` shadows the XnChar array above
      // inside this block; the array (already filled) is used below.
      xn::PoseDetectionCapability pose = user.GetPoseDetectionCap();
      pose.RegisterToPoseCallbacks(&::PoseDetected, &::PoseLost,
        &user, poseCallbacks);
    }

    // Register user found/lost callbacks
    user.RegisterUserCallbacks(&::UserDetected, &::UserLost, pose,
      userCallbacks);

    // Register calibration start/end callbacks
    skelton.RegisterCalibrationCallbacks(&::CalibrationStart, &::CalibrationEnd,
      &user, calibrationCallbacks);

    // Track the full skeleton (all joints)
    skelton.SetSkeletonProfile(XN_SKEL_PROFILE_ALL);

    // Start generating data on all nodes
    context.StartGeneratingAll();

    // Create a camera-sized image buffer (8-bit, 3-channel RGB)
    XnMapOutputMode outputMode;
    image.GetMapOutputMode(outputMode);
    camera = ::cvCreateImage(cvSize(outputMode.nXRes, outputMode.nYRes),
      IPL_DEPTH_8U, 3);
    if (!camera) {
      throw std::runtime_error("error : cvCreateImage");
    }

    // Display toggles
    bool isShowImage = true;
    bool isShowUser = true;
    bool isShowSkelton = true;

    // Main loop
    while (1) {
      // Wait for all nodes to produce new data
      context.WaitAndUpdateAll();

      // Grab the current image frame
      xn::ImageMetaData imageMD;
      image.GetMetaData(imageMD);

      // Grab the per-pixel user labels
      xn::SceneMetaData sceneMD;
      user.GetUserPixels(0, sceneMD);

      // Compose the camera image, tinting each pixel by its user label
      char* dest = camera->imageData;
      const xn::RGB24Map& rgb = imageMD.RGB24Map();
      for (int y = 0; y < imageMD.YRes(); ++y) {
        for (int x = 0; x < imageMD.XRes(); ++x) {
          // User label for this pixel; forcing 0 disables the tint
          XnLabel label = sceneMD(x, y);
          if (!isShowUser) {
            label = 0;
          }

          // Camera pixel; replaced with white when the image is hidden
          XnRGB24Pixel pixel = rgb(x, y);
          if (!isShowImage) {
            pixel = xnRGB24Pixel( 255, 255, 255 );
          }

          // Write the (possibly tinted) pixel to the output buffer
          dest[0] = pixel.nRed   * Colors[label][0];
          dest[1] = pixel.nGreen * Colors[label][1];
          dest[2] = pixel.nBlue  * Colors[label][2];
          dest += 3;
        }
      }

      // Draw the skeleton for every tracked user
      if (isShowSkelton) {
        XnUserID aUsers[15];
        XnUInt16 nUsers = 15;
        user.GetUsers(aUsers, nUsers);
        for (int i = 0; i < nUsers; ++i) {
          if (skelton.IsTracking(aUsers[i])) {
            SkeltonDrawer skeltonDrawer(camera, skelton,
              depth, aUsers[i]);
            skeltonDrawer.draw();

            PoseDetector pose(camera, skelton, depth, aUsers[i]);
            bool isCross = pose.detect();
          }
        }
      }

      ::cvCvtColor(camera, camera, CV_BGR2RGB);
      ::cvShowImage("KinectImage", camera);

      // Key events
      char key = cvWaitKey(10);
      // quit
      if (key == 'q') {
        break;
      }
      // toggle individual overlays on/off
      else if (key == 'i') {
        isShowImage = !isShowImage;
      }
      else if (key == 'u') {
        isShowUser = !isShowUser;
      }
      else if (key == 's') {
        isShowSkelton = !isShowSkelton;
      }
    }
  }
  catch (std::exception& ex) {
    std::cout << ex.what() << std::endl;
  }

  ::cvReleaseImage(&camera);

  return 0;
}
コード例 #16
0
ファイル: guys.c プロジェクト: amiel/random-useless-scripts
/* Store a guy's color: remember the individual channels and cache the
 * packed value from rgb().  Note the parameter order is (r, b, g). */
void set_color(guy* tmp, int r, int b, int g){
    tmp->color_r = r;
    tmp->color_g = g;
    tmp->color_b = b;
    tmp->color   = rgb(r, g, b);
}
コード例 #17
0
ファイル: panel.c プロジェクト: Razi007/toaruos
/* Panel daemon: draws a bar across the top of the screen with the OS
 * name/release on the left, a clock on the right, and a logout button,
 * refreshing once per second until interrupted.
 * NOTE(review): this snippet is truncated — main()'s closing brace and
 * any teardown code are not visible here. */
int main (int argc, char ** argv) {
	setup_windowing();

	/* Screen dimensions reported by the window server */
	int width  = wins_globals->server_width;
	int height = wins_globals->server_height;

	win_width = width;
	win_height = height;

	init_shmemfonts();
	set_font_size(14);

	/* Create the panel */
	window_t * panel = window_create(0, 0, width, PANEL_HEIGHT);
	window_reorder (panel, 0xFFFF);
	ctx = init_graphics_window_double_buffer(panel);
	draw_fill(ctx, rgba(0,0,0,0));
	flip(ctx);

	init_sprite_png(0, "/usr/share/panel.png");
	init_sprite_png(1, "/usr/share/icons/panel-shutdown.png");

	/* Tile the panel background sprite across the full screen width */
	for (uint32_t i = 0; i < width; i += sprites[0]->width) {
		draw_sprite(ctx, sprites[0], i, 0);
	}

	/* Snapshot the rendered background so each frame can restore it
	 * with a single memcpy instead of redrawing the sprites */
	size_t buf_size = panel->width * panel->height * sizeof(uint32_t);
	char * buf = malloc(buf_size);
	memcpy(buf, ctx->backbuffer, buf_size);

	flip(ctx);

	struct timeval now;
	int last = 0;
	struct tm * timeinfo;
	char   buffer[80];

	struct utsname u;
	uname(&u);

	/* UTF-8 Strings FTW! */
	uint8_t * os_name_ = "とあるOS";
	uint8_t final[512];
	uint32_t l = snprintf(final, 512, "%s %s", os_name_, u.release);

	/* Catch SIGINT (signal 2) so the loop below can exit cleanly */
	syscall_signal(2, sig_int);

	/* Enable mouse */
	win_use_threaded_handler();
	mouse_action_callback = panel_check_click;

	while (_continue) {
		/* Redraw the background by memcpy (super speedy) */
		memcpy(ctx->backbuffer, buf, buf_size);
		syscall_gettimeofday(&now, NULL); //time(NULL);
		/* Repaint text and icons only when the second changes */
		if (now.tv_sec != last) {
			last = now.tv_sec;
			timeinfo = localtime((time_t *)&now.tv_sec);
			strftime(buffer, 80, "%I:%M:%S %p", timeinfo);

			draw_string(ctx, width - 120, 17, rgb(255,255,255), buffer);
			draw_string(ctx, 10, 17, rgb(255,255,255), final);

			draw_sprite(ctx, sprites[1], win_width - 23, 1); /* Logout button */

			flip(ctx);
		}
		syscall_nanosleep(0,50);
	}
コード例 #18
0
ファイル: s3c24x0_lcd.c プロジェクト: lacombar/netbsd-alc
		if (scr->nsegs)
			bus_dmamem_free(sc->dma_tag, scr->segs, scr->nsegs);
		free(scr, M_DEVBUF);
	}
	return NULL;
}


/* Pack color fields into a 16-bit RGB565 pixel:
 * r in bits 15..11 (5 bits), g in bits 10..5 (6 bits), b in bits 4..0 (5 bits). */
#define _rgb(r,g,b)	(((r)<<11) | ((g)<<5) | b)
/* All channels given as 6-bit intensities; r and b are halved to fit
 * their 5-bit fields, g keeps its full 6 bits. */
#define rgb(r,g,b)	_rgb((r)>>1,g,(b)>>1)

#define L	0x30			/* low intensity */
#define H	0x3f			/* high intensity */

static const uint16_t basic_color_map[] = {
	rgb(	0,   0,   0),		/* black */
	rgb(	L,   0,   0),		/* red */
	rgb(	0,   L,   0),		/* green */
	rgb(	L,   L,   0),		/* brown */
	rgb(	0,   0,   L),		/* blue */
	rgb(	L,   0,   L),		/* magenta */
	rgb(	0,   L,   L),		/* cyan */
	_rgb(0x1c,0x38,0x1c),		/* white */

	rgb(	L,   L,   L),		/* black */
	rgb(	H,   0,   0),		/* red */
	rgb(	0,   H,   0),		/* green */
	rgb(	H,   H,   0),		/* brown */
	rgb(	0,   0,   H),		/* blue */
	rgb(	H,   0,   H),		/* magenta */
	rgb(	0,   H,   H),		/* cyan */
コード例 #19
0
int
main (int argc, char** argv)
{
//     ros::init(argc, argv, "extract_sec");
//     ros::NodeHandle node_handle;

//     pcl::visualization::PCLVisualizer result_viewer("planar_segmentation");
    boost::shared_ptr<pcl::visualization::PCLVisualizer> result_viewer (new pcl::visualization::PCLVisualizer ("planar_segmentation"));
    result_viewer->addCoordinateSystem(0.3, "reference", 0);
    result_viewer->setCameraPosition(-0.499437, 0.111597, -0.758007, -0.443141, 0.0788583, -0.502855, -0.034703, -0.992209, -0.119654);
    result_viewer->setCameraClipDistances(0.739005, 2.81526);
//     result_viewer->setCameraPosition(Position, Focal point, View up);
//     result_viewer->setCameraClipDistances(Clipping plane);

    /***************************************
    *  parse arguments
    ***************************************/
    if(argc<5)
    {
        pcl::console::print_info("Usage: extract_sec DATA_PATH/PCD_FILE_FORMAT START_INDEX END_INDEX DEMO_NAME (opt)STEP_SIZE(1)");
        exit(1);
    }

    int view_id=0;
    int step=1;
    std::string basename_cloud=argv[1];
    unsigned int index_start = std::atoi(argv[2]);
    unsigned int index_end = std::atoi(argv[3]);
    std::string demo_name=argv[4];
    if(argc>5) step=std::atoi(argv[5]);

    /***************************************
    *  set up result directory
    ***************************************/
    mkdir("/home/zhen/Documents/Dataset/human_result", S_IRWXU | S_IRWXG | S_IROTH | S_IXOTH);
    char result_folder[50];
    std::snprintf(result_folder, sizeof(result_folder), "/home/zhen/Documents/Dataset/human_result/%s", demo_name.c_str());
    mkdir(result_folder, S_IRWXU | S_IRWXG | S_IROTH | S_IXOTH);

    std::string basename_pcd = (basename_cloud.find(".pcd") == std::string::npos) ? (basename_cloud + ".pcd") : basename_cloud;
    std::string filename_pcd;

    std::string mainGraph_file;
    mainGraph_file = std::string(result_folder) + "/mainGraph.txt";

    // write video config
    char video_file[100];
    std::snprintf(video_file, sizeof(video_file), "%s/video.txt", result_folder);
    std::ofstream video_config(video_file);
    if (video_config.is_open())
    {
        video_config << index_start << " " << index_end << " " << demo_name << " " << step;
        video_config.close();
    }

    /***************************************
    *  set up cloud, segmentation, tracker, detectors, graph, features
    ***************************************/
    TableObject::Segmentation tableObjSeg;
    TableObject::Segmentation initialSeg;
    TableObject::track3D tracker(false);
    TableObject::colorDetector finger1Detector(0,100,0,100,100,200); //right
    TableObject::colorDetector finger2Detector(150,250,0,100,0,100); //left
    TableObject::touchDetector touchDetector(0.01);
    TableObject::bottleDetector bottleDetector;

    TableObject::mainGraph mainGraph((int)index_start);

    std::vector<manipulation_features> record_features;
    manipulation_features cur_features;

    TableObject::pcdCloud pcdSceneCloud;
    CloudPtr sceneCloud;
    CloudPtr planeCloud(new Cloud);
    CloudPtr cloud_objects(new Cloud);
    CloudPtr cloud_finger1(new Cloud);
    CloudPtr cloud_finger2(new Cloud);
    CloudPtr cloud_hull(new Cloud);
    CloudPtr track_target(new Cloud);
    CloudPtr tracked_cloud(new Cloud);

    std::vector<pcl::PointIndices> clusters;
    pcl::ModelCoefficients coefficients;
    pcl::PointIndices f1_indices;
    pcl::PointIndices f2_indices;
    Eigen::Affine3f toBottleCoordinate;
    Eigen::Affine3f transformation;

    Eigen::Vector3f bottle_init_ori;

    // set threshold of size of clustered cloud
    tableObjSeg.setThreshold(30);
    initialSeg.setThreshold(500);

    // downsampler
    pcl::ApproximateVoxelGrid<pcl::PointXYZRGBA> grid;
    float leaf_size=0.005;
    grid.setLeafSize (leaf_size, leaf_size, leaf_size);

    /***************************************
    *  start processing
    ***************************************/
    unsigned int idx = index_start;
    int video_id=0;
    bool change = false;
    while( idx <= index_end && !result_viewer->wasStopped())
    {
        std::cout << std::endl;
        std::cout << "frame id=" << idx << std::endl;
        filename_pcd = cv::format(basename_cloud.c_str(), idx);

        if(idx==index_start) {
            /***************************************
             *  Intialization:
             * -plane localization
             * -object cluster extraction
             * -bottle cluster localization
             ***************************************/
            initialSeg.resetCloud(filename_pcd, false);
            initialSeg.seg(false);
            initialSeg.getObjects(cloud_objects, clusters);
            initialSeg.getCloudHull(cloud_hull);
            initialSeg.getPlaneCoefficients(coefficients);

            initialSeg.getsceneCloud(pcdSceneCloud);
            initialSeg.getTableTopCloud(planeCloud);
            sceneCloud=pcdSceneCloud.getCloud();

            /***************************************
             *  fingertip, hand_arm removal
             ***************************************/
            //opencv color filtering for fingertip_1
            {
                pcl::ScopeTime t_finger1("Finger 1(blue) detection");
                finger1Detector.setInputCloud(cloud_objects, clusters);
                finger1Detector.filter(f1_indices,cloud_finger1);
            }
            finger1Detector.showDetectedCloud(result_viewer, "finger1");

            //opencv color filtering for fingertip_2
            {
                pcl::ScopeTime t_finger2("Finger 2(orange) detection");
                finger2Detector.setInputCloud(cloud_objects, clusters);
                finger2Detector.filter(f2_indices,cloud_finger2);
            }
            finger2Detector.showDetectedCloud(result_viewer, "finger2");

            // remove hand (include cluster that contains the detected fingertips and also the other clusters that are touching the cluster)
            std::vector<int> hand_arm1=TableObject::findHand(cloud_objects, clusters, f1_indices);

            for(int i=hand_arm1.size()-1; i>=0; i--)
            {
                clusters.erase(clusters.begin()+hand_arm1[i]);
                std::cout << "removing hand_arm : cluster index = " << hand_arm1[i] << std::endl;
            }
            std::vector<int> hand_arm2=TableObject::findHand(cloud_objects, clusters, f2_indices);
            for(int i=hand_arm2.size()-1; i>=0; i--)
            {
                clusters.erase(clusters.begin()+hand_arm2[i]);
                std::cout << "removing hand_arm : cluster index = " << hand_arm2[i] << std::endl;
            }

// DEBUG
// pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGBA> plane(planeCloud);
// result_viewer->addPointCloud<RefPointType>(planeCloud, plane, "tabletop");
// CloudPtr debug(new Cloud);
// initialSeg.getOutPlaneCloud(debug);
// pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGBA> out_plane(debug);
// result_viewer->addPointCloud<RefPointType>(debug, out_plane, "out_plane");

// choose bottle_id at 1st frame & confirm fitted model is correct
            TableObject::view3D::drawClusters(result_viewer, cloud_objects, clusters, true);

            while (!result_viewer->wasStopped ())
            {
                result_viewer->spinOnce (100);
                boost::this_thread::sleep (boost::posix_time::microseconds (100000));
            }

            std::cout << "cluster size = " << clusters.size() << std::endl;
            /***************************************
             *  Localizing cylinder
             ***************************************/
            CloudPtr cluster_bottle (new Cloud);
            int bottle_id = 0;
            pcl::copyPointCloud (*cloud_objects, clusters[bottle_id], *cluster_bottle);
            bottleDetector.setInputCloud(cluster_bottle);
            bottleDetector.fit();
            bottleDetector.getTransformation(toBottleCoordinate);
            bottle_init_ori= bottleDetector.getOrientation();

            float x, y, z, roll, pitch, yaw;
            pcl::getTranslationAndEulerAngles(toBottleCoordinate.inverse(), x, y, z, roll, pitch, yaw);

            result_viewer->removeCoordinateSystem("reference", 0);
            result_viewer->addCoordinateSystem(0.3, toBottleCoordinate.inverse(), "reference", 0);
            bottleDetector.drawOrientation(result_viewer);

            /***************************************
             *  Record features
             ***************************************/
            bottle_features cur_bottle_features;
            cur_bottle_features.loc[0] = x;
            cur_bottle_features.loc[1] = y;
            cur_bottle_features.loc[2] = z;
            cur_bottle_features.ori[0] = roll;
            cur_bottle_features.ori[1] = pitch;
            cur_bottle_features.ori[2] = yaw;
            cur_bottle_features.color[0] = bottleDetector.getCenter().r;
            cur_bottle_features.color[1] = bottleDetector.getCenter().g;
            cur_bottle_features.color[2] = bottleDetector.getCenter().b;
            cur_bottle_features.size[0] = bottleDetector.getHeight();
            cur_bottle_features.size[1] = bottleDetector.getRadius();
            cur_features.bottle = cur_bottle_features;

            pcl::PointXYZ center_finger1 = TableObject::computeObjCentroid(cloud_finger1);
            pcl::PointXYZ center_finger2 = TableObject::computeObjCentroid(cloud_finger2);
            center_finger1 = pcl::transformPoint<pcl::PointXYZ>(center_finger1, toBottleCoordinate);
            center_finger2 = pcl::transformPoint<pcl::PointXYZ>(center_finger2, toBottleCoordinate);
            cur_features.gripper_1.loc[0] = center_finger1.x;
            cur_features.gripper_1.loc[1] = center_finger1.y;
            cur_features.gripper_1.loc[2] = center_finger1.z;
            cur_features.gripper_2.loc[0] = center_finger2.x;
            cur_features.gripper_2.loc[1] = center_finger2.y;
            cur_features.gripper_2.loc[2] = center_finger2.z;

            record_features.push_back(cur_features);

            /***************************************
             *  Tracking initialization
             ***************************************/
            {
                pcl::ScopeTime t_track("Tracker initialization");
                tracker.setTarget(cluster_bottle, bottleDetector.getCenter());
                tracker.initialize();
            }

            /***************************************
             *  Touch detection
             ***************************************/
            std::vector<CloudPtr> touch_clouds;
            touch_clouds.push_back(cluster_bottle);
            touch_clouds.push_back(cloud_finger1);
            touch_clouds.push_back(cloud_finger2);

            // touch detection between each pair of objects (including fingertips, tabletop objects and tabletop)
            for(int i=0; i<touch_clouds.size(); i++)
            {
                int j;
                bool touch;
                for(j=i+1; j<touch_clouds.size(); j++)
                {
                    // touch detection between object_i and object_j
                    char relation [50];
                    std::sprintf(relation, "object%d_object%d", i, j);
                    std::cout << relation << std::endl;

                    {
                        pcl::ScopeTime t("Touch detection");
                        touch=touchDetector.detect(touch_clouds[i], touch_clouds[j]);
                    }
//                     touchDetector.showTouch(result_viewer, relation, 100+250*(j-i-1), 40+20*i);

                    // relational scene graph -> main graph
                    if(touch) {
                        mainGraph.addInitialRelationalGraph(2);
                    } else {
                        mainGraph.addInitialRelationalGraph(0);
                    }
                }

                // touch detection between each objects and tabletop
                char relation [50];
                std::sprintf (relation, "object%d_object%d", i, (int)touch_clouds.size());
                std::cout << relation << std::endl;
                {
                    pcl::ScopeTime t("Touch detection");
                    touch=touchDetector.detectTableTouch(touch_clouds[i], coefficients);
                }
//                 touchDetector.showTouch(result_viewer, relation, 100+250*(j-i-1), 40+20*i);

                // relational scene graph -> main graph
                if(touch) {
                    mainGraph.addInitialRelationalGraph(2);
                } else {
                    mainGraph.addInitialRelationalGraph(0);
                }
            }

            /***************************************
             *  Visualization
             ***************************************/
            // draw extracted object clusters
//             TableObject::view3D::drawClusters(result_viewer, cloud_objects, touch_clusters);

            // draw extracted plane points
//             pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGBA> plane(planeCloud);
//             result_viewer->addPointCloud<RefPointType>(planeCloud, plane, "tabletop");
//             std::stringstream ss;
//             ss << (int)touch_clusters.size();
//             result_viewer->addText3D(ss.str(), planeCloud->points.at(334*640+78),0.1);

            // draw extracted plane contour polygon
            result_viewer->addPolygon<RefPointType>(cloud_hull, 0, 255, 0, "polygon");

            change = true;
        } else
        {
            /***************************************
             *  object cloud extraction
             ***************************************/
            tableObjSeg.resetCloud(filename_pcd, false);
            tableObjSeg.seg(cloud_hull,false);
            tableObjSeg.getObjects(cloud_objects, clusters);
            tableObjSeg.getsceneCloud(pcdSceneCloud);
            sceneCloud=pcdSceneCloud.getCloud();

            /***************************************
             *  fingertip extraction
             ***************************************/
            //opencv color filtering for fingertip_1
            {
                pcl::ScopeTime t_finger1("Finger 1(blue) detection");
                finger1Detector.setInputCloud(cloud_objects, clusters);
                finger1Detector.filter(f1_indices,cloud_finger1);
            }
            finger1Detector.showDetectedCloud(result_viewer, "finger1");

            //opencv color filtering for fingertip_2
            {
                pcl::ScopeTime t_finger1("Finger 2(orange) detection");
                finger2Detector.setInputCloud(cloud_objects, clusters);
                finger2Detector.filter(f2_indices,cloud_finger2);
            }
            finger2Detector.showDetectedCloud(result_viewer, "finger2");

            /***************************************
             *  filter out black glove cluster & gray sleeve, also update cloud_objects with removed cluster indices
             ***************************************/
            for(int i=0; i<clusters.size(); i++)
            {
                pcl::CentroidPoint<RefPointType> color_points;
                for(int j=0; j<clusters[i].indices.size(); j++)
                {
                    color_points.add(cloud_objects->at(clusters[i].indices[j]));
                }
                RefPointType mean_color;
                color_points.get(mean_color);
                if(mean_color.r>30 & mean_color.r<70 & mean_color.g>30 & mean_color.g<70 & mean_color.b>30 & mean_color.b<70)
                {
                    clusters.erase(clusters.begin()+ i);
                    i=i-1;
                }
            }

            /***************************************
             *  Tracking objects
             ***************************************/
            {
                pcl::ScopeTime t_track("Tracking");
                grid.setInputCloud (sceneCloud);
                grid.filter (*track_target);
                tracker.track(track_target, transformation);
                tracker.getTrackedCloud(tracked_cloud);
            }
            tracker.viewTrackedCloud(result_viewer);
//             tracker.drawParticles(result_viewer);

            /***************************************
             *  compute tracked <center, orientation>
             ***************************************/
            pcl::PointXYZ bottle_loc_point(0,0,0);
            bottle_loc_point = pcl::transformPoint<pcl::PointXYZ>(bottle_loc_point, transformation);
            result_viewer->removeShape("bottle_center");
//             result_viewer->addSphere<pcl::PointXYZ>(bottle_loc_point, 0.05, "bottle_center");

            Eigen::Vector3f bottle_ori;
            pcl::transformVector(bottle_init_ori,bottle_ori,transformation);
            TableObject::view3D::drawArrow(result_viewer, bottle_loc_point, bottle_ori, "bottle_arrow");

            /***************************************
             *  calculate toTrackedBottleCoordinate
             ***************************************/
            Eigen::Affine3f toTrackedBottleCoordinate;
            Eigen::Vector3f p( bottle_loc_point.x, bottle_loc_point.y, bottle_loc_point.z ); // position

            // get a vector that is orthogonal to _orientation ( yc = _orientation x [1,0,0]' )
            Eigen::Vector3f yc( 0, bottle_ori[2], -bottle_ori[1] );
            yc.normalize();
            // get a transform that rotates _orientation into z and moves cloud into origin.
            pcl::getTransformationFromTwoUnitVectorsAndOrigin(yc, bottle_ori, p, toTrackedBottleCoordinate);
            result_viewer->removeCoordinateSystem("reference");
            result_viewer->addCoordinateSystem(0.3, toTrackedBottleCoordinate.inverse(), "reference", 0);

            float x, y, z, roll, pitch, yaw;
            pcl::getTranslationAndEulerAngles(toTrackedBottleCoordinate.inverse(), x, y, z, roll, pitch, yaw);

            /***************************************
            *  setup bottle feature
            ***************************************/
            cur_features = record_features[video_id-1];
            cur_features.bottle.loc[0] = x;
            cur_features.bottle.loc[1] = y;
            cur_features.bottle.loc[2] = z;
            cur_features.bottle.ori[0] = roll;
            cur_features.bottle.ori[1] = pitch;
            cur_features.bottle.ori[2] = yaw;

            pcl::PointXYZ center_finger1 = TableObject::computeObjCentroid(cloud_finger1);
            pcl::PointXYZ center_finger2 = TableObject::computeObjCentroid(cloud_finger2);
            center_finger1 = pcl::transformPoint<pcl::PointXYZ>(center_finger1, toTrackedBottleCoordinate);
            center_finger2 = pcl::transformPoint<pcl::PointXYZ>(center_finger2, toTrackedBottleCoordinate);
            cur_features.gripper_1.loc[0] = center_finger1.x;
            cur_features.gripper_1.loc[1] = center_finger1.y;
            cur_features.gripper_1.loc[2] = center_finger1.z;
            cur_features.gripper_2.loc[0] = center_finger2.x;
            cur_features.gripper_2.loc[1] = center_finger2.y;
            cur_features.gripper_2.loc[2] = center_finger2.z;

            record_features.push_back(cur_features);



            /***************************************
             *  Touch detection
             ***************************************/
            std::vector<CloudPtr> touch_clouds;
            touch_clouds.push_back(tracked_cloud);
            touch_clouds.push_back(cloud_finger1);
            touch_clouds.push_back(cloud_finger2);

            // touch detection between each pair of objects (including fingertips, tabletop objects and tabletop)
            for(int i=0; i<touch_clouds.size(); i++)
            {
                int j;
                bool touch;
                for(j=i+1; j<touch_clouds.size(); j++)
                {
                    // touch detection between object_i and object_j
                    char relation [50];
                    std::sprintf(relation, "object%d_object%d", i, j);
                    std::cout << relation << std::endl;

                    {
                        pcl::ScopeTime t("Touch detection");
                        touch=touchDetector.detect(touch_clouds[i], touch_clouds[j]);
                    }
//                     touchDetector.showTouch(result_viewer, relation, 100+250*(j-i-1), 40+20*i);

                    // relational scene graph -> main graph
                    if(touch) {
                        mainGraph.addRelationalGraph(2);
                    } else {
                        mainGraph.addRelationalGraph(0);
                    }
                }

                // touch detection between each objects and tabletop
                char relation [50];
                std::sprintf (relation, "object%d_object%d", i, (int)touch_clouds.size());
                std::cout << relation << std::endl;
                {
                    pcl::ScopeTime t("Touch detection");
                    touch=touchDetector.detectTableTouch(touch_clouds[i], coefficients);
                }
//                 touchDetector.showTouch(result_viewer, relation, 100+250*(j-i-1), 40+20*i);

                // relational scene graph -> main graph
                if(touch) {
                    mainGraph.addRelationalGraph(2);
                } else {
                    mainGraph.addRelationalGraph(0);
                }
            }

            /***************************************
             *  Visualization
             ***************************************/
            // draw extracted point clusters
//             TableObject::view3D::drawText(result_viewer, cloud_objects, touch_clusters);

            /***************************************
             *  Main Graph
             ***************************************/
            change = mainGraph.compareRelationGraph((int)idx);
        }

        // darw original cloud
        pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGBA> rgb(sceneCloud);
        if(!result_viewer->updatePointCloud<RefPointType>(sceneCloud, rgb, "new frame"))
            result_viewer->addPointCloud<RefPointType>(sceneCloud, rgb, "new frame");

        result_viewer->spinOnce (100);
        boost::this_thread::sleep (boost::posix_time::microseconds (100000));


        //debug
        std::cout << cur_features.bottle.loc[0] << " " << cur_features.bottle.loc[1] << " " << cur_features.bottle.loc[2]
                  << " " << cur_features.bottle.ori[0] << " " << cur_features.bottle.ori[1] << " " << cur_features.bottle.ori[2]
                  << " " << cur_features.bottle.color[0] << " " << cur_features.bottle.color[1] << " " << cur_features.bottle.color[2]
                  << " " << cur_features.bottle.size[0] << " " << cur_features.bottle.size[1]
                  << " " << cur_features.gripper_1.loc[0] << " " << cur_features.gripper_1.loc[1] << " " << cur_features.gripper_1.loc[2]
                  << " " << cur_features.gripper_2.loc[0] << " " << cur_features.gripper_2.loc[1] << " " << cur_features.gripper_2.loc[2]
                  << std::endl;

        if(change)
        {
            char screenshot[100]; // make sure it's big enough
            std::snprintf(screenshot, sizeof(screenshot), "%s/sec_%d.png", result_folder, (int)idx);
            std::cout << screenshot << std::endl;
            result_viewer->saveScreenshot(screenshot);

            //record features
            char feature_file[100]; // make sure it's big enough
            std::snprintf(feature_file, sizeof(feature_file), "%s/features_original.txt", result_folder);
            std::ofstream feature_writer(feature_file, std::ofstream::out | std::ofstream::app);
            feature_writer << cur_features.bottle.loc[0] << " " << cur_features.bottle.loc[1] << " " << cur_features.bottle.loc[2]
                           << " " << cur_features.bottle.ori[0] << " " << cur_features.bottle.ori[1] << " " << cur_features.bottle.ori[2]
                           << " " << cur_features.bottle.color[0] << " " << cur_features.bottle.color[1] << " " << cur_features.bottle.color[2]
                           << " " << cur_features.bottle.size[0] << " " << cur_features.bottle.size[1]
                           << " " << cur_features.gripper_1.loc[0] << " " << cur_features.gripper_1.loc[1] << " " << cur_features.gripper_1.loc[2]
                           << " " << cur_features.gripper_2.loc[0] << " " << cur_features.gripper_2.loc[1] << " " << cur_features.gripper_2.loc[2]
                           << std::endl;
            feature_writer.close();
            std::cout << "features saved at " << feature_file << std::endl;
        }

        char screenshot[200]; // make sure it's big enough
        std::snprintf(screenshot, sizeof(screenshot), "%s/video/sec_%d.png", result_folder, (int)video_id);
        std::cout << screenshot << std::endl;
        result_viewer->saveScreenshot(screenshot);

        idx=idx+step;
        video_id=video_id+1;
    }

    mainGraph.displayMainGraph();
    mainGraph.recordMainGraph(mainGraph_file);


    while (!result_viewer->wasStopped ())
    {
        result_viewer->spinOnce (100);
        boost::this_thread::sleep (boost::posix_time::microseconds (100000));
    }

}
/** \brief The machine learning classifier, results are stored in the ClusterData structs.
  *
  * If a classifier model name is already present in `global_data.model`, the
  * clusters are classified with it. Otherwise an interactive labelling session
  * is run in a PCLVisualizer (the user clicks clusters to mark them as noise),
  * a new SVM model is trained, saved, and then tested on the same set.
  * Finally the predicted class (good/ghost/tree) and the per-class
  * probabilities are written back into each ClusterData entry.
  *
  * \param[in] cloud_in A pointer to the input point cloud.
  * \param[in,out] clusters_data An array of information holders for each cluster
  *
  * NOTE(review): relies on `global_data`, `initVisualizer`, `pp_callback` and
  * `keyboardEventOccurred` defined elsewhere in this file.
  */
void
applyObjectClassification (const pcl::PointCloud<PointType>::Ptr cloud_in, boost::shared_ptr<std::vector<ClusterData> > &clusters_data)
{
  // Set up the machine learning classes
  pcl::SVMTrain ml_svm_training; // To train the classifier
  pcl::SVMClassify ml_svm_classify; // To classify

  std::vector<pcl::SVMData> featuresSet; // Create the input vector for the SVM class
  std::vector<std::vector<double> > predictionOut; // Prediction output vector

  // If the input model_filename exists, it starts the classification.
  // Otherwise it starts a new machine learning training.
  if (global_data.model.size() > 0)
  {
    if (!ml_svm_classify.loadClassifierModel (global_data.model.data()))
      return;
    pcl::console::print_highlight (stderr, "Loaded ");
    pcl::console::print_value (stderr, "%s ", global_data.model.data());

    // Copy the input vector for the SVM classification
    for (size_t c_it = 0; c_it < clusters_data->size (); ++c_it)
      featuresSet.push_back ( (*clusters_data) [c_it].features);

    ml_svm_classify.setInputTrainingSet (featuresSet); // Set input clusters set
    ml_svm_classify.saveNormClassProblem ("data_input"); // Save clusters features
    ml_svm_classify.setProbabilityEstimates (1); // Estimates the probabilities
    ml_svm_classify.classification ();
  }
  else
  {
    // Currently: analyze on voxels of 0.08 x 0.08 x 0.08 meter with slight alteration based on cluster aggressiveness
    float resolution = 0.08 * global_data.scale / pow (0.5 + global_data.cagg, 2);
    // Create the viewer used for the interactive labelling session
    pcl::visualization::PCLVisualizer viewer ("cluster viewer");

    // Output classifier model name
    global_data.model.assign (global_data.cloud_name.data());
    global_data.model.append (".model");

    std::vector<bool> lab_cluster; // save whether a cluster is labelled
    std::vector<int> pt_clst_pos;  // maps each point of the merged cloud back to its original cluster index

    // fill the vector (1 = labelled, 0 = unlabelled);
    // isolated clusters are immediately labelled with class 0
    lab_cluster.resize ( (std::size_t) clusters_data->size ());
    for (size_t c_it = 0; c_it < clusters_data->size (); ++c_it)
    {
      if ( (*clusters_data) [c_it].is_isolated)
      {
        (*clusters_data) [c_it].features.label = 0;
        lab_cluster[c_it] = 1;
      }
      else
        lab_cluster[c_it] = 0;
    }

    // Build a cloud with unlabelled clusters
    pcl::PointCloud<PointType>::Ptr fragm_cloud (new pcl::PointCloud<PointType>);

    // Initialize the viewer
    initVisualizer (viewer);
    PointType picked_point; // changed whenever a mouse click occurs; stores the picked point
    viewer.registerPointPickingCallback (&pp_callback, (void *) &picked_point);

    // Create a cloud with unlabelled clusters
    for (size_t c_it = 0; c_it < clusters_data->size (); ++c_it)
      if (!lab_cluster[c_it])
      {
        pcl::PointCloud<PointType>::Ptr cluster (new pcl::PointCloud<PointType>);
        pcl::copyPointCloud (*cloud_in, * (*clusters_data) [c_it].indices, *cluster);

        // Downsample cluster
        pcl::VoxelGrid<PointType> sor;
        sor.setInputCloud (cluster);
        sor.setLeafSize (resolution, resolution, resolution);
        sor.filter (*cluster);

        // Copy cluster into a global cloud
        fragm_cloud->operator+= (*cluster);

        // Fill a vector to memorize the original affiliation of a point to the cluster
        for (int clust_pt = 0; clust_pt < cluster->size(); clust_pt++)
          pt_clst_pos.push_back (c_it);

        // Add cluster to the viewer
        std::stringstream cluster_name;
        cluster_name << "cluster" << c_it;
        pcl::visualization::PointCloudColorHandlerGenericField<PointType> rgb (cluster, "intensity"); // Get color handler for the cluster cloud
        viewer.addPointCloud<PointType> (cluster, rgb, cluster_name.str().data());
      }

    // Create a tree for point searching in the total cloud
    pcl::KdTreeFLANN<pcl::PointXYZI> tree_;
    tree_.setInputCloud (fragm_cloud);

    // Register the keyboard callback ONCE, before the loop: the previous code
    // re-registered it on every iteration, accumulating duplicate callbacks.
    bool stop = false;
    viewer.registerKeyboardCallback (keyboardEventOccurred, (void*) &stop);

    // Visualize the whole cloud; the user picks clusters and labels them
    int selected = -1; // save the picked cluster
    while (!viewer.wasStopped())
    {
      boost::this_thread::sleep (boost::posix_time::microseconds (100000));
      viewer.spinOnce (500);

      if (picked_point.x != 0.0f || picked_point.y != 0.0f || picked_point.z != 0.0f)  // if a point is clicked
      {
        std::vector<int> pointIdxNKNSearch (1);
        std::vector<float> pointNKNSquaredDistance (1);
        pcl::PointCloud<PointType>::Ptr cluster (new pcl::PointCloud<PointType>);

        // Identify the picked cluster via the nearest point of the merged cloud
        tree_.nearestKSearch (picked_point, 1, pointIdxNKNSearch, pointNKNSquaredDistance);
        selected = pt_clst_pos[pointIdxNKNSearch[0]];

        // Highlight the selected cluster under the temporary id "cluster"
        viewer.removePointCloud("cluster");
        pcl::copyPointCloud (*cloud_in, * (*clusters_data) [selected].indices, *cluster);
        pcl::visualization::PointCloudColorHandlerGenericField<PointType> rgb (cluster, "intensity");
        viewer.addPointCloud<PointType> (cluster, rgb, "cluster");

        // Reset the pick sentinel so the next click is detected
        picked_point.x = 0.0f;
        picked_point.y = 0.0f;
        picked_point.z = 0.0f;
      }

      if (selected != -1 && stop)
      {
        std::stringstream cluster_name;
        cluster_name << "cluster" << selected;
        lab_cluster[ selected ] = 1; // cluster is marked as labelled
        (*clusters_data) [ selected ].features.label = 1; // the cluster is set as a noise
        viewer.removePointCloud(cluster_name.str().data());
        viewer.removePointCloud("cluster");
        stop = false;
        selected = -1;
      }
    }

    // Close the viewer
    viewer.close();

    // The remaining unlabelled clusters are marked as "good"
    for (int c_it = 0; c_it < lab_cluster.size(); c_it++)
    {
      if (!lab_cluster[c_it])
        (*clusters_data) [c_it].features.label = 0; // Mark remaining clusters as good
    }

    // Copy the input vector for the SVM classification
    for (size_t c_it = 0; c_it < clusters_data->size (); ++c_it)
      featuresSet.push_back ( (*clusters_data) [c_it].features);

    // Setting the training classifier
    pcl::SVMParam trainParam;
    trainParam.probability = 1; // Estimates the probabilities
    trainParam.C = 512; // Initial C value of the classifier
    trainParam.gamma = 2; // Initial gamma value of the classifier

    ml_svm_training.setInputTrainingSet (featuresSet);  // Set input training set
    ml_svm_training.setParameters (trainParam);
    ml_svm_training.trainClassifier(); // Train the classifier
    ml_svm_training.saveClassifierModel (global_data.model.data()); // Save classifier model
    pcl::console::print_highlight (stderr, "Saved ");
    pcl::console::print_value (stderr, "%s ", global_data.model.data());
    ml_svm_training.saveTrainingSet ("data_input"); // Save clusters features normalized

    // Test the current classification
    ml_svm_classify.loadClassifierModel (global_data.model.data());
    ml_svm_classify.setInputTrainingSet (featuresSet);
    ml_svm_classify.setProbabilityEstimates (1);
    ml_svm_classify.classificationTest ();
  }

  ml_svm_classify.saveClassificationResult ("prediction"); // save prediction in output text file
  ml_svm_classify.getClassificationResult (predictionOut);

  // Get labels order
  std::vector<int> labels;
  ml_svm_classify.getLabel (labels);

  // Store the boolean output inside clusters_data
  // (predicted class 0 = good, 1 = ghost, 2 = tree)
  for (size_t c_it = 0; c_it < clusters_data->size (); ++c_it)
    switch ( (int) predictionOut[c_it][0])
    {
      case 0:
        (*clusters_data) [c_it].is_good = true;
        break;
      case 1:
        (*clusters_data) [c_it].is_ghost = true;
        break;
      case 2:
        (*clusters_data) [c_it].is_tree = true;
        break;
    }

  // Store the percentage output inside cluster_data; column lab+1 of
  // predictionOut holds the probability estimate for class labels[lab]
  for (size_t lab = 0; lab < labels.size (); lab++)
    switch (labels[lab])
    {
      case 0:
        for (size_t c_it = 0; c_it < clusters_data->size (); ++c_it)
        {
          (*clusters_data) [c_it].is_good_prob = predictionOut[c_it][lab + 1];
        }
        break;
      case 1:
        for (size_t c_it = 0; c_it < clusters_data->size (); ++c_it)
        {
          (*clusters_data) [c_it].is_ghost_prob = predictionOut[c_it][lab + 1];
        }
        break;
      case 2:
        for (size_t c_it = 0; c_it < clusters_data->size (); ++c_it)
        {
          (*clusters_data) [c_it].is_tree_prob = predictionOut[c_it][lab + 1];
        }
        break;
    }
}
コード例 #21
0
ファイル: main.c プロジェクト: mataamad/Mariokart2
/* Main motor-controller loop: polls the e-stop, the brake pedal and the
 * accelerator, enforces a current limit, and drives the (inverted) PWM duty.
 * NOTE(review): ESTOP_PRESSED, BRAKE_PRESSED, SHUTDOWN, RESTART, TOP_COUNT,
 * BRAKING_CURRENT, TARGET_CURRENT, MAX_CURRENT, MAX_AMP_SECONDS, and the
 * pin constants are project macros defined elsewhere; descriptions below
 * that rely on them are inferred from their names -- confirm against the
 * project headers. `pedal_input` and `ampSeconds` are globals. */
int main(void)
{
	init(); //Initialise things
	for(;;)
	{
		//Check the estop
		if(ESTOP_PRESSED)
		{
			//Wait until it is released (also holds while the brake is pressed)
			while(ESTOP_PRESSED || BRAKE_PRESSED)
			{
				SHUTDOWN;
				rgb(red); // red LED: emergency stop active
				//timer1PWMAOff(); //Stop PWM
				delay_us(100); //Wait
			}
			RESTART;
			rgb(green);
			//timer1PWMA(); //Restart PWM
		}
		
		if(BRAKE_PRESSED)
		{
			//Start at the current duty cycle
			s16 braking_duty = pedal_input;
			
			//While the brake pedal is pressed do regenerative braking
			while(BRAKE_PRESSED)
			{
				rgb(purple); // purple LED: regenerative braking
				
				//Read the average current value
				s16 current = a2dConvert10bit(AVG_CURRENT_PIN);
				
				//Calculate the error from the target braking current
				s16 error = BRAKING_CURRENT - current;
				
				//Scale the error and add to braking duty (simple proportional control)
				braking_duty += error/10;
				
				//Stop the duty from going negative
				if(braking_duty < 0)
				{
					braking_duty = 0;
				}
				
				//Set the PWM duty (output is inverted: larger value = less duty)
				timer1PWMASet(TOP_COUNT - braking_duty); //inverted
			
			}
			rgb(green);
		}
		
		//Check the average current
		int current = a2dConvert10bit(AVG_CURRENT_PIN);
		
		//Accumulate the amp-seconds the current is above the target current
		ampSeconds += (current - TARGET_CURRENT);
		
		//Stop the amp-seconds from going negative
		if(ampSeconds < 0) 
		{
			ampSeconds = 0;
		}
		
		//Shut down the controller if the current exceeds the maximum current
		// or the amp-seconds has exceeded the maximum allowable value
		if(ampSeconds > MAX_AMP_SECONDS || current > MAX_CURRENT)		
		{
			SHUTDOWN;
			rgb(blue); // blue LED: over-current shutdown
		}
		else 
		{
			RESTART;
			rgb(green);
		}		
		
		//Get the position of the accelerator pedal
		pedal_input = a2dConvert10bit(ACCEL_PIN);
		//mod: apply a 50-count dead band at the bottom of the pedal range
		if(pedal_input < 50) pedal_input = 0; else pedal_input = pedal_input - 50;
		
		//Rescale between 0 and TOP_COUNT
		// NOTE(review): integer division -- this factor is 0 if TOP_COUNT < 1024;
		// assumes TOP_COUNT is a multiple/at least 1024 -- TODO confirm
		pedal_input *= (TOP_COUNT/1024); 
		
		//Set the PWM Duty (inverted output, as above)
		timer1PWMASet(TOP_COUNT - pedal_input); //inverted
	
	}


}
/** \brief Extracts the dominant plane from the input cloud, removes statistical
  * outliers, clusters what remains, colors each cluster from a fixed palette
  * and adds it to the global viewer.
  * \param[in,out] cloud_filtered input cloud; the planar inliers are removed
  *                from it in place before clustering.
  *
  * NOTE(review): relies on the globals `viewer`, `ColorR`, `ColorG`, `ColorB`
  * declared elsewhere in this file.
  */
void environement_identification(pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud_filtered){
    pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud_f (new pcl::PointCloud<pcl::PointXYZRGB>);
    pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud_filtered_inliers (new pcl::PointCloud<pcl::PointXYZRGB> ());

    // Create the segmentation object for the planar model and set all the parameters
    pcl::SACSegmentation<pcl::PointXYZRGB> seg;
    pcl::PointIndices::Ptr inliers (new pcl::PointIndices);
    pcl::ModelCoefficients::Ptr coefficients (new pcl::ModelCoefficients);
    pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud_plane (new pcl::PointCloud<pcl::PointXYZRGB> ());
    seg.setOptimizeCoefficients (true);
    seg.setModelType (pcl::SACMODEL_PLANE);
    seg.setMethodType (pcl::SAC_RANSAC);
    seg.setMaxIterations (100);
    seg.setDistanceThreshold (0.02);

    // nr_points is kept for the (currently disabled) iterative extraction loop below
    int nr_points = (int) cloud_filtered->points.size ();
    (void) nr_points;
    //while (cloud_filtered->points.size () > 0.3 * nr_points)
    //{
    // Segment the largest planar component from the remaining cloud
    seg.setInputCloud (cloud_filtered);
    seg.segment (*inliers, *coefficients);
    if (inliers->indices.size () == 0)
    {
        std::cout << "Could not estimate a planar model for the given dataset." << std::endl;
        //break;
    }else{
        // Extract the planar inliers from the input cloud
        pcl::ExtractIndices<pcl::PointXYZRGB> extract;
        extract.setInputCloud (cloud_filtered);
        extract.setIndices (inliers);
        extract.setNegative (false);

        // Get the points associated with the planar surface
        extract.filter (*cloud_plane);
        //std::cout << "PointCloud representing the planar component: " << cloud_plane->points.size () << " data points." << std::endl;

        // Remove the planar inliers, extract the rest
        extract.setNegative (true);
        extract.filter (*cloud_f);
        *cloud_filtered = *cloud_f;
    }
    //}

    // Remove statistical outliers before clustering
    pcl::StatisticalOutlierRemoval<pcl::PointXYZRGB> sor;
    sor.setInputCloud (cloud_filtered);
    sor.setMeanK (50);
    sor.setStddevMulThresh (1.0);
    sor.filter (*cloud_filtered_inliers);

    std::cout << "cloud filtred after plane extraction : "<<cloud_filtered_inliers->size()<< std::endl;

    // Creating the KdTree object for the search method of the extraction
    pcl::search::KdTree<pcl::PointXYZRGB>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZRGB>);
    tree->setInputCloud (cloud_filtered_inliers);

    std::vector<pcl::PointIndices> cluster_indices;
    pcl::EuclideanClusterExtraction<pcl::PointXYZRGB> ec;
    ec.setClusterTolerance (0.09); // 9cm
    ec.setMinClusterSize (100);
    ec.setMaxClusterSize (25000);
    ec.setSearchMethod (tree);
    ec.setInputCloud (cloud_filtered_inliers);
    ec.extract (cluster_indices);

    std::cout << "cluster number : "<<cluster_indices.size()<< std::endl;
    int j = 0;

    for (std::vector<pcl::PointIndices>::const_iterator it = cluster_indices.begin (); it != cluster_indices.end (); ++it)
    {
        pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud_cluster (new pcl::PointCloud<pcl::PointXYZRGB>);

        // Cycle through the fixed color palette
        j = j % (int)(sizeof(ColorB)/sizeof(*ColorB));

        // Pack the palette color into the float `rgb` field once per cluster.
        // (The previous code leaked a heap-allocated uint32_t per point.)
        uint32_t packed = static_cast<uint32_t>(ColorR[j]) << 16 |
                          static_cast<uint32_t>(ColorG[j]) << 8  |
                          static_cast<uint32_t>(ColorB[j]);
        float packed_rgb = *reinterpret_cast<float*>(&packed);

        for (std::vector<int>::const_iterator pit = it->indices.begin (); pit != it->indices.end (); ++pit){
            cloud_filtered_inliers->points[*pit].rgb = packed_rgb;
            cloud_cluster->points.push_back (cloud_filtered_inliers->points[*pit]);
        }

        cloud_cluster->width = cloud_cluster->points.size ();
        cloud_cluster->height = 1;
        cloud_cluster->is_dense = true;

        std::cout << "PointCloud representing the Cluster: " << cloud_cluster->points.size () << " data points." << std::endl;

        // Build a unique viewer id per cluster. The previous code wrote
        // "cloud identification"+j, which offsets the string literal pointer
        // (UB for large j) instead of appending the index.
        std::stringstream cloud_id;
        cloud_id << "cloud identification" << j;

        pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGB> rgb(cloud_cluster);
        (viewer)->addPointCloud<pcl::PointXYZRGB> (cloud_cluster, rgb, cloud_id.str());

        j++;
    }

}
コード例 #23
0
ファイル: png_io.hpp プロジェクト: Jiangyangyang/mapnik
void save_as_png8_oct(T1 & file,
                      T2 const& image,
                      png_options const& opts)
{
    // number of alpha ranges in png8 format; 2 results in smallest image with binary transparency
    // 3 is minimum for semitransparency, 4 is recommended, anything else is worse
    const unsigned TRANSPARENCY_LEVELS = (opts.trans_mode==2||opts.trans_mode<0)?MAX_OCTREE_LEVELS:2;
    unsigned width = image.width();
    unsigned height = image.height();
    unsigned alphaHist[256];//transparency histogram
    unsigned semiCount = 0;//sum of semitransparent pixels
    unsigned meanAlpha = 0;

    if (opts.trans_mode == 0)
    {
        meanAlpha = 255;
    }
    else
    {
        for(int i=0; i<256; i++)
        {
            alphaHist[i] = 0;
        }
        for (unsigned y = 0; y < height; ++y)
        {
            for (unsigned x = 0; x < width; ++x)
            {
                unsigned val = U2ALPHA((unsigned)image.getRow(y)[x]);
                alphaHist[val]++;
                meanAlpha += val;
                if (val>0 && val<255)
                {
                    semiCount++;
                }
            }
        }
        meanAlpha /= width*height;
    }

    // transparency ranges division points
    unsigned limits[MAX_OCTREE_LEVELS+1];
    limits[0] = 0;
    limits[1] = (opts.trans_mode!=0 && alphaHist[0]>0)?1:0;
    limits[TRANSPARENCY_LEVELS] = 256;
    for(unsigned j=2; j<TRANSPARENCY_LEVELS; j++)
    {
        limits[j] = limits[1];
    }
    if (opts.trans_mode != 0)
    {
        unsigned alphaHistSum = 0;
        for(unsigned i=1; i<256; i++)
        {
            alphaHistSum += alphaHist[i];
            for(unsigned j=1; j<TRANSPARENCY_LEVELS; j++)
            {
                if (alphaHistSum<semiCount*(j)/4)
                {
                    limits[j] = i;
                }
            }
        }
    }
    // avoid too wide full transparent range
    if (limits[1]>256/(TRANSPARENCY_LEVELS-1))
    {
        limits[1]=256/(TRANSPARENCY_LEVELS-1);
    }
    // avoid too wide full opaque range
    if (limits[TRANSPARENCY_LEVELS-1]<212)
    {
        limits[TRANSPARENCY_LEVELS-1]=212;
    }
    if (TRANSPARENCY_LEVELS==2)
    {
        limits[1]=127;
    }
    // estimated number of colors from palette assigned to chosen ranges
    unsigned cols[MAX_OCTREE_LEVELS];
    // count colors
    if (opts.trans_mode == 0)
    {
        for (unsigned j=0; j<TRANSPARENCY_LEVELS; j++)
        {
            cols[j] = 0;
        }
        cols[TRANSPARENCY_LEVELS-1] = width * height;
    }
    else
    {
        for (unsigned j=0; j<TRANSPARENCY_LEVELS; j++)
        {
            cols[j] = 0;
            for (unsigned i=limits[j]; i<limits[j+1]; i++)
            {
                cols[j] += alphaHist[i];
            }
        }
    }

    unsigned divCoef = width*height-cols[0];
    if (divCoef==0)
    {
        divCoef = 1;
    }
    cols[0] = cols[0]>0?1:0; // fully transparent color (one or not at all)

    if (opts.colors>=64)
    {
        // give chance less populated but not empty cols to have at least few colors(12)
        unsigned minCols = (12+1)*divCoef/(opts.colors-cols[0]);
        for(unsigned j=1; j<TRANSPARENCY_LEVELS; j++)
        {
            if (cols[j]>12 && cols[j]<minCols)
            {
                divCoef += minCols-cols[j];
                cols[j] = minCols;
            }
        }
    }
    unsigned usedColors = cols[0];
    for(unsigned j=1; j<TRANSPARENCY_LEVELS-1; j++)
    {
        cols[j] = cols[j]*(opts.colors-cols[0])/divCoef;
        usedColors += cols[j];
    }
    // use rest for most opaque group of pixels
    cols[TRANSPARENCY_LEVELS-1] = opts.colors-usedColors;

    //no transparency
    if (opts.trans_mode == 0)
    {
        limits[1] = 0;
        cols[0] = 0;
        cols[1] = opts.colors;
    }

    // octree table for separate alpha range with 1-based index (0 is fully transparent: no color)
    octree<rgb> trees[MAX_OCTREE_LEVELS];
    for(unsigned j=1; j<TRANSPARENCY_LEVELS; j++)
    {
        trees[j].setMaxColors(cols[j]);
    }
    for (unsigned y = 0; y < height; ++y)
    {
        typename T2::pixel_type const * row = image.getRow(y);
        for (unsigned x = 0; x < width; ++x)
        {
            unsigned val = row[x];
            // insert to proper tree based on alpha range
            for(unsigned j=TRANSPARENCY_LEVELS-1; j>0; j--)
            {
                if (cols[j]>0 && U2ALPHA(val)>=limits[j])
                {
                    trees[j].insert(mapnik::rgb(U2RED(val), U2GREEN(val), U2BLUE(val)));
                    break;
                }
            }
        }
    }
    unsigned leftovers = 0;
    std::vector<rgb> palette;
    palette.reserve(opts.colors);
    if (cols[0])
    {
        palette.push_back(rgb(0,0,0));
    }

    for(unsigned j=1; j<TRANSPARENCY_LEVELS; j++)
    {
        if (cols[j]>0)
        {
            if (leftovers>0)
            {
                cols[j] += leftovers;
                trees[j].setMaxColors(cols[j]);
                leftovers = 0;
            }
            std::vector<rgb> pal;
            trees[j].setOffset( static_cast<unsigned>(palette.size()));
            trees[j].create_palette(pal);
            leftovers = cols[j] - static_cast<unsigned>(pal.size());
            cols[j] = static_cast<unsigned>(pal.size());
            palette.insert(palette.begin(), pal.begin(), pal.end());
        }
    }

    //transparency values per palette index
    std::vector<unsigned> alphaTable;
    //alphaTable.resize(palette.size());//allow semitransparency also in almost opaque range
    if (opts.trans_mode != 0)
    {
        alphaTable.resize(palette.size() - cols[TRANSPARENCY_LEVELS-1]);
    }

    if (palette.size() > 16 )
    {
        // >16 && <=256 colors -> write 8-bit color depth
        image_data_8 reduced_image(width,height);
        reduce_8(image, reduced_image, trees, limits, TRANSPARENCY_LEVELS, alphaTable);
        save_as_png(file,palette,reduced_image,width,height,8,alphaTable,opts);
    }
    else if (palette.size() == 1)
    {
        // 1 color image ->  write 1-bit color depth PNG
        unsigned image_width  = ((width + 15) >> 3) & ~1U; // 1-bit image, round up to 16-bit boundary
        unsigned image_height = height;
        image_data_8 reduced_image(image_width,image_height);
        reduce_1(image,reduced_image,trees, limits, alphaTable);
        if (meanAlpha<255 && cols[0]==0)
        {
            alphaTable.resize(1);
            alphaTable[0] = meanAlpha;
        }
        save_as_png(file,palette,reduced_image,width,height,1,alphaTable,opts);
    }
コード例 #24
0
ファイル: Color.cpp プロジェクト: rmacnak-google/engine
/* Returns this color with its alpha scaled by otherAlpha: the existing alpha
 * is normalized from [0,255] to [0,1] before being multiplied in. */
Color Color::combineWithAlpha(float otherAlpha) const
{
    const float scaledAlpha = (alpha() / 255.f) * otherAlpha;
    return colorWithOverrideAlpha(rgb(), scaledAlpha);
}
コード例 #25
0
ファイル: dib.cpp プロジェクト: Asmodean-/Ishiiruka
/* Builds a wxPalette from this DIB's color table.
 *
 * Returns NULL when the object is invalid, when the bitmap depth has no
 * palette, or when the Win32 palette creation fails. On success the caller
 * owns the returned wxPalette. */
wxPalette *wxDIB::CreatePalette() const
{
    wxCHECK_MSG( m_handle, NULL, wxT("wxDIB::CreatePalette(): invalid object") );

    DIBSECTION ds;
    if ( !GetDIBSection(m_handle, &ds) )
    {
        wxLogLastError(wxT("GetObject(hDIB)"));

        // use NULL (not 0) for consistency with the other failure returns
        return NULL;
    }

    // how many colours are we going to have in the palette?
    DWORD biClrUsed = ds.dsBmih.biClrUsed;
    if ( !biClrUsed )
    {
        // biClrUsed field might not be set
        biClrUsed = GetNumberOfColours(ds.dsBmih.biBitCount);
    }

    if ( !biClrUsed )
    {
        // bitmaps of this depth don't have palettes at all
        //
        // NB: another possibility would be to return
        //     GetStockObject(DEFAULT_PALETTE) or even CreateHalftonePalette()?
        return NULL;
    }

    MemoryHDC hDC;

    // LOGPALETTE struct has only 1 element in palPalEntry array, we're
    // going to have biClrUsed of them so add necessary space
    LOGPALETTE *pPalette = (LOGPALETTE *)
        malloc(sizeof(LOGPALETTE) + (biClrUsed - 1)*sizeof(PALETTEENTRY));
    wxCHECK_MSG( pPalette, NULL, wxT("out of memory") );

    // initialize the palette header
    pPalette->palVersion = 0x300;  // magic number, not in docs but works
    pPalette->palNumEntries = (WORD)biClrUsed;

    // and the colour table: read it from the DIB section via a memory DC
    wxCharBuffer rgb(sizeof(RGBQUAD) * biClrUsed);
    RGBQUAD *pRGB = (RGBQUAD*)rgb.data();
    SelectInHDC selectHandle(hDC, m_handle);
    ::GetDIBColorTable(hDC, 0, biClrUsed, pRGB);
    for ( DWORD i = 0; i < biClrUsed; i++, pRGB++ )
    {
        pPalette->palPalEntry[i].peRed = pRGB->rgbRed;
        pPalette->palPalEntry[i].peGreen = pRGB->rgbGreen;
        pPalette->palPalEntry[i].peBlue = pRGB->rgbBlue;
        pPalette->palPalEntry[i].peFlags = 0;
    }

    HPALETTE hPalette = ::CreatePalette(pPalette);

    // the LOGPALETTE is only needed to create the HPALETTE
    free(pPalette);

    if ( !hPalette )
    {
        wxLogLastError(wxT("CreatePalette"));

        return NULL;
    }

    // wrap the Win32 handle in a wxPalette that the caller owns
    wxPalette *palette = new wxPalette;
    palette->SetHPALETTE((WXHPALETTE)hPalette);

    return palette;
}
コード例 #26
0
ファイル: STColor4f.cpp プロジェクト: Acesine/CG-Stanford
/* Component-wise exponential. Applies to the RGB components only;
 * the alpha channel is passed through unchanged. */
STColor4f STColor4f::Exp() const
{
    const STColor3f components(*this);
    return STColor4f(components.Exp(), a);
}
コード例 #27
0
// --------------
// -----Main-----
// --------------
/* Server entry point: forks a child process that accepts TCP connections on
 * port 60588 and fills a shared-memory float buffer, while the parent runs
 * the PCL visualizer and redraws whenever the shared `update` flag is set.
 * NOTE(review): `update`, `shared_buffer_size`, `sharedbuffer`, `dostuff`,
 * `rgbVis`, `createPointCloud` and `sigHandler` are globals/helpers defined
 * elsewhere in this file; the forked connection handlers are never reaped
 * (no waitpid) -- presumably acceptable here, but verify zombie handling. */
int
main (int argc, char** argv)
{

    // Prepare the listening socket
    int sockfd, newsockfd, portno, pid, visualizerpid;
    socklen_t clilen;
    struct sockaddr_in serv_addr, cli_addr;

   sockfd = socket(AF_INET, SOCK_STREAM, 0);

   if (sockfd < 0)
       error("ERROR opening socket");
    bzero((char *) &serv_addr, sizeof(serv_addr));
    portno = 60588;
    serv_addr.sin_family = AF_INET;
    serv_addr.sin_addr.s_addr = INADDR_ANY;
    serv_addr.sin_port = htons(portno);
    if (bind(sockfd, (struct sockaddr *) &serv_addr,
             sizeof(serv_addr)) < 0)
             error("ERROR on binding");
    listen(sockfd,5);
    clilen = sizeof(cli_addr);


    // Anonymous shared mappings so the forked children and the parent
    // visualizer see the same flag, size and point buffer.
    update = (int *)mmap(NULL, sizeof *update, PROT_READ | PROT_WRITE,
                    MAP_SHARED | MAP_ANONYMOUS, -1, 0);

    *update = 0;

    shared_buffer_size = (int*)mmap(0, sizeof(int), PROT_READ|PROT_WRITE,
               MAP_SHARED | MAP_ANONYMOUS, -1, 0);
    *shared_buffer_size=50000;

     sharedbuffer = (float*)mmap(0, (*shared_buffer_size)*sizeof(float), PROT_READ|PROT_WRITE,
                   MAP_SHARED | MAP_ANONYMOUS, -1, 0);

     memset((void *)sharedbuffer, 0, (*shared_buffer_size)*sizeof(float));








 // Create the visualizer before forking so both processes share its state
 boost::shared_ptr<pcl::visualization::PCLVisualizer>  viewer = rgbVis();










  // Fork: child accepts network connections, parent runs the viewer loop
  visualizerpid = fork();
  if (visualizerpid < 0)
  error("ERROR on fork");
  if (visualizerpid == 0)  {




    //  std::cout << "entier par le fils: " << point_cloud_ptr->points[0].x << std::endl;

   // Child: install a handler so it can react when the parent signals
   // the window-close event (SIGHUP, sent below)
      signal(SIGHUP, sigHandler);
      while (1)
      {
      newsockfd = accept(sockfd,
             (struct sockaddr *) &cli_addr, &clilen);

        if (newsockfd < 0)
           error("ERROR on accept");
       pid = fork();
       if (pid < 0)
           error("ERROR on fork");
       if (pid == 0)  {
           // Grandchild: handle one client, then flag the shared buffer as updated
           close(sockfd);

           dostuff(newsockfd);
           *update = 1;

           exit(0);
       }
       else{ close(newsockfd);}
     }

      exit(0);

  }else{

      // Parent: redraw whenever a connection handler has set the update flag
      while (!viewer->wasStopped ())
      {

        if(*update){

            std::cout<<"first value "<<sharedbuffer[2]<<std::endl;
        //std::cout << "test0: " << update << std::endl;
            pcl::PointCloud<pcl::PointXYZRGB>::Ptr point_cloud_ptr=createPointCloud();
            pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGB> rgb(point_cloud_ptr);

            viewer->addPointCloud<pcl::PointXYZRGB> (point_cloud_ptr, rgb, "sample cloud");
        *update=0;
        }
        viewer->spinOnce (100);
        boost::this_thread::sleep (boost::posix_time::microseconds (100000));

      }

      // Viewer closed: tell the child to shut down and release the socket
      kill(visualizerpid, SIGHUP);
      close(sockfd);
  }


return 0;

}
コード例 #28
0
ファイル: STColor4f.cpp プロジェクト: Acesine/CG-Stanford
/* Component-wise natural log. Applies to the RGB components only;
 * the alpha channel is passed through unchanged. */
STColor4f STColor4f::Log() const
{
    const STColor3f components(*this);
    return STColor4f(components.Log(), a);
}
コード例 #29
0
ファイル: qcolor_p.cpp プロジェクト: AtlantisCD9/Qt
#ifndef QT_NO_COLORNAMES

/*
  CSS color names = SVG 1.0 color names + transparent (rgba(0,0,0,0))
*/

#ifdef rgb
#  undef rgb
#endif
// Pack 8-bit r,g,b components into a fully-opaque 0xAARRGGBB value (alpha
// forced to 0xff). Each argument is parenthesized so the expansion stays
// correct when callers pass compound expressions: without the parentheses,
// e.g. rgb(a | b, g, b) would expand to (a | (b << 16)) because `<<` binds
// tighter than `|`.
#define rgb(r,g,b) (0xff000000 | ((r) << 16) | ((g) << 8) | (b))

static const struct RGBData {
    const char *name;
    uint  value;
} rgbTbl[] = {
    { "aliceblue", rgb(240, 248, 255) },
    { "antiquewhite", rgb(250, 235, 215) },
    { "aqua", rgb( 0, 255, 255) },
    { "aquamarine", rgb(127, 255, 212) },
    { "azure", rgb(240, 255, 255) },
    { "beige", rgb(245, 245, 220) },
    { "bisque", rgb(255, 228, 196) },
    { "black", rgb( 0, 0, 0) },
    { "blanchedalmond", rgb(255, 235, 205) },
    { "blue", rgb( 0, 0, 255) },
    { "blueviolet", rgb(138, 43, 226) },
    { "brown", rgb(165, 42, 42) },
    { "burlywood", rgb(222, 184, 135) },
    { "cadetblue", rgb( 95, 158, 160) },
    { "chartreuse", rgb(127, 255, 0) },
    { "chocolate", rgb(210, 105, 30) },
コード例 #30
0
//! called if an event happened.
// Handles GUI events (spinbox edits, buttons, focus loss) and mouse events
// (window dragging). Unhandled events fall through to IGUIElement::OnEvent.
bool CGUIColorSelectDialog::OnEvent(const SEvent& event)
{
	// While the dialog is disabled, skip all handling; the base class still
	// receives the event at the bottom.
	if (isEnabled())
	{
		switch(event.EventType)
		{
			case EET_GUI_EVENT:
			switch(event.GUIEvent.EventType)
			{
				case EGET_SPINBOX_CHANGED:
				{
					// Locate the spinbox that fired, then synchronize the
					// "other" color model: editing an RGB(A) box (index < 4)
					// recomputes the HSL boxes, and vice versa.
					for ( u32 i = 0; i!= Battery.size (); ++i )
					{
						if ( event.GUIEvent.Caller == Battery[i] )
						{
							if (i<4)
							{
								// Battery[1..3] hold R,G,B (see the else branch
								// below); Battery[0] is presumably alpha -- the
								// SColor ctor argument order suggests (a,r,g,b),
								// TODO confirm against video::SColor.
								video::SColor rgb((u32)Battery[0]->getValue(), (u32)Battery[1]->getValue(),
									(u32)Battery[2]->getValue(), (u32)Battery[3]->getValue());
								video::SColorHSL hsl;
								video::SColorf rgb2(rgb);
								hsl.fromRGB(rgb2);
								// Battery[4..6] hold Hue, Saturation, Luminance.
								Battery[4]->setValue(hsl.Hue);
								Battery[5]->setValue(hsl.Saturation);
								Battery[6]->setValue(hsl.Luminance);
							}
							else
							{
								// An HSL spinbox changed: convert H,S,L back to
								// RGB and push the result into Battery[1..3].
								video::SColorHSL hsl(Battery[4]->getValue(), Battery[5]->getValue(),
									Battery[6]->getValue());
								video::SColorf rgb2;
								hsl.toRGB(rgb2);
								video::SColor rgb = rgb2.toSColor();
								Battery[1]->setValue((f32)rgb.getRed());
								Battery[2]->setValue((f32)rgb.getGreen());
								Battery[3]->setValue((f32)rgb.getBlue());
							}
						}
					}
					return true;
				}

				case EGET_ELEMENT_FOCUS_LOST:
					// Losing focus cancels any in-progress window drag.
					Dragging = false;
					break;
				case EGET_BUTTON_CLICKED:
					// Close/Cancel both dismiss with a cancel notification;
					// OK dismisses with the selected-color notification.
					if (event.GUIEvent.Caller == CloseButton ||
						event.GUIEvent.Caller == CancelButton)
					{
						sendCancelEvent();
						remove();
						return true;
					}
					else
					if (event.GUIEvent.Caller == OKButton)
					{
						sendSelectedEvent();
						remove();
						return true;
					}
					break;

				case EGET_LISTBOX_CHANGED:
				case EGET_LISTBOX_SELECTED_AGAIN:
				default:
					break;
			}
			break;
		case EET_MOUSE_INPUT_EVENT:
			// Drag-to-move support: press starts a drag, release ends it,
			// and moves translate the dialog by the mouse delta.
			switch(event.MouseInput.Event)
			{
			case EMIE_LMOUSE_PRESSED_DOWN:
				DragStart.X = event.MouseInput.X;
				DragStart.Y = event.MouseInput.Y;
				Dragging = true;
				return true;
			case EMIE_LMOUSE_LEFT_UP:
				Dragging = false;
				return true;
			case EMIE_MOUSE_MOVED:
				if (Dragging)
				{
					// gui window should not be dragged outside its parent
					// (moves are swallowed -- return true -- once the cursor
					// leaves the parent's rect, minus a 1px margin).
					if (Parent)
						if (event.MouseInput.X < Parent->getAbsolutePosition().UpperLeftCorner.X +1 ||
							event.MouseInput.Y < Parent->getAbsolutePosition().UpperLeftCorner.Y +1 ||
							event.MouseInput.X > Parent->getAbsolutePosition().LowerRightCorner.X -1 ||
							event.MouseInput.Y > Parent->getAbsolutePosition().LowerRightCorner.Y -1)

							return true;

					// Move by the delta since the last event, then rebase the
					// drag origin so deltas stay incremental.
					move(core::position2d<s32>(event.MouseInput.X - DragStart.X, event.MouseInput.Y - DragStart.Y));
					DragStart.X = event.MouseInput.X;
					DragStart.Y = event.MouseInput.Y;
					return true;
				}
			default:
				break;
			}
		default:
			break;
		}
	}

	// Anything not consumed above is forwarded to the base implementation.
	return IGUIElement::OnEvent(event);
}