Ejemplo n.º 1
0
void sexyFillScreen(const ShaderState &ss, const View& view, uint color0, uint color1, float alpha)
{
    if (alpha < epsilon || (color0 == 0 && color1 == 0))
        return;

    glDepthMask(GL_FALSE);
    glDisable(GL_DEPTH_TEST);
    const float2 ws = 1.2f * view.sizePoints;
    const float2 ps = -0.1f * view.sizePoints;
    const float t = globals.renderTime / 20.f;
    const uint a = ALPHAF(alpha);

    // 1 2
    // 0 3
    const VertexPosColor v[] = {
        VertexPosColor(ps,  a|rgb2bgr(lerpXXX(color0, color1, unorm_sin(t)))),
        VertexPosColor(ps + justY(ws), a|rgb2bgr(lerpXXX(color0, color1, unorm_sin(3.f * t)))),
        VertexPosColor(ps + ws,        a|rgb2bgr(lerpXXX(color0, color1, unorm_sin(5.f * t)))),
        VertexPosColor(ps + justX(ws), a|rgb2bgr(lerpXXX(color0, color1, unorm_sin(7.f * t)))),
    };
    static const uint i[] = {0, 1, 2, 0, 2, 3};
    DrawElements(ShaderColorDither::instance(), ss, GL_TRIANGLES, v, i, arraySize(i));

    glEnable(GL_DEPTH_TEST);
    glDepthMask(GL_TRUE);
}    
Ejemplo n.º 2
0
void FrameCapturer::rgb2bgr(ImageRGB& img) {
  // Swap the R/B channels of every pixel in place by delegating to the
  // per-pixel rgb2bgr() overload.
  ImageRGB::pixel_type it = img.begin();
  const ImageRGB::pixel_type last = img.end();
  while (it != last) {
    rgb2bgr(*it);
    ++it;
  }
}
Ejemplo n.º 3
0
	void RichEdit::setFontColor(COLORREF color) {
		// Apply the given color as the control's text color; rgb2bgr() converts
		// it to the byte order the rich edit control expects.
		CHARFORMAT cf = {};
		cf.cbSize = sizeof(cf);
		cf.dwMask = CFM_COLOR;
		cf.crTextColor = rgb2bgr(color);
		// Remember the converted color as the default character format.
		_defaultCf.crTextColor = cf.crTextColor;
		// Move the selection to the end of the text, then apply the format
		// to the current selection.
		SendMessage(getHwnd(), EM_SETSEL, 0xFFFFFFF, 0xFFFFFFF);
		SendMessage(getHwnd(), EM_SETCHARFORMAT, SCF_SELECTION, reinterpret_cast<LPARAM>(&cf));
	}
Ejemplo n.º 4
0
/* Highlight the span between the opening and closing tag brackets using the
 * INDICATOR_TAGMATCH indicator: a translucent rounded box in the given color.
 * The color passes through rgb2bgr() because Scintilla stores colors with the
 * red and blue components swapped relative to the caller's representation. */
static void highlight_tag(ScintillaObject *sci, gint openingBracket,
                          gint closingBracket, gint color)
{
    /* Select which indicator subsequent SCI_INDICATOR* messages act on. */
    scintilla_send_message(sci, SCI_SETINDICATORCURRENT, INDICATOR_TAGMATCH, 0);
    scintilla_send_message(sci, SCI_INDICSETSTYLE,
                            INDICATOR_TAGMATCH, INDIC_ROUNDBOX);
    scintilla_send_message(sci, SCI_INDICSETFORE, INDICATOR_TAGMATCH, rgb2bgr(color));
    /* Alpha 60 (of 255) keeps the underlying text readable. */
    scintilla_send_message(sci, SCI_INDICSETALPHA, INDICATOR_TAGMATCH, 60);
    /* Fill from the opening bracket through the closing bracket, inclusive. */
    scintilla_send_message(sci, SCI_INDICATORFILLRANGE,
                            openingBracket, closingBracket-openingBracket+1);
}
Ejemplo n.º 5
0
ImageRGB FrameCapturer::getFrame(){
    LOG(INFO) << __PRETTY_FUNCTION__;

    // Grab a fresh BMP frame from the camera, then wrap the raw byte buffer
    // in a mirage image of the reported dimensions.
    // TODO: getImageBytes() can also report width/height/depth directly.
    axis.getDefaultBMPImage();
    int ignored;
    mirage::img::Coordinate dims(axis.getWidth(), axis.getHeight());
    frame.resize(dims,
            (ImageRGB::value_type*)axis.getImageBytes(ignored, ignored, ignored));

    // Swap R/B channels in place (see the rgb2bgr(ImageRGB&) helper).
    rgb2bgr(frame);
    return frame;
}
Ejemplo n.º 6
0
/* Load a 16-color RLE-encoded PCX image from an already-open file descriptor
 * and print it to stdout as assembler source: tile data in 8x8 chunks of
 * 4-bit planar words ("<label_prefix>_data:") followed by the 16-entry
 * palette ("<label_prefix>_palette:").
 *
 * Returns 0 on success, -1 on a short header read or allocation failure.
 *
 * Fixes vs. previous version: the pixel buffer is now freed before returning
 * (it leaked), it is allocated one byte per pixel instead of sizeof(unsigned
 * int) per pixel, the header read is checked, and a corrupt RLE run count can
 * no longer write past the end of a row.
 */
static int vmwLoadPCX(int pcx_fd) {

   int debug=1,bpp;
   int x,y;
   int i,numacross,planes=0;
   int xsize,ysize,plane;
   int xmin,ymin,xmax,ymax,version;
   unsigned char pcx_header[128];
   unsigned char temp_byte;

    /*************** DECODE THE HEADER *************************/

    /* The fixed PCX header is 128 bytes; refuse to decode a short read. */
    if (read(pcx_fd,pcx_header,128)!=128) {
       fprintf(stderr,"Error!  Could not read PCX header\n");
       return -1;
    }

    /* Multi-byte header fields are little-endian 16-bit values. */
    xmin=(pcx_header[5]<<8)+pcx_header[4];
    ymin=(pcx_header[7]<<8)+pcx_header[6];

    xmax=(pcx_header[9]<<8)+pcx_header[8];
    ymax=(pcx_header[11]<<8)+pcx_header[10];

    version=pcx_header[1];
    bpp=pcx_header[3];

    if (debug) {

       fprintf(stderr,"Manufacturer: ");
       if (pcx_header[0]==10) fprintf(stderr,"Zsoft\n");
       else fprintf(stderr,"Unknown %i\n",pcx_header[0]);

       fprintf(stderr,"Version: ");

       switch(version) {
        case 0: fprintf(stderr,"2.5\n"); break;
        case 2: fprintf(stderr,"2.8 w palette\n"); break;
        case 3: fprintf(stderr,"2.8 w/o palette\n"); break;
        case 4: fprintf(stderr,"Paintbrush for Windows\n"); break;
        case 5: fprintf(stderr,"3.0+\n"); break;
        default: fprintf(stderr,"Unknown %i\n",version);
       }
       fprintf(stderr,"Encoding: ");
       if (pcx_header[2]==1) fprintf(stderr,"RLE\n");
       else fprintf(stderr,"Unknown %i\n",pcx_header[2]);

       fprintf(stderr,"BitsPerPixelPerPlane: %i\n",bpp);
       fprintf(stderr,"File goes from %i,%i to %i,%i\n",xmin,ymin,xmax,ymax);

       fprintf(stderr,"Horizontal DPI: %i\n",(pcx_header[13]<<8)+pcx_header[12]);
       fprintf(stderr,"Vertical   DPI: %i\n",(pcx_header[15]<<8)+pcx_header[14]);

       fprintf(stderr,"Number of colored planes: %i\n",pcx_header[65]);
       fprintf(stderr,"Bytes per line: %i\n",(pcx_header[67]<<8)+pcx_header[66]);
       fprintf(stderr,"Palette Type: %i\n",(pcx_header[69]<<8)+pcx_header[68]);
       fprintf(stderr,"Hscreen Size: %i\n",(pcx_header[71]<<8)+pcx_header[70]);
       fprintf(stderr,"Vscreen Size: %i\n",(pcx_header[73]<<8)+pcx_header[72]);

    }
    planes=pcx_header[65];

    xsize=((xmax-xmin)+1);
    ysize=((ymax-ymin)+1);

    /* One palette index per pixel, so one byte per pixel is enough. */
    unsigned char *output;

    output=calloc((size_t)xsize*(size_t)ysize,sizeof(*output));
    if (output==NULL) return -1;

    /*************** DECODE THE RLE PIXEL DATA *****************/

   x=0; y=0;

   while(y<ysize) {
      for(plane=0;plane<planes;plane++) {
         x=0;
         while (x<xsize) {
            read(pcx_fd,&temp_byte,1);
            if (0xc0 == (temp_byte&0xc0)) {
               /* RLE run: low six bits give the count, next byte the value. */
               numacross=temp_byte&0x3f;
               read(pcx_fd,&temp_byte,1);
               /* Clamp to the row end so a corrupt run count cannot
                  write past the buffer. */
               for(i=0;i<numacross && x<xsize;i++) {
                  output[(y*xsize)+x]=temp_byte;
                  x++;
               }
            }
            else {
               /* Literal single pixel. */
               output[(y*xsize)+x]=temp_byte;
               x++;
            }
         }
      }
      y++;
   }

#define X_CHUNKSIZE 8
#define Y_CHUNKSIZE 8

    /*************** EMIT 8x8 TILES, 4-BIT PLANAR **************/

   unsigned int plane0,plane1,plane2,plane3,offset;

   printf("%s_data:\n",label_prefix);
   int ychunk,xchunk;
   for(ychunk=0;ychunk<ysize/Y_CHUNKSIZE;ychunk++) {
      for(xchunk=0;xchunk<xsize/X_CHUNKSIZE;xchunk++) {
         printf("\t; Tile %d %d, Plane 0 Plane 1\n",xchunk,ychunk);

         /* Bits 0 and 1 of each palette index form planes 0 and 1;
            each output word packs one 8-pixel row of both planes. */
         for(y=0;y<Y_CHUNKSIZE;y++){
            plane0=0;plane1=0;
            for(x=0;x<X_CHUNKSIZE;x++) {
               plane0<<=1;
               plane1<<=1;

               offset=((ychunk*Y_CHUNKSIZE+y)*xsize)+(xchunk*X_CHUNKSIZE)+x;
               plane0|=(output[offset])&1;
               plane1|=(((output[offset])&2)>>1);
            }
            printf("\t.word $%02x%02x\n",plane1,plane0);
         }

         printf("\t; Plane 2 Plane 3\n");
         /* Bits 2 and 3 form planes 2 and 3. */
         for(y=0;y<Y_CHUNKSIZE;y++){
            plane2=0;plane3=0;
            for(x=0;x<X_CHUNKSIZE;x++) {
               plane2<<=1;
               plane3<<=1;

               offset=((ychunk*Y_CHUNKSIZE+y)*xsize)+(xchunk*X_CHUNKSIZE)+x;
               plane2|=(((output[offset])&4)>>2);
               plane3|=(((output[offset])&8)>>3);
            }
            printf("\t.word $%02x%02x\n",plane3,plane2);
         }
      }
   }

   /* The pixel buffer is no longer needed; previously this was leaked. */
   free(output);

    /*************** EMIT THE PALETTE **************************/

   printf("%s_palette:\n",label_prefix);

   /* A 16-color PCX palette follows the image data, introduced by 0x0c. */
   read(pcx_fd,&temp_byte,1);
   if (temp_byte!=12) {
     printf("Error!  No palette found!\n");
   }
   else {
     int r,g,b;
     for(i=0;i<16;i++) {
       read(pcx_fd,&temp_byte,1);
       r=temp_byte;
       read(pcx_fd,&temp_byte,1);
       g=temp_byte;
       read(pcx_fd,&temp_byte,1);
       b=temp_byte;
       printf("\t.word $%x\t; r=%x g=%x b=%x\n",rgb2bgr(r,g,b),r,g,b);
     }
   }

    return 0;
}
Ejemplo n.º 7
0
	void RichEdit::setBackgroundColor(COLORREF color) {
		// Convert via rgb2bgr() to match the byte order the control expects,
		// then hand the value to EM_SETBKGNDCOLOR.
		const COLORREF converted = rgb2bgr(color);
		SendMessage(getHwnd(), EM_SETBKGNDCOLOR, 0, static_cast<LPARAM>(converted));
	}
Ejemplo n.º 8
0
// Drive an Axis PTZ camera to a requested pan/tilt/zoom position, grab a
// frame, and save it as a JPEG named from the (x, y) command-line tags and
// the requested position.
//
// Fixes vs. previous version: stray double semicolon removed, "Connot"
// typo in the connection error corrected, and the final message now reports
// the actual output file name instead of the hard-coded "ptz.jpg".
int main(int argc, char* argv[]) {
    std::string hostname,user,password;
    int port;
    double pan,tilt,zoom;

    // argv[0] plus nine user arguments.
    if(argc!=10) {
        std::cout << "Usage :" << std::endl
                  << "  " << argv[0]
                  << " <hostname> <port:80> <username> <password> <pan> <tilt> <zoom> <x> <y>"
                  << std::endl;
        return 0;
    }

    hostname = argv[1];
    port     = atoi(argv[2]);
    user     = argv[3];
    password = argv[4];

    axis::PTZ axis(hostname,port);
    if(!axis.connect(user,password)) {
        std::cout << "Cannot connect " << user
                  << " (" << password << ") on "
                  << hostname << ':' << port << ". Aborting."
                  << std::endl;
        return 1;
    }

    // Fix the iris so exposure does not change between frames.
    axis.setAutoiris("off");
    axis.setIris(1000);

    axis.getPosition(pan,tilt,zoom);
    std::cout << "Current position is " << std::endl
              << "  pan  = " << pan << std::endl
              << "  tilt = " << tilt << std::endl
              << "  zoom = " << zoom << std::endl;

    pan  = atof(argv[5]);
    tilt = atof(argv[6]);
    zoom = atof(argv[7]);

    std::cout << "Reaching now... " << std::endl
              << "  pan  = " << pan << std::endl
              << "  tilt = " << tilt << std::endl
              << "  zoom = " << zoom << std::endl;
    axis.setPanTilt(pan,tilt);
    axis.setZoom(zoom);
    axis.wait();

    std::cout << "... reached." << std::endl;
    axis.getPosition(pan,tilt,zoom);
    std::cout << "  pan  = " << pan << std::endl
              << "  tilt = " << tilt << std::endl
              << "  zoom = " << zoom << std::endl;

    // Let us now grab an image.
    // We wait 2 seconds for autofocus to stabilize, since we may have zoomed.
    ost::Thread::sleep(2000);
    axis.getDefaultBMPImage();

    // Let us now handle the image with mirage.
    ImageRGB img;
    int dummy;
    mirage::img::Coordinate img_size(axis.getWidth(),axis.getHeight());
    img.resize(img_size,
               (ImageRGB::value_type*)axis.getImageBytes(dummy,dummy,dummy));

    // Let us save the mirage image in a file.
    std::ostringstream outputnamestream;
    outputnamestream << "X_" << argv[8] << "Y_" << argv[9] << "pan_" << pan << "tilt_" << tilt << "zoom_" << zoom << ".jpg";
    std::string outputname = outputnamestream.str();
    rgb2bgr(img);
    mirage::img::JPEG::write(img,outputname,80);
    std::cout << "Image has been captured in " << outputname << " file." << std::endl;

    return 0;
}
Ejemplo n.º 9
0
//////////////////////////////////////////////////////////////////////////
//  This is the routine where we create the data being output by the Virtual
//  Camera device.
//	Modified as per red5 to allow for dropped frames and reset of time stamps
//
//  http://comSender.googlecode.com/svn/trunk/
//
//////////////////////////////////////////////////////////////////////////
HRESULT CVCamStream::FillBuffer(IMediaSample *pms)
{
	// NOTE(review): called once per output frame. Fills the sample with pixels
	// from the active Spout sender (received into an OpenGL texture, then
	// transferred as bgr), or with random static if no sender is available.
	unsigned int imagesize, width, height;
	long l, lDataLen;
	bool bResult = false;
	DWORD dwSpoutPanel = 0;
	HRESULT hr=S_OK;;
    BYTE *pData;
	VIDEOINFOHEADER *pvi = (VIDEOINFOHEADER *) m_mt.Format();


	// If graph is inactive stop cueing samples
	if(!m_pParent->IsActive()) {
		return S_FALSE;
	}

	// first get the timing right
	// create some working info
	REFERENCE_TIME rtNow, rtDelta, rtDelta2=0; // delta for dropped, delta 2 for sleep.
	REFERENCE_TIME avgFrameTime = ((VIDEOINFOHEADER*)m_mt.pbFormat)->AvgTimePerFrame;
	
	// Simple method - avoids "stuttering" in yawcam but VLC fails !
	/*
	rtNow = m_rtLastTime;
    m_rtLastTime += avgFrameTime;
    pms->SetTime(&rtNow, &m_rtLastTime);
    pms->SetSyncPoint(TRUE);
	*/

	// What Time is it REALLY ???
	// m_pClock is returned NULL with Skype, but OK for YawCam and VLC
	m_pParent->GetSyncSource(&m_pClock); 
	if(m_pClock) {
		m_pClock->GetTime(&refSync1);
		m_pClock->Release();
	}
	else {
		// No reference clock: synthesize time from the frame count instead.
		refSync1 = NumFrames*avgFrameTime;
	}


	if(NumFrames <= 1) {
		// initiate values
		refStart = refSync1; // FirstFrame No Drop.
		refSync2 = 0;
 	}

	// Set the timestamps that will govern playback frame rate.
    // The current time is the sample's start
	rtNow = m_rtLastTime;
	m_rtLastTime = avgFrameTime + m_rtLastTime;
	
	// IAMDropppedFrame. We only have avgFrameTime to generate image.
	// Find generated stream time and compare to real elapsed time
	rtDelta=((refSync1-refStart)-(((NumFrames)*avgFrameTime)-avgFrameTime));

	// Negative delta: the generated stream is ahead of real time, so sleep
	// the difference (/10000 converts REFERENCE_TIME 100 ns units to ms).
	if(rtDelta-refSync2 < 0) { 
		//we are early
		rtDelta2=rtDelta-refSync2;
		if( abs(rtDelta2/10000)>=1)
			Sleep(abs(rtDelta2/10000));
	} // endif (rtDelta-refSync2 < 0)
	else if(rtDelta/avgFrameTime>NumDroppedFrames) {
		// new dropped frame
		NumDroppedFrames = rtDelta/avgFrameTime;
		// Figure new RT for sleeping
		refSync2 = NumDroppedFrames*avgFrameTime;
		// Our time stamping needs adjustment.
		// Find total real stream time from start time
		rtNow = refSync1-refStart;
		m_rtLastTime = rtNow+avgFrameTime;
		pms->SetDiscontinuity(true);
	} // end else if(rtDelta/avgFrameTime>NumDroppedFrames)

	// The SetTime method sets the stream times when this sample should begin and finish.
    hr = pms->SetTime(&rtNow, &m_rtLastTime);
	// Set true on every sample for uncompressed frames
    hr = pms->SetSyncPoint(true);
	// ============== END OF INITIAL TIMING ============

	// Check access to the sample's data buffer
    pms->GetPointer(&pData);
	if(pData == NULL) {
		return NOERROR;
	}


	// Get the current frame size for texture transfers
    imagesize = (unsigned int)pvi->bmiHeader.biSizeImage;
	width = (unsigned int)pvi->bmiHeader.biWidth;
	height = (unsigned int)pvi->bmiHeader.biHeight;
	if(width == 0 || height == 0) {
		return NOERROR;
	}

	// Don't do anything if disconnected because it will already have connected
	// previously and something has changed. It can only disconnect after it has connected.
	if(!bDisconnected) {

		// If connected, sizes should be OK, but check again
		// (3 bytes per pixel: the filter outputs 24-bit bgr frames).
		unsigned int size = (unsigned int)pms->GetSize();
		imagesize = width*height*3; // Retrieved above
		if(size != imagesize) {
			if(bInitialized) receiver.ReleaseReceiver();
			bInitialized = false;
			bDisconnected = true; // don't try again
			return NOERROR;
		}


		// Quit if nothing running at all
		if(!receiver.GetActiveSender(g_ActiveSender)) {
			if(bInitialized) {
				receiver.ReleaseReceiver();
				bInitialized = false;
				// Reset the registry entries for SpoutCam
				dwSpoutPanel = 0;
				receiver.spout.interop.spoutdx.WriteDwordToRegistry(dwSpoutPanel, "Software\\Leading Edge\\SpoutCam\\", "SpoutPanel");
				receiver.spout.WritePathToRegistry("", "Software\\Leading Edge\\SpoutCam\\", "Sender");
			}
			goto ShowStatic;
		}

		// Has SpoutPanel been opened
		HANDLE hMutex = OpenMutexA(MUTEX_ALL_ACCESS, 0, "SpoutPanel");
		if(hMutex) {
			bSpoutPanelOpened = true;
			// We opened it so close it, otherwise it is never released
			CloseHandle(hMutex);
		}
		else {
			// Wait for SpoutPanel to close
			if(bSpoutPanelOpened) {
				// Check the registry for the SpoutPanel flag
				dwSpoutPanel = 0;
				if(receiver.spout.interop.spoutdx.ReadDwordFromRegistry(&dwSpoutPanel, "Software\\Leading Edge\\SpoutCam\\", "SpoutPanel")) {
					if(dwSpoutPanel == 1) {
						if(bInitialized) receiver.ReleaseReceiver();
						bInitialized = false; // start again
					}
				}
				// Reset the registry flag
				dwSpoutPanel = 0;
				receiver.spout.interop.spoutdx.WriteDwordToRegistry(dwSpoutPanel, "Software\\Leading Edge\\SpoutCam\\", "SpoutPanel");
				bSpoutPanelOpened = false;
			}
		} // end SpoutPanel check

		// everything ready
		if(!bInitialized) {

			// If not initialized, look for a sender
			if(receiver.GetActiveSender(g_SenderName)) {
				
				// Initialize OpenGl if is has not been done
				if(!bGLinitialized) {
					if(InitOpenGL()) {
						// Find out whether bgra extensions are supported at runtime
						// bBGRA = receiver.spout.interop.IsBGRAavailable();
						bBGRA = isExtensionSupported("GL_EXT_bgra");
						bGLinitialized = true;
						// Call OpenSpout so that OpenGL extensions are loaded
						receiver.spout.OpenSpout();
					}
					else {
						bGLinitialized = false;
						bDisconnected = true; // don't try again
						return NOERROR;
					}
				}

				// Found a sender so initialize the receiver
				if(receiver.CreateReceiver(g_SenderName, g_SenderWidth, g_SenderHeight)) {
					
					// Create a local rgba OpenGL texture to receive the sender's shared texture
					CreateSenderTexture(g_SenderWidth, g_SenderHeight);

					// Create a local rgb buffer for data tranfser from the shared texture
					if(g_senderBuffer) free((void *)g_senderBuffer);
					g_senderBuffer = (unsigned char *)malloc(g_SenderWidth*g_SenderHeight*3*sizeof(unsigned char));
							
					// Write the sender path to the registry for SpoutPanel
					receiver.spout.WritePathToRegistry(g_SenderName, "Software\\Leading Edge\\SpoutCam", "Sender");
					bInitialized = true;
					NumFrames++;
					return NOERROR; // no more for this frame
				}
				else {
					// TODO : what
				}
			} // end found a sender
		} // end not initialized
		else {

			// Receive the shared texture or memoryshare pixels into a local rgba OpenGL texture
			// The shared texture is inverted at the same time, so no software inversion is needed.

			width = g_SenderWidth; // for sender size check
			height = g_SenderHeight;

			if(receiver.ReceiveTexture(g_SenderName, width, height, g_senderTexture, GL_TEXTURE_2D, bInvert)) {

				// Sender size check
				if(g_SenderWidth != width || g_SenderHeight != height) {
					g_SenderWidth  = width;
					g_SenderHeight = height;
					// restart to initialize with the new size
					receiver.ReleaseReceiver();
					bInitialized = false;
					NumFrames++;
					return NOERROR;					
				}

				glBindTexture(GL_TEXTURE_2D, g_senderTexture);
				#ifdef GL_EXT_bgra // Or else GL_BGR_EXT is not defined
				if (bBGRA && g_SenderWidth == g_Width && g_SenderHeight == g_Height) {
					// If bgra is supported at runtime and the sizes match, transfer the
					// texture data directly to the filter pixel buffer using bgr format.
					glGetTexImage(GL_TEXTURE_2D, 0, GL_BGR_EXT, GL_UNSIGNED_BYTE, (void *)pData);
				} else
				#endif
				{
					// If GL_EXT_bgra is not supported by the compiler, or bgra is not
					// supported at runtime, or the sender and filter are different sizes,
					// load the sender buffer with rgb data for software conversion to bgr.
					glGetTexImage(GL_TEXTURE_2D, 0, GL_RGB,  GL_UNSIGNED_BYTE, g_senderBuffer);
				}
				glBindTexture(GL_TEXTURE_2D, 0);

				if(g_SenderWidth != g_Width || g_SenderHeight != g_Height) {
					// For different sender and filter sizes, resample the rgb sender buffer into the bgr filter buffer.
					rgb2bgrResample(g_senderBuffer, (unsigned char *)pData, g_SenderWidth, g_SenderHeight, g_Width, g_Height);
				}
				else if(!bBGRA) {
					// Otherwise if the buffer dimensions match but bgra is not supported, convert from rgb to bgr.
					rgb2bgr(g_senderBuffer, (unsigned char *)pData, g_SenderWidth, g_SenderHeight);
				}

				NumFrames++;
				return NOERROR;

			} // endif received OK
			else {
				receiver.ReleaseReceiver();
				bInitialized = false;
			} // endif received texture OK
		} // endif initialized
	} // endif not disconnected

ShowStatic :

	// drop through to default static image if it did not work
	pms->GetPointer(&pData);
	lDataLen = pms->GetSize();
	for(l = 0; l <lDataLen; ++l) 
		pData[l] = rand();

	NumFrames++;

	return NOERROR;

} // FillBuffer