void Bitmap::Erosion() { int ImageWidth=ih.biWidth; int ImageHeight=ih.biHeight; // int structElement[2][2]={1,1,1,0}; int widthBytes = ((ImageWidth+31)&~31)/8; BYTE* newImageData = new BYTE[widthBytes*ImageHeight]; for (int y=0; y<ImageHeight; y++) { for (int x=0; x<widthBytes; x++) { newImageData[y*widthBytes+x]=255; } } for (int y=1; y<ImageHeight; y++) { for (int x=0; x<ImageWidth-1; x++) { if (getImageData(imageData, x, y, widthBytes)&& getImageData(imageData, x, y-1, widthBytes)&& getImageData(imageData, x+1, y, widthBytes) ) { resetImageData(newImageData,x, y, widthBytes); } } } delete[] imageData; imageData=newImageData; }
// Lazily populate _imageData: decode only on first use, keep a valid
// result for subsequent calls.
void Ti::TiBlob::ensureImageLoaded()
{
    if (_imageData.isValid())
        return;
    _imageData = getImageData();
}
void QgsSvgCache::replaceParamsAndCacheSvg( QgsSvgCacheEntry *entry )
{
  if ( !entry )
    return;

  // Parse the raw SVG; bail out silently if it is not valid XML.
  QDomDocument svgDoc;
  if ( !svgDoc.setContent( getImageData( entry->path ) ) )
    return;

  QDomElement docElem = svgDoc.documentElement();

  // Derive the stroke-width scale factor from the document's viewbox.
  QSizeF viewboxSize;
  const double sizeScaleFactor = calcSizeScaleFactor( entry, docElem, viewboxSize );
  entry->viewboxSize = viewboxSize;

  // Replace fill color, stroke color, stroke width in all nodes.
  replaceElemParams( docElem, entry->fill, entry->stroke, entry->strokeWidth * sizeScaleFactor );

  entry->svgContent = svgDoc.toByteArray( 0 );

  // toByteArray screws up tspans inside text by adding new lines before and
  // after each span... this should help, at the risk of potentially breaking
  // some svgs where the newline is desired.
  entry->svgContent.replace( "\n<tspan", "<tspan" );
  entry->svgContent.replace( "</tspan>\n", "</tspan>" );

  mTotalSize += entry->svgContent.size();
}
bool loadCubemapFromDirectory(const Texture &texture, const std::string &dirname, std::string *error) { static const std::string sidesStr[6] = { std::string("posx.png"), std::string("posy.png"), std::string("posz.png"), std::string("negx.png"), std::string("negy.png"), std::string("negz.png") }; static const GLuint sidesGl[6] { GL_TEXTURE_CUBE_MAP_POSITIVE_X, GL_TEXTURE_CUBE_MAP_POSITIVE_Y, GL_TEXTURE_CUBE_MAP_POSITIVE_Z, GL_TEXTURE_CUBE_MAP_NEGATIVE_X, GL_TEXTURE_CUBE_MAP_NEGATIVE_Y, GL_TEXTURE_CUBE_MAP_NEGATIVE_Z }; int width, height, components; GLenum internalFormat, format; unique_ptr_stbi data(static_cast<uint8_t *>(nullptr), stbi_image_free); glBindTexture(GL_TEXTURE_CUBE_MAP, texture); for (int i = 0; i < 6; ++i) { std::string filename = dirname + "/" + sidesStr[i]; if (!getImageData(filename, error, data, &width, &height, &components, &internalFormat, &format)) { return false; } glTexImage2D(sidesGl[i], 0, internalFormat, width, height, 0, format, GL_UNSIGNED_BYTE, data.get()); } glBindTexture(GL_TEXTURE_CUBE_MAP, 0); return true; }
void XFormWidget::pushImageIn3DWindow() { V3dR_GLWidget* w = 0; if (mypara_3Dview.b_still_open && mypara_3Dview.window3D && (w = mypara_3Dview.window3D->getGLWidget())) { mypara_3Dview.image4d = getImageData(); w->updateImageData(); mypara_3Dview.window3D->setDataTitle(windowTitle()); } if (mypara_3Dlocalview.b_still_open && mypara_3Dlocalview.window3D && (w = mypara_3Dlocalview.window3D->getGLWidget())) { mypara_3Dlocalview.image4d = getImageData(); w->updateImageData(); mypara_3Dlocalview.window3D->setDataTitle(windowTitle()); } }
// Create an external editor for the image at `index`, or nullptr when the
// item carries no image data or no editor command is configured.  SVG data
// is routed to the dedicated SVG editor command.
QObject *ItemImage::createExternalEditor(const QModelIndex &index, QWidget *parent) const
{
    QString mime;
    QByteArray data;
    if ( !getImageData(index, &data, &mime) )
        return nullptr;   // was NULL; file already uses C++11 elsewhere

    const QString &cmd = mime.contains("svg") ? m_svgEditor : m_editor;
    return cmd.isEmpty() ? nullptr : new ItemEditor(data, mime, cmd, parent);
}
// Collect the ROI polygons of the XY, YZ and ZX views, in that order.
// Returns an empty list when no image is loaded.
QList<QPolygon> XFormWidget::get3ViewROI()
{
    QList<QPolygon> rois;
    My4DImage* image = getImageData();
    if (image)
    {
        rois.append(image->p_xy_view->roiPolygon);
        rois.append(image->p_yz_view->roiPolygon);
        rois.append(image->p_zx_view->roiPolygon);
    }
    return rois;
}
// Install one ROI polygon per view (XY, YZ, ZX).  Returns false when the
// list does not hold exactly three polygons or no image is loaded.
bool XFormWidget::set3ViewROI(QList<QPolygon> & roi_list)
{
    if (roi_list.size() != 3)
        return false;

    My4DImage* image = getImageData();
    if (!image)
        return false;

    image->p_xy_view->roiPolygon = roi_list[0];
    image->p_yz_view->roiPolygon = roi_list[1];
    image->p_zx_view->roiPolygon = roi_list[2];
    return true;
}
/* Shared mutex protecting tiles_loaded[] / tiles_loaded_count across ALL
 * worker threads.  Fix: the original created a fresh mtx_t on each thread's
 * own stack, so every thread locked a private mutex and the shared list was
 * effectively unprotected.  It also passed the magic value 1 to mtx_init();
 * the portable type flag is mtx_plain. */
static mtx_t g_loaded_mtx;
static once_flag g_loaded_mtx_once = ONCE_FLAG_INIT;
static void init_loaded_mtx(void) { mtx_init(&g_loaded_mtx, mtx_plain); }

/* Worker loop: pull the next tile, decode its image data, then publish the
 * result on the shared loaded list under the global mutex.  Never returns
 * in normal operation. */
int worker_thread(void* arg)
{
    (void)arg;
    call_once(&g_loaded_mtx_once, init_loaded_mtx);
    while (1) {
        Tile* tile = tile_wait_pull(tiles_get);
        stbi_uc* data = getImageData(tile);   /* decode outside the lock */
        mtx_lock(&g_loaded_mtx);
        tile->texdata = data;
        tiles_loaded[tiles_loaded_count++] = tile;
        mtx_unlock(&g_loaded_mtx);
    }
    return 0;
}
int main(int argc, char** argv){ if(argc != 3){ cout << "USAGE :: ./Parse <image_path> <datafile_path>"; exit(-1); } string imagefile(argv[1]); string datafile(argv[2]); bool DEBUG = false; Mat Image = imread(imagefile, 1); if(DEBUG){ // show image namedWindow("image", CV_WINDOW_AUTOSIZE); imshow("image", Image); } // get image info in a struct AnnotatedDatasetInfo info = getImageData(Image, datafile); if(DEBUG){ // printing data int rows = 240; int cols = 320; cout << "Mat\n"; for(int i = 0; i < rows; i++) { for(int j = 0; j < cols; j++) cout << info.Labels.at<int>(i, j) << " "; cout << endl; } cout << "\nDepth\n"; for(int i = 0; i < rows; i++) { for(int j = 0; j < cols; j++) cout << info.Depths.at<Vec3f>(i, j)[0] << ", " << info.Depths.at<Vec3f>(i, j)[1] << ", " << info.Depths.at<Vec3f>(i, j)[2] << " :: "; cout << endl; } cout << "\nPoints\n"; for(int i = 0; i <= 1; i++) { for(int j = 0; j < info.Points[i].size(); j++) cout << "(" << info.Points[i][j].x << ", " << info.Points[i][j].y << "), "; cout << endl; } waitKey(0); } }
// Scan a packed bitmap-font texture and register every glyph with the
// FontDefinition.  The glyph grid is walked left-to-right, top-to-bottom;
// a glyph is any horizontal run of pixels whose colour differs from
// fillColor.  ASCII codes are assigned sequentially from
// fontDefinition.startChar.
void initializeBitmapFont(FontDefinition& fontDefinition, int texture_id, const Color& fillColor) {
    int width = 0;
    int height = 0;
    ID3D11ShaderResourceView* texture = assets::getRawTexture(texture_id);
    assert(texture != 0);
    // CPU-readable copy of the texture pixels; released at the end.
    BYTE* data = getImageData(texture, &width, &height);
    // Start one pixel left of the first cell so the first ++x lands on it.
    int x = fontDefinition.startX + fontDefinition.padding - 1;
    int y = fontDefinition.startY + fontDefinition.padding;
    uint32 ascii = fontDefinition.startChar;
    Color c = getColor(data, x, y, width);
    bool running = true;
    bool isChar = false;        // currently inside a glyph run
    int charStartedX = 0;
    int charStartedY = 0;
    int charCount = 0;
    while (running) {
        ++x;
        // Past the right edge of the grid: wrap to the next glyph row.
        if (x > (fontDefinition.startX + fontDefinition.width)) {
            x = fontDefinition.startX + fontDefinition.padding - 1;
            y += fontDefinition.padding + fontDefinition.gridHeight;// - 1;
            isChar = false;
            charCount = 0;
        }
        // Stop when we leave the defined area or run off the texture.
        if (y >= (fontDefinition.startY + fontDefinition.height)) {
            running = false;
        }
        if (y >= height) {
            running = false;
        }
        if (running) {
            c = getColor(data, x, y, width);
            if ( c != fillColor && !isChar) {
                // Left edge of a glyph.
                isChar = true;
                charStartedX = x;
                charStartedY = y;
            }
            else if (c == fillColor && isChar) {
                // Right edge: register the finished glyph.
                isChar = false;
                // NOTE(review): this `width` shadows the texture width above.
                int width = x - charStartedX - 1;
                ++charCount;
                fontDefinition.addChar(ascii, charStartedX + 1, charStartedY, width);
                ++ascii;
            }
        }
    }
    delete[] data;
}
void QgsSvgCache::containsParams( const QString& path, bool& hasFillParam, QColor& defaultFillColor, bool& hasOutlineParam, QColor& defaultOutlineColor, bool& hasOutlineWidthParam, double& defaultOutlineWidth ) const { defaultFillColor = QColor( Qt::black ); defaultOutlineColor = QColor( Qt::black ); defaultOutlineWidth = 1.0; QDomDocument svgDoc; if ( !svgDoc.setContent( getImageData( path ) ) ) { return; } QDomElement docElem = svgDoc.documentElement(); containsElemParams( docElem, hasFillParam, defaultFillColor, hasOutlineParam, defaultOutlineColor, hasOutlineWidthParam, defaultOutlineWidth ); }
bool loadTextureFromFile(const Texture &texture, const std::string &filename, std::string *error) { int width, height, components; GLenum internalFormat, format; unique_ptr_stbi data(static_cast<uint8_t *>(nullptr), stbi_image_free); if (!getImageData(filename, error, data, &width, &height, &components, &internalFormat, &format)) { return false; } glBindTexture(GL_TEXTURE_2D, texture); glTexImage2D(GL_TEXTURE_2D, 0, internalFormat, width, height, 0, format, GL_UNSIGNED_BYTE, data.get()); glBindTexture(GL_TEXTURE_2D, 0); return true; }
int main() { const int WIDTH = 1280, HEIGHT = 960; if (!openCamera(0)) { std::cout << "openCamera failed!" << std::endl; return true; } if (!initCamera()) { std::cout << "initCamera failed!" << std::endl; return true; } bool autov; setImageFormat(WIDTH, HEIGHT, 1, IMG_RAW8); setValue(CONTROL_EXPOSURE, 400, true); setValue(CONTROL_GAIN, 35, false); //int exposure_us = getValue(CONTROL_EXPOSURE, &autov); //int gain = getValue(CONTROL_GAIN, &autov); //int max_gain = getMax(CONTROL_GAIN); //std::cout << exposure_us << ", " << gain << ", " << max_gain << std::endl; IplImage *buffer = cvCreateImage(cvSize(WIDTH, HEIGHT), IPL_DEPTH_8U, 1); startCapture(); bool captured = false; do { std::chrono::milliseconds(10); captured = getImageData((unsigned char*)buffer->imageData, buffer->imageSize, -1); } while (!captured); cvSaveImage("sun_cam_2.jpg", buffer); stopCapture(); closeCamera(); return false; }
void QgsSvgCache::replaceParamsAndCacheSvg( QgsSvgCacheEntry* entry )
{
  if ( !entry )
    return;

  // Parse the raw SVG; give up silently when it is not valid XML.
  QDomDocument svgDoc;
  if ( !svgDoc.setContent( getImageData( entry->file ) ) )
    return;

  // Replace fill color, outline color, outline width in all nodes.
  QDomElement docElem = svgDoc.documentElement();
  replaceElemParams( docElem, entry->fill, entry->outline, entry->outlineWidth );

  // Cache the substituted document and account for its size.
  entry->svgContent = svgDoc.toByteArray();
  mTotalSize += entry->svgContent.size();
}
// Serialise the canvas contents as a data: URL in the requested MIME type.
// Returns "data:," for a zero-sized or unbacked canvas.
String HTMLCanvasElement::toDataURLInternal(const String& mimeType, const double* quality, bool isSaving) const
{
    if (m_size.isEmpty() || !buffer())
        return String("data:,");
    String encodingMimeType = toEncodingMimeType(mimeType);
    // Try to get ImageData first, as that may avoid lossy conversions.
    RefPtr<ImageData> imageData = getImageData();
    if (imageData)
        return ImageDataToDataURL(ImageDataBuffer(imageData->size(), imageData->data()), encodingMimeType, quality);
    // 3D (WebGL) contexts must repaint their rendering results into the
    // canvas buffer before encoding; setSavingImage() brackets the paint so
    // the context knows this readback is for saving.
    if (m_context && m_context->is3d()) {
        toWebGLRenderingContext(m_context.get())->setSavingImage(isSaving);
        m_context->paintRenderingResultsToCanvas();
        toWebGLRenderingContext(m_context.get())->setSavingImage(false);
    }
    return buffer()->toDataURL(encodingMimeType, quality);
}
// Inspect the SVG at `path` and report which parameterised properties it
// contains, whether each carries a default value, and that default.  All
// out-parameters are unconditionally initialised below, so they are
// well-defined even when the file cannot be parsed.
void QgsSvgCache::containsParams( const QString &path,
                                  bool &hasFillParam, bool &hasDefaultFillParam, QColor &defaultFillColor,
                                  bool &hasFillOpacityParam, bool &hasDefaultFillOpacity, double &defaultFillOpacity,
                                  bool &hasStrokeParam, bool &hasDefaultStrokeColor, QColor &defaultStrokeColor,
                                  bool &hasStrokeWidthParam, bool &hasDefaultStrokeWidth, double &defaultStrokeWidth,
                                  bool &hasStrokeOpacityParam, bool &hasDefaultStrokeOpacity, double &defaultStrokeOpacity ) const
{
  // Fallback results: nothing found, default styling.
  hasFillParam = false;
  hasFillOpacityParam = false;
  hasStrokeParam = false;
  hasStrokeWidthParam = false;
  hasStrokeOpacityParam = false;
  defaultFillColor = QColor( Qt::white );
  defaultFillOpacity = 1.0;
  defaultStrokeColor = QColor( Qt::black );
  defaultStrokeWidth = 0.2;
  defaultStrokeOpacity = 1.0;
  hasDefaultFillParam = false;
  hasDefaultFillOpacity = false;
  hasDefaultStrokeColor = false;
  hasDefaultStrokeWidth = false;
  hasDefaultStrokeOpacity = false;

  // Parse the SVG; the defaults above remain in effect on failure.
  QDomDocument svgDoc;
  if ( !svgDoc.setContent( getImageData( path ) ) )
  {
    return;
  }

  // Recursive element walk performs the actual detection.
  QDomElement docElem = svgDoc.documentElement();
  containsElemParams( docElem, hasFillParam, hasDefaultFillParam, defaultFillColor,
                      hasFillOpacityParam, hasDefaultFillOpacity, defaultFillOpacity,
                      hasStrokeParam, hasDefaultStrokeColor, defaultStrokeColor,
                      hasStrokeWidthParam, hasDefaultStrokeWidth, defaultStrokeWidth,
                      hasStrokeOpacityParam, hasDefaultStrokeOpacity, defaultStrokeOpacity );
}
// Grab the screen region covering all widgets and return one averaged
// colour per widget.  The pixel buffer m_buf is grown on demand and reused
// across calls (it never shrinks).
QList<QRgb> D3D9Grabber::grabWidgetsColors(QList<GrabWidget *> &widgets)
{
    QList<QRgb> result;
    m_rect = getEffectiveRect(widgets);

    // Clamp the capture rect to the desktop.
    if(m_rect.bottom > QApplication::desktop()->height())
        m_rect.bottom = QApplication::desktop()->height();
    if(m_rect.right > QApplication::desktop()->width())
        m_rect.right = QApplication::desktop()->width();

    int bufLengthNeeded = getBufLength(m_rect);
    if (bufLengthNeeded > m_bufLength)
    {
        if(m_buf != NULL)
            free(m_buf);
        m_buf = (BYTE *)malloc(bufLengthNeeded);
        // Fix: on allocation failure the original carried on and let
        // getImageData() write through a null pointer.
        if (m_buf == NULL)
        {
            m_bufLength = 0;
            return result;
        }
        m_bufLength = bufLengthNeeded;
    }

    getImageData(m_buf, m_rect);
    for(int i = 0; i < widgets.size(); i++)
    {
        GrabWidget * widget = widgets[i];
        result.append(getColor(widget->x(), widget->y(), widget->width(), widget->height()));
    }
    return result;
}
void QgsSvgCache::replaceParamsAndCacheSvg( QgsSvgCacheEntry* entry )
{
  if ( !entry )
    return;

  // Parse the raw SVG; give up silently when it is not valid XML.
  QDomDocument svgDoc;
  if ( !svgDoc.setContent( getImageData( entry->file ) ) )
    return;

  QDomElement docElem = svgDoc.documentElement();

  // Scale the outline width by the factor derived from the document's viewbox.
  QSizeF viewboxSize;
  const double sizeScaleFactor = calcSizeScaleFactor( entry, docElem, viewboxSize );
  entry->viewboxSize = viewboxSize;

  // Replace fill color, outline color, outline width in all nodes.
  replaceElemParams( docElem, entry->fill, entry->outline, entry->outlineWidth * sizeScaleFactor );

  entry->svgContent = svgDoc.toByteArray();
  mTotalSize += entry->svgContent.size();
}
// Load a Targa (.tga) image: read the 18-byte header field by field,
// validate the image-type / bit-depth combination, pull the pixel data into
// memory and normalise the vertical orientation.  On any failure a warning
// is logged and the constructor returns early, leaving m_imgValid unset.
CTgaImageFile::CTgaImageFile(const std::string filePath)
{
    // Check file`s existance.
    if(!(m_fp = fopen(filePath.c_str(), "rb")))
    {
        CLogger::failedLoadWarning(filePath, "File not found or broken");
        return;
    }

    // TGA header fields, in file order.
    // NOTE(review): fread() return values are unchecked — a truncated file
    // leaves these members uninitialised.
    fread(&m_tgaIdLength, sizeof(uint8_t), 1, m_fp);
    fread(&m_tgaColorMapType, sizeof(uint8_t), 1, m_fp);
    fread(&m_tgaImageType, sizeof(uint8_t), 1, m_fp);
    fread(&m_tgaColorMapFirstEntry, sizeof(int16_t), 1, m_fp);
    fread(&m_tgaColorMapLength, sizeof(int16_t), 1, m_fp);
    fread(&m_tgaColorMapEntrySize, sizeof(uint8_t), 1, m_fp);
    fread(&m_tgaOriginX, sizeof(int16_t), 1, m_fp);
    fread(&m_tgaOriginY, sizeof(int16_t), 1, m_fp);
    fread(&m_tgaWidth, sizeof(int16_t), 1, m_fp);
    fread(&m_tgaHeight, sizeof(int16_t), 1, m_fp);
    fread(&m_tgaBPP, sizeof(uint8_t), 1, m_fp);
    fread(&m_tgaDescriptor, sizeof(uint8_t), 1, m_fp);
    m_components = m_tgaBPP / 8;
    m_bytes = m_tgaWidth * m_tgaHeight * m_components;

    // Check if the image type is supported by this loader.
    if(m_tgaImageType != TGA_RAW_BYTE_DATA and m_tgaImageType != TGA_RLE_COMPRESSED_DATA)
    {
        CLogger::failedLoadWarning(filePath, "Unsupported file type");
        fclose(m_fp);
        return;
    }

    // Check if the BPP is supported by this loader and set the proper GL
    // type.  Only 24-bit (RGB) and 32-bit (RGBA) pixels are accepted.
    if(m_tgaBPP == 24)
    {
        m_colorType = GL_RGB;
    }
    else
    {
        if(m_tgaBPP == 32)
        {
            m_colorType = GL_RGBA;
        }
        else
        {
            if(m_tgaImageType == TGA_RAW_BYTE_DATA)
            {
                CLogger::failedLoadWarning(filePath, "Unsupported raw format");
                fclose(m_fp);
                return;
            }
            else // RLE
            {
                CLogger::failedLoadWarning(filePath, "Unsupported compressed format");
                fclose(m_fp);
                return;
            }
        }
    }

    // Get image data.
    getImageData();
    // The file can be closed now as the data is in the memory.
    fclose(m_fp);

    // Flip correction.
    // NOTE(review): flips when the descriptor bit is CLEAR — presumably the
    // file stores rows bottom-up by default; confirm against the TGA spec.
    if(!(m_tgaDescriptor & TGA_FLIPPED_VERTICALLY))
    {
        flipImageVertically();
    }

    // Publish the generic image-file properties.
    m_imgBaseWidth = m_tgaWidth;
    m_imgBaseHeight = m_tgaHeight;
    m_imgHasAlpha = (m_tgaBPP == 32) ? true : false;
    if(m_imgHasAlpha)
    {
        m_imgFormat = GL_RGBA;
    }
    else
    {
        m_imgFormat = GL_RGB;
    }
    m_imgType = GL_UNSIGNED_BYTE;
    m_imgLevelAmount = 1;
    m_imgValid = true;
    m_imgBlockSize = (m_tgaBPP == 32) ? 4 : 3;
    m_imgFilePath = filePath;
}
/* Acquire a single frame: optionally erase the plate, expose for the
 * programmed time (abortable/stoppable via EPICS events), command the
 * mar345 server to scan the plate into fullFileName, and read the image
 * back in when array callbacks are enabled.
 * Fix: two calls below contained HTML-entity mojibake — "&curren" had been
 * collapsed to the '¤' character, turning "&currentTime" into "¤tTime"
 * and breaking compilation; the intended "&currentTime" is restored. */
asynStatus mar345::acquireFrame()
{
    asynStatus status=asynSuccess;
    epicsTimeStamp startTime, currentTime;
    int eraseMode;
    epicsEventWaitStatus waitStatus;
    int imageCounter;
    int arrayCallbacks;
    double acquireTime;
    double timeRemaining;
    int size, res;
    int shutterMode, useShutter;
    char tempFileName[MAX_FILENAME_LEN];
    char fullFileName[MAX_FILENAME_LEN];
    //const char *functionName = "acquireframe";

    /* Get current values of some parameters */
    getDoubleParam(ADAcquireTime, &acquireTime);
    getIntegerParam(ADShutterMode, &shutterMode);
    getIntegerParam(mar345Size, &size);
    getIntegerParam(mar345Res, &res);
    getIntegerParam(NDArrayCallbacks, &arrayCallbacks);
    getIntegerParam(mar345EraseMode, &eraseMode);
    if (shutterMode == ADShutterModeNone) useShutter=0;
    else useShutter=1;

    epicsTimeGetCurrent(&this->acqStartTime);
    createFileName(MAX_FILENAME_LEN, tempFileName);
    /* We need to append the extension */
    epicsSnprintf(fullFileName, sizeof(fullFileName), "%s.mar%d", tempFileName, imageSizes[res][size]);

    /* Erase before exposure if set */
    if (eraseMode == mar345EraseBefore) {
        status = this->erase();
        if (status) return(status);
    }

    /* Set the the start time for the TimeRemaining counter */
    epicsTimeGetCurrent(&startTime);
    timeRemaining = acquireTime;
    if (useShutter) setShutter(1);

    /* Wait for the exposure time using epicsEventWaitWithTimeout,
     * so we can abort */
    epicsTimerStartDelay(this->timerId, acquireTime);
    setIntegerParam(ADStatus, mar345StatusExpose);
    callParamCallbacks();
    while(1) {
        if (epicsEventTryWait(this->abortEventId) == epicsEventWaitOK) {
            status = asynError;
            break;
        }
        /* Release the port lock while sleeping so other requests can run. */
        this->unlock();
        waitStatus = epicsEventWaitWithTimeout(this->stopEventId, MAR345_POLL_DELAY);
        this->lock();
        if (waitStatus == epicsEventWaitOK) {
            /* The acquisition was stopped before the time was complete */
            epicsTimerCancel(this->timerId);
            break;
        }
        epicsTimeGetCurrent(&currentTime);
        timeRemaining = acquireTime - epicsTimeDiffInSeconds(&currentTime, &startTime);
        if (timeRemaining < 0.) timeRemaining = 0.;
        setDoubleParam(ADTimeRemaining, timeRemaining);
        callParamCallbacks();
    }
    setDoubleParam(ADTimeRemaining, 0.0);
    if (useShutter) setShutter(0);
    setIntegerParam(ADStatus, mar345StatusIdle);
    callParamCallbacks();

    // If the exposure was aborted return error
    if (status) return asynError;

    /* Command the server to scan the plate into the file. */
    setIntegerParam(ADStatus, mar345StatusScan);
    callParamCallbacks();
    epicsSnprintf(this->toServer, sizeof(this->toServer), "COMMAND SCAN %s", fullFileName);
    setStringParam(NDFullFileName, fullFileName);
    callParamCallbacks();
    writeServer(this->toServer);
    status = waitForCompletion("SCAN_DATA Ended o.k.", MAR345_COMMAND_TIMEOUT);
    if (status) {
        return asynError;
    }
    getIntegerParam(NDArrayCounter, &imageCounter);
    imageCounter++;
    setIntegerParam(NDArrayCounter, imageCounter);
    /* Call the callbacks to update any changes */
    callParamCallbacks();
    /* If arrayCallbacks is set then read the file back in */
    if (arrayCallbacks) {
        getImageData();
    }
    /* Erase after scanning if set */
    if (eraseMode == mar345EraseAfter) status = this->erase();
    return status;
}
int main() { int width; char* bayer[] = {"RG","BG","GR","GB"}; char* controls[MAX_CONTROL] = {"Exposure", "Gain", "Gamma", "WB_R", "WB_B", "Brightness", "USB Traffic"}; int height; int i; char c; bool bresult; int time1,time2; int count=0; char buf[128]={0}; int CamNum=0; ///long exposure, exp_min, exp_max, exp_step, exp_flag, exp_default; //long gain, gain_min, gain_max,gain_step, gain_flag, gain_default; IplImage *pRgb; int numDevices = getNumberOfConnectedCameras(); if(numDevices <= 0) { printf("no camera connected, press any key to exit\n"); getchar(); return -1; } else printf("attached cameras:\n"); for(i = 0; i < numDevices; i++) printf("%d %s\n",i, getCameraModel(i)); printf("\nselect one to privew\n"); scanf("%d", &CamNum); bresult = openCamera(CamNum); if(!bresult) { printf("OpenCamera error,are you root?,press any key to exit\n"); getchar(); return -1; } printf("%s information\n",getCameraModel(CamNum)); int iMaxWidth, iMaxHeight; iMaxWidth = getMaxWidth(); iMaxHeight = getMaxHeight(); printf("resolution:%dX%d\n", iMaxWidth, iMaxHeight); if(isColorCam()) printf("Color Camera: bayer pattern:%s\n",bayer[getColorBayer()]); else printf("Mono camera\n"); for( i = 0; i < MAX_CONTROL; i++) { if(isAvailable((Control_TYPE)i)) printf("%s support:Yes\n", controls[i]); else printf("%s support:No\n", controls[i]); } printf("\nPlease input the <width height bin image_type> with one space, ie. 640 480 2 0. use max resolution if input is 0. Press ESC when video window is focused to quit capture\n"); int bin = 1, Image_type; scanf("%d %d %d %d", &width, &height, &bin, &Image_type); if(width == 0 || height == 0) { width = iMaxWidth; height = iMaxHeight; } initCamera(); //this must be called before camera operation. 
and it only need init once printf("sensor temperature:%02f\n", getSensorTemp()); // IMG_TYPE image_type; while(!setImageFormat(width, height, bin, (IMG_TYPE)Image_type))//IMG_RAW8 { printf("Set format error, please check the width and height\n ASI120's data size(width*height) must be integer multiple of 1024\n"); printf("Please input the width and height again£¬ie. 640 480\n"); scanf("%d %d %d %d", &width, &height, &bin, &Image_type); } printf("\nset image format %d %d %d %d success, start privew, press ESC to stop \n", width, height, bin, Image_type); if(Image_type == IMG_RAW16) pRgb=cvCreateImage(cvSize(getWidth(),getHeight()), IPL_DEPTH_16U, 1); else if(Image_type == IMG_RGB24) pRgb=cvCreateImage(cvSize(getWidth(),getHeight()), IPL_DEPTH_8U, 3); else pRgb=cvCreateImage(cvSize(getWidth(),getHeight()), IPL_DEPTH_8U, 1); setValue(CONTROL_EXPOSURE, 100*1000, false); //ms//auto setValue(CONTROL_GAIN,getMin(CONTROL_GAIN), false); setValue(CONTROL_BANDWIDTHOVERLOAD, getMin(CONTROL_BANDWIDTHOVERLOAD), false); //low transfer speed setValue(CONTROL_WB_B, 90, false); setValue(CONTROL_WB_R, 48, false); setAutoPara(getMax(CONTROL_GAIN)/2,10,150); //max auto gain and exposure and target brightness // EnableDarkSubtract("dark.bmp"); //dark subtract will be disabled when exposure set auto and exposure below 500ms startCapture(); //start privew bDisplay = 1; #ifdef _LIN pthread_t thread_display; pthread_create(&thread_display, NULL, Display, (void*)pRgb); #elif defined _WINDOWS HANDLE thread_setgainexp; thread_setgainexp = (HANDLE)_beginthread(Display, NULL, (void*)pRgb); #endif time1 = GetTickCount(); int iStrLen = 0, iTextX = 40, iTextY = 60; void* retval; // int time0, iWaitMs = -1; // bool bGetImg; while(bMain) { // time0 = GetTickCount(); getImageData((unsigned char*)pRgb->imageData, pRgb->imageSize, 200); // bGetImg = getImageData((unsigned char*)pRgb->imageData, pRgb->imageSize, iWaitMs); time2 = GetTickCount(); // printf("waitMs%d, deltaMs%d, %d\n", iWaitMs, time2 - 
time0, bGetImg); count++; if(time2-time1 > 1000 ) { sprintf(buf, "fps:%d dropped frames:%lu ImageType:%d",count, getDroppedFrames(), (int)getImgType()); count = 0; time1=GetTickCount(); printf(buf); printf("\n"); } if(Image_type != IMG_RGB24 && Image_type != IMG_RAW16) { iStrLen = strlen(buf); CvRect rect = cvRect(iTextX, iTextY - 15, iStrLen* 11, 20); cvSetImageROI(pRgb , rect); cvSet(pRgb, CV_RGB(180, 180, 180)); cvResetImageROI(pRgb); } cvText(pRgb, buf, iTextX,iTextY ); if(bChangeFormat) { bChangeFormat = 0; bDisplay = false; pthread_join(thread_display, &retval); cvReleaseImage(&pRgb); stopCapture(); switch(change) { case change_imagetype: Image_type++; if(Image_type > 3) Image_type = 0; break; case change_bin: if(bin == 1) { bin = 2; width/=2; height/=2; } else { bin = 1; width*=2; height*=2; } break; case change_size_smaller: if(width > 320 && height > 240) { width/= 2; height/= 2; } break; case change_size_bigger: if(width*2*bin <= iMaxWidth && height*2*bin <= iMaxHeight) { width*= 2; height*= 2; } break; } setImageFormat(width, height, bin, (IMG_TYPE)Image_type); if(Image_type == IMG_RAW16) pRgb=cvCreateImage(cvSize(getWidth(),getHeight()), IPL_DEPTH_16U, 1); else if(Image_type == IMG_RGB24) pRgb=cvCreateImage(cvSize(getWidth(),getHeight()), IPL_DEPTH_8U, 3); else pRgb=cvCreateImage(cvSize(getWidth(),getHeight()), IPL_DEPTH_8U, 1); bDisplay = 1; pthread_create(&thread_display, NULL, Display, (void*)pRgb); startCapture(); //start privew } } END: if(bDisplay) { bDisplay = 0; #ifdef _LIN pthread_join(thread_display, &retval); #elif defined _WINDOWS Sleep(50); #endif } stopCapture(); closeCamera(); cvReleaseImage(&pRgb); printf("main function over\n"); return 1; }
// Load an image file with DevIL and upload it as a 2D GL texture.  The
// texture name is stored in TexID and a malloc'd copy of the file name is
// kept in `name`.  Returns ilLoadImage()'s success flag.
bool Texture::loadImage(const char *_name)
{
    glGenTextures(1, &TexID);
    glBindTexture(GL_TEXTURE_2D, TexID);
    // NOTE(review): `name` is malloc'd on every call — calling loadImage()
    // twice leaks the previous copy; consider free()ing first.
    name = (char*)malloc(strlen(_name)+1);
    strcpy(name, _name);
    ilBindImage(image);
    bool ret = ilLoadImage(name);
    if(ret)
    {
        // NOTE(review): pixels are uploaded as GL_BGR (no alpha) into a
        // GL_RGBA internal format — any alpha channel in the source file is
        // dropped; confirm this is intended.
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, ilGetInteger(IL_IMAGE_WIDTH), ilGetInteger(IL_IMAGE_HEIGHT), 0, GL_BGR, GL_UNSIGNED_BYTE, getImageData());
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST); // GL_NEAREST
        // NOTE(review): mipmaps are generated below, but GL_NEAREST as the
        // MIN filter never samples them.
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
        glGenerateMipmap(GL_TEXTURE_2D);
    }
    return ret;
}
// Machine-generated Qt meta-object dispatcher (moc output) for Context2D:
// InvokeMetaMethod ids 0-30 route to the canvas API (save/restore,
// transforms, gradients, paths, drawImage, getImageData/putImageData);
// Read/WriteProperty ids 0-11 route to the context-state properties
// (globalAlpha ... shadowColor).
// NOTE(review): do not edit by hand — regenerate with moc when the class
// declaration changes.
int Context2D::qt_metacall(QMetaObject::Call _c, int _id, void **_a) { _id = QObject::qt_metacall(_c, _id, _a); if (_id < 0) return _id; if (_c == QMetaObject::InvokeMetaMethod) { switch (_id) { case 0: changed((*reinterpret_cast< const QImage(*)>(_a[1]))); break; case 1: save(); break; case 2: restore(); break; case 3: scale((*reinterpret_cast< qreal(*)>(_a[1])),(*reinterpret_cast< qreal(*)>(_a[2]))); break; case 4: rotate((*reinterpret_cast< qreal(*)>(_a[1]))); break; case 5: translate((*reinterpret_cast< qreal(*)>(_a[1])),(*reinterpret_cast< qreal(*)>(_a[2]))); break; case 6: transform((*reinterpret_cast< qreal(*)>(_a[1])),(*reinterpret_cast< qreal(*)>(_a[2])),(*reinterpret_cast< qreal(*)>(_a[3])),(*reinterpret_cast< qreal(*)>(_a[4])),(*reinterpret_cast< qreal(*)>(_a[5])),(*reinterpret_cast< qreal(*)>(_a[6]))); break; case 7: setTransform((*reinterpret_cast< qreal(*)>(_a[1])),(*reinterpret_cast< qreal(*)>(_a[2])),(*reinterpret_cast< qreal(*)>(_a[3])),(*reinterpret_cast< qreal(*)>(_a[4])),(*reinterpret_cast< qreal(*)>(_a[5])),(*reinterpret_cast< qreal(*)>(_a[6]))); break; case 8: { CanvasGradient _r = createLinearGradient((*reinterpret_cast< qreal(*)>(_a[1])),(*reinterpret_cast< qreal(*)>(_a[2])),(*reinterpret_cast< qreal(*)>(_a[3])),(*reinterpret_cast< qreal(*)>(_a[4]))); if (_a[0]) *reinterpret_cast< CanvasGradient*>(_a[0]) = _r; } break; case 9: { CanvasGradient _r = createRadialGradient((*reinterpret_cast< qreal(*)>(_a[1])),(*reinterpret_cast< qreal(*)>(_a[2])),(*reinterpret_cast< qreal(*)>(_a[3])),(*reinterpret_cast< qreal(*)>(_a[4])),(*reinterpret_cast< qreal(*)>(_a[5])),(*reinterpret_cast< qreal(*)>(_a[6]))); if (_a[0]) *reinterpret_cast< CanvasGradient*>(_a[0]) = _r; } break; case 10: clearRect((*reinterpret_cast< qreal(*)>(_a[1])),(*reinterpret_cast< qreal(*)>(_a[2])),(*reinterpret_cast< qreal(*)>(_a[3])),(*reinterpret_cast< qreal(*)>(_a[4]))); break; case 11: fillRect((*reinterpret_cast< qreal(*)>(_a[1])),(*reinterpret_cast< 
qreal(*)>(_a[2])),(*reinterpret_cast< qreal(*)>(_a[3])),(*reinterpret_cast< qreal(*)>(_a[4]))); break; case 12: strokeRect((*reinterpret_cast< qreal(*)>(_a[1])),(*reinterpret_cast< qreal(*)>(_a[2])),(*reinterpret_cast< qreal(*)>(_a[3])),(*reinterpret_cast< qreal(*)>(_a[4]))); break; case 13: beginPath(); break; case 14: closePath(); break; case 15: moveTo((*reinterpret_cast< qreal(*)>(_a[1])),(*reinterpret_cast< qreal(*)>(_a[2]))); break; case 16: lineTo((*reinterpret_cast< qreal(*)>(_a[1])),(*reinterpret_cast< qreal(*)>(_a[2]))); break; case 17: quadraticCurveTo((*reinterpret_cast< qreal(*)>(_a[1])),(*reinterpret_cast< qreal(*)>(_a[2])),(*reinterpret_cast< qreal(*)>(_a[3])),(*reinterpret_cast< qreal(*)>(_a[4]))); break; case 18: bezierCurveTo((*reinterpret_cast< qreal(*)>(_a[1])),(*reinterpret_cast< qreal(*)>(_a[2])),(*reinterpret_cast< qreal(*)>(_a[3])),(*reinterpret_cast< qreal(*)>(_a[4])),(*reinterpret_cast< qreal(*)>(_a[5])),(*reinterpret_cast< qreal(*)>(_a[6]))); break; case 19: arcTo((*reinterpret_cast< qreal(*)>(_a[1])),(*reinterpret_cast< qreal(*)>(_a[2])),(*reinterpret_cast< qreal(*)>(_a[3])),(*reinterpret_cast< qreal(*)>(_a[4])),(*reinterpret_cast< qreal(*)>(_a[5]))); break; case 20: rect((*reinterpret_cast< qreal(*)>(_a[1])),(*reinterpret_cast< qreal(*)>(_a[2])),(*reinterpret_cast< qreal(*)>(_a[3])),(*reinterpret_cast< qreal(*)>(_a[4]))); break; case 21: arc((*reinterpret_cast< qreal(*)>(_a[1])),(*reinterpret_cast< qreal(*)>(_a[2])),(*reinterpret_cast< qreal(*)>(_a[3])),(*reinterpret_cast< qreal(*)>(_a[4])),(*reinterpret_cast< qreal(*)>(_a[5])),(*reinterpret_cast< bool(*)>(_a[6]))); break; case 22: fill(); break; case 23: stroke(); break; case 24: clip(); break; case 25: { bool _r = isPointInPath((*reinterpret_cast< qreal(*)>(_a[1])),(*reinterpret_cast< qreal(*)>(_a[2]))); if (_a[0]) *reinterpret_cast< bool*>(_a[0]) = _r; } break; case 26: drawImage((*reinterpret_cast< DomImage*(*)>(_a[1])),(*reinterpret_cast< qreal(*)>(_a[2])),(*reinterpret_cast< 
qreal(*)>(_a[3]))); break; case 27: drawImage((*reinterpret_cast< DomImage*(*)>(_a[1])),(*reinterpret_cast< qreal(*)>(_a[2])),(*reinterpret_cast< qreal(*)>(_a[3])),(*reinterpret_cast< qreal(*)>(_a[4])),(*reinterpret_cast< qreal(*)>(_a[5]))); break; case 28: drawImage((*reinterpret_cast< DomImage*(*)>(_a[1])),(*reinterpret_cast< qreal(*)>(_a[2])),(*reinterpret_cast< qreal(*)>(_a[3])),(*reinterpret_cast< qreal(*)>(_a[4])),(*reinterpret_cast< qreal(*)>(_a[5])),(*reinterpret_cast< qreal(*)>(_a[6])),(*reinterpret_cast< qreal(*)>(_a[7])),(*reinterpret_cast< qreal(*)>(_a[8])),(*reinterpret_cast< qreal(*)>(_a[9]))); break; case 29: { ImageData _r = getImageData((*reinterpret_cast< qreal(*)>(_a[1])),(*reinterpret_cast< qreal(*)>(_a[2])),(*reinterpret_cast< qreal(*)>(_a[3])),(*reinterpret_cast< qreal(*)>(_a[4]))); if (_a[0]) *reinterpret_cast< ImageData*>(_a[0]) = _r; } break; case 30: putImageData((*reinterpret_cast< ImageData(*)>(_a[1])),(*reinterpret_cast< qreal(*)>(_a[2])),(*reinterpret_cast< qreal(*)>(_a[3]))); break; default: ; } _id -= 31; } #ifndef QT_NO_PROPERTIES
 else if (_c == QMetaObject::ReadProperty) { void *_v = _a[0]; switch (_id) { case 0: *reinterpret_cast< qreal*>(_v) = globalAlpha(); break; case 1: *reinterpret_cast< QString*>(_v) = globalCompositeOperation(); break; case 2: *reinterpret_cast< QVariant*>(_v) = strokeStyle(); break; case 3: *reinterpret_cast< QVariant*>(_v) = fillStyle(); break; case 4: *reinterpret_cast< qreal*>(_v) = lineWidth(); break; case 5: *reinterpret_cast< QString*>(_v) = lineCap(); break; case 6: *reinterpret_cast< QString*>(_v) = lineJoin(); break; case 7: *reinterpret_cast< qreal*>(_v) = miterLimit(); break; case 8: *reinterpret_cast< qreal*>(_v) = shadowOffsetX(); break; case 9: *reinterpret_cast< qreal*>(_v) = shadowOffsetY(); break; case 10: *reinterpret_cast< qreal*>(_v) = shadowBlur(); break; case 11: *reinterpret_cast< QString*>(_v) = shadowColor(); break; } _id -= 12; } else if (_c == QMetaObject::WriteProperty) { void 
*_v = _a[0]; switch (_id) { case 0: setGlobalAlpha(*reinterpret_cast< qreal*>(_v)); break; case 1: setGlobalCompositeOperation(*reinterpret_cast< QString*>(_v)); break; case 2: setStrokeStyle(*reinterpret_cast< QVariant*>(_v)); break; case 3: setFillStyle(*reinterpret_cast< QVariant*>(_v)); break; case 4: setLineWidth(*reinterpret_cast< qreal*>(_v)); break; case 5: setLineCap(*reinterpret_cast< QString*>(_v)); break; case 6: setLineJoin(*reinterpret_cast< QString*>(_v)); break; case 7: setMiterLimit(*reinterpret_cast< qreal*>(_v)); break; case 8: setShadowOffsetX(*reinterpret_cast< qreal*>(_v)); break; case 9: setShadowOffsetY(*reinterpret_cast< qreal*>(_v)); break; case 10: setShadowBlur(*reinterpret_cast< qreal*>(_v)); break; case 11: setShadowColor(*reinterpret_cast< QString*>(_v)); break; } _id -= 12; } else if (_c == QMetaObject::ResetProperty) { _id -= 12; } else if (_c == QMetaObject::QueryPropertyDesignable) { _id -= 12; } else if (_c == QMetaObject::QueryPropertyScriptable) { _id -= 12; } else if (_c == QMetaObject::QueryPropertyStored) { _id -= 12; } else if (_c == QMetaObject::QueryPropertyEditable) { _id -= 12; } else if (_c == QMetaObject::QueryPropertyUser) { _id -= 12; } #endif // QT_NO_PROPERTIES
 return _id; }
/* Capture the screen area under the xfig canvas into PNG file `filename`.
 * The xfig windows are unmapped first so they do not occlude the capture,
 * the pixels and colormap are fetched via getImageData(), the windows are
 * remapped, and the result is encoded with write_png().
 * Returns True on success.
 * NOTE(review): the pixel buffer `data` passed to write_png()/free() is not
 * declared in this function — presumably a file-scope buffer filled by
 * getImageData(); confirm its lifetime and that freeing it here is correct. */
Boolean captureImage(Widget window, char *filename)	/* returns True on success */
{
    unsigned char Red[MAX_COLORMAP_SIZE], Green[MAX_COLORMAP_SIZE], Blue[MAX_COLORMAP_SIZE];
    int numcols;
    int captured;
    int width, height;
    Boolean status;
    FILE *pngfile;
    int type;

    if (!ok_to_write(filename, "EXPORT") )
        return(False);

    /* unmap the xfig windows, capture a png then remap our windows */
    XtUnmapWidget(tool);
    XtUnmapWidget(window);
    app_flush();

    /* capture the screen area */
    status = getImageData(&width, &height, &type, &numcols, Red, Green, Blue);

    /* make sure server is ungrabbed if we're debugging */
    app_flush();
    /* map our windows again */
    XtMapWidget(tool);
    XtMapWidget(window);

    if ( status == False ) {
        put_msg("Nothing Captured.");
        app_flush();
        captured = False;
    } else {
        /* encode the image and write to the file */
        put_msg("Writing screenshot to PNG file...");
        app_flush();
        if ((pngfile = fopen(filename,"wb"))==0) {
            file_msg("Cannot open PNG file %s for writing",filename);
            put_msg("Cannot open PNG file %s for writing",filename);
            captured = False;
        } else {
            /* write the png file */
            if (!write_png(pngfile, data, type, Red, Green, Blue, numcols, width, height))
                file_msg("Problem writing PNG file from screen capture");
            fclose(pngfile);
            captured = True;
        }
        free(data);
    }
    return ( captured );
}