// Zero every byte of a rectangular region of a GWorld's pixel buffer,
// including the alpha channel (QuickDraw's EraseRect leaves alpha alone).
//
//   gWorld : offscreen world whose pixels are cleared
//   pRect  : region to clear, in pixmap coordinates; the caller must keep
//            it inside the pixmap bounds
//
// NOTE(review): the inner loop stores one 32-bit zero per pixel, so this is
// only correct for a 32-bit deep GWorld -- confirm at the call sites.
void EraseRectAndAlpha(GWorldPtr gWorld, Rect *pRect)
{
    PixMapHandle pixMap = GetGWorldPixMap(gWorld);

    LockPixels(pixMap);

    // Mask off the flag bits of rowBytes; use the accessor consistently
    // (the original mixed GetPixRowBytes with a raw (**pixMap).rowBytes read).
    long rowBytes = GetPixRowBytes(pixMap) & 0x3fff;
    long rows = pRect->bottom - pRect->top;
    long cols = pRect->right - pRect->left;

    // First byte of the first row to clear.
    Ptr rowBaseAddr = GetPixBaseAddr(pixMap)
                      + rowBytes * pRect->top
                      + pRect->left * GetPixDepth(pixMap) / 8;

    // for-loops instead of do/while: the original underflowed its counters
    // and overwrote unrelated memory when handed an empty or inverted rect.
    for (long row = 0; row < rows; row++) {
        UInt32 *pixelPtr = (UInt32 *)rowBaseAddr;
        for (long col = 0; col < cols; col++)
            *pixelPtr++ = 0;            // clears colour and alpha in one store
        rowBaseAddr += rowBytes;
    }

    UnlockPixels(pixMap);
} // EraseRectAndAlpha
// left here for compatability ImageHandle EncodeTheRect(PixMapHandle thePixMap,Rect *theRect,short clearCol,short shadowCol) { EncodeRec theEncode=emptyEncodeRec; BCRec theRec; LockPixels(thePixMap); theEncode.theRec=&theRec; theRec.world=0L; theRec.pixMap=thePixMap; theRec.baseAddr=(unsigned char *)GetPixBaseAddr(thePixMap); theRec.rowBytes=(**thePixMap).rowBytes&0x3FFF; theEncode.theImage=0L; theEncode.theRect=theRect; theEncode.clearCol=clearCol; theEncode.shadowCol=shadowCol; theEncode.shrinkHandle=true; theEncode.shrinkRect=true; EckerEncodeTheRect(&theEncode); UnlockPixels(thePixMap); return theEncode.theImage; }
// Blit the offscreen image (global gPixmap) into the window (global gWindow),
// then draw a grid over the top-left 20x20 pixels showing each pixel's first
// byte as a number.
void drawPixelImageData()
{
  int row, col;
  Rect rect;
  unsigned char value;
  char *image;
  int index = 0;
  Str255 string;
  RGBColor color = { 32000, 32000, 32000 };   // mid grey for the overlay
  //Byte mode;
  Rect tempRect1;

  ForeColor( blackColor );
  SetRect( &rect, 0, 0, 20, 20 );
  /* For this example, let's just use only the upper left corner of the image. */

  // Draw the offscreen image to the screen to see what it looks like.
  //CopyBits( (BitMap *)*gPixmap, &gWindow->portBits, &rect,
  //          &gWindow->portRect, srcCopy, 0 );
  //(**gPixmap).rowBytes ^= 0x8000;
  CopyBits( (BitMap *)*gPixmap, GetPortBitMapForCopyBits(GetWindowPort(gWindow)), &rect, GetPortBounds(GetWindowPort(gWindow), &tempRect1), srcCopy, 0 );
  //(**gPixmap).rowBytes ^= 0x8000;

  RGBForeColor( &color );

  // Again, set the pointer to the beginning of the pixel image.
  image = GetPixBaseAddr( gPixmap );

  /***************************************************************/
  /* Finally let's display the pixel values on top of the image. */
  /***************************************************************/

  /* Loop through the first 20 rows of the pixel image. */
  for (row = 0; row < rect.bottom; row++)
  {
    // Loop through the first 20 columns of the pixel image.
    for (index = 0, col = 0; col < rect.right; col++)
    {
      // Get the value at this index into the pixel image.
      value = (unsigned char)*(image + index);

      // Draw a cell border, then the value as text inside the cell.
      MoveTo( col * 30, row * 20 );
      LineTo( col * 30, (row + 1) * 20 );
      LineTo( (col + 1) * 30, (row + 1) * 20 );
      MoveTo( (col * 30) + 6, (row * 20) + 14 );
      NumToString( (long)value, string );
      DrawString( string );
      index++;
    }
    // Advance to the next row of the pixel image by rowBytes, not by width
    // -- rowBytes may include padding.
    image += ((**gPixmap).rowBytes & 0x7fff);
  }
}
// CompressRLE // Main compress routine, this function will call the appropriate RLE compression // method depending on the pixel depth of the source image. OSErr CompressPixMapRLE(PixMapHandle pixMapHdl, Ptr compressBuffer, Size *compressBufferSizePtr) { Handle hdl = NULL; Ptr tempPtr = NULL,srcData; Ptr pixBaseAddr = GetPixBaseAddr(pixMapHdl); OSType pixelFormat = GETPIXMAPPIXELFORMAT(*pixMapHdl); int depth = QTGetPixelSize(pixelFormat); long rowBytes = QTGetPixMapHandleRowBytes(pixMapHdl); int width = (**pixMapHdl).bounds.right - (**pixMapHdl).bounds.left; int i, height = (**pixMapHdl).bounds.bottom - (**pixMapHdl).bounds.top; Size widthByteSize = (depth * (long)width + 7) >> 3; OSErr err = noErr; // need to remove padding between rows? if(widthByteSize != rowBytes){ // Make a temp buffer for the source hdl = NewHandle(height * widthByteSize); err = MemError(); if (err) goto bail; HLock(hdl); srcData = tempPtr = *hdl; // Get rid of row bytes padding for (i = 0; i < height; i++) { BlockMoveData(pixBaseAddr, tempPtr, widthByteSize); tempPtr += widthByteSize; pixBaseAddr += rowBytes; } }else srcData = pixBaseAddr; // Compress switch (depth) { case 1: CompressRLE8((UInt8*)srcData, height * widthByteSize, compressBuffer, compressBufferSizePtr); break; case 8: CompressRLE8((UInt8*)srcData, height * widthByteSize, compressBuffer, compressBufferSizePtr); break; case 16: CompressRLE16((UInt16*)srcData, height * (widthByteSize >> 1), compressBuffer, compressBufferSizePtr); break; case 32: CompressRLE32((UInt32*)srcData, height * (widthByteSize >> 2), compressBuffer, compressBufferSizePtr); break; } bail: if (hdl) DisposeHandle(hdl); return err; }
// Convert one rendered RGBA frame into QuickTime's ARGB GWorld layout,
// compress it with the configured sequence compressor, and append it to the
// export movie's media.
//
//   frame   : frame number (not used by the body)
//   pixels  : source RGBA pixels, rectx * recty of them
//   reports : sink for CheckError() diagnostics
//
// NOTE(review): the swizzle loop advances `to` 4 bytes per pixel with no
// per-row adjustment, so it assumes thePixMap has no row padding -- confirm.
static void QT_DoAddVideoSamplesToMedia (int frame, int *pixels, int rectx, int recty, ReportList *reports)
{
  OSErr err = noErr;
  Rect imageRect;
  int index;
  int boxsize;
  unsigned char *from, *to;
  short syncFlag;
  long dataSize;
  Handle compressedData;
  Ptr myPtr;

  //copy and flip renderdata
  memcpy(qtexport->ibuf2->rect, pixels, 4*rectx*recty);
  IMB_flipy(qtexport->ibuf2);

  //get pointers to parse bitmapdata
  myPtr = GetPixBaseAddr(qtexport->thePixMap);
  imageRect = (**qtexport->thePixMap).bounds;

  from = (unsigned char *) qtexport->ibuf2->rect;
  to = (unsigned char *) myPtr;

  //parse RGBA bitmap into Quicktime's ARGB GWorld
  // per-pixel byte shuffle: A <- src[3], R <- src[0], G <- src[1], B <- src[2]
  boxsize = rectx * recty;
  for( index = 0; index < boxsize; index++) {
    to[0] = from[3];
    to[1] = from[0];
    to[2] = from[1];
    to[3] = from[2];
    to +=4, from += 4;
  }

  err = SCCompressSequenceFrame(qtdata->theComponent, qtexport->thePixMap, &imageRect, &compressedData, &dataSize, &syncFlag);
  CheckError(err, "SCCompressSequenceFrame error", reports);

  err = AddMediaSample(qtexport->theMedia, compressedData, 0, dataSize, qtdata->duration, (SampleDescriptionHandle)qtexport->anImageDescription, 1, syncFlag, NULL);
  CheckError(err, "AddMediaSample error", reports);
}
//-------------------------------------------------------- void ofQtVideoSaver::setGworldPixel( GWorldPtr gwPtr, int r, int g, int b, short x, short y){ Ptr gwAddress, gwAddressBase; long gwWidth; char red, blue, green; gwAddressBase = GetPixBaseAddr( GetGWorldPixMap( gwPtr ) ); /* Get head address of offscreen */ gwWidth = ( **GetGWorldPixMap( gwPtr ) ).rowBytes & 0x3fff; /* Get with of offscreen */ gwAddress = gwAddressBase + ( x * 3 ) + ( y * gwWidth ); /* Get adress for current pixel */ *gwAddress = (unsigned char)r; /* Put red and move address forward */ *(gwAddress+1) = (unsigned char)g; /* Put green and move address forward */ *(gwAddress+2) = (unsigned char)b; /* Put blue */ }
static void expand_horizontality( PixMapHandle pixmap, int width, int height )
{
  // Doubles the leftmost `width` bytes of each row in place, so every source
  // byte ends up duplicated into two adjacent bytes (for @TON(2)).
  // The pixmap must already be locked by the caller.
  int row, col;
  Ptr base = GetPixBaseAddr(pixmap);
  int stride = (**pixmap).rowBytes & 0x1FFF;

  for (row = 0; row < height; row++) {
    Ptr line = base + stride * row;
    // Walk right-to-left so each source byte is read before its slot can be
    // clobbered by a later (larger-index) write.
    for (col = width; col-- > 0; ) {
      char v = line[col];
      line[col * 2] = v;
      line[col * 2 + 1] = v;
    }
  }
}
//////////////////////////////////////////////////////////////////////////////// // virtual unsigned char* LLMediaImplQuickTime::getMediaData() { unsigned char* ptr = NULL; if ( mGWorldHandle ) { PixMapHandle pix_map_handle = GetGWorldPixMap( mGWorldHandle ); ptr = ( unsigned char* )GetPixBaseAddr( pix_map_handle ); }; return ptr; }
// A handy routine which will return the colour of a pixel in a pixmap // It's not effiecient enough to be called a lot as it works out some values every time even // though they don't change. short GPixelColour(PixMapHandle thePixMap,short x,short y) { Ptr pixBase; short rowBytes,colour; LockPixels(thePixMap); pixBase=GetPixBaseAddr(thePixMap); rowBytes=(**thePixMap).rowBytes & 0x3fff; colour=(unsigned char)*(pixBase + y * rowBytes + x); UnlockPixels(thePixMap); return colour; }
// Lazily load a GWorld image (via OpenImage) the first time this is called,
// wrap its pixels in a CGImage, and draw it into `context` at (x - 20, y)
// with a fixed 40-point width and proportional height.
//
// NOTE(review): the statics cache one image for the process lifetime, so this
// is not thread-safe; the CGColorSpaceCreateDeviceRGB() below is created once
// and never released. If OpenImage() fails, imageRef stays NULL and
// CGContextDrawImage is still called -- confirm that is acceptable.
void drawGraphic( CGContextRef context, float x, float y )
{
  static GWorldPtr imageGW = NULL;                   // cached offscreen image
  static CGImageRef imageRef = NULL;                 // CGImage wrapping its pixels
  static CGDataProviderRef dataProviderRef = NULL;
  Rect bounds;
  static size_t width;
  static size_t height;
  size_t bitsPerComponent;
  size_t bitsPerPixel;
  size_t bytesPerRow;
  PixMapHandle pmh;

  // Load the image if we haven't already
  if ( NULL == imageGW ) {
    // Load and create the GWorld
    imageGW = OpenImage();

    if ( imageGW != NULL ) {
      GetPortBounds( imageGW, &bounds );
      width = bounds.right - bounds.left;
      height = bounds.bottom - bounds.top;
      pmh = GetPortPixMap( imageGW );
      bitsPerComponent = (**pmh).cmpSize;
      bitsPerPixel = (**pmh).pixelSize;
      bytesPerRow = GetPixRowBytes( pmh );

      // Lock the pixels; the data provider references them directly, so they
      // must stay put for the life of dataProviderRef.
      LockPixels( pmh );
      dataProviderRef = CGDataProviderCreateWithData( NULL, GetPixBaseAddr( pmh ), height * bytesPerRow, releaseData );

      // Create the imageRef for that GWorld
      imageRef = CGImageCreate( width, height, bitsPerComponent, bitsPerPixel, bytesPerRow, CGColorSpaceCreateDeviceRGB(), kCGImageAlphaPremultipliedFirst/*kCGImageAlphaNone*/, dataProviderRef, NULL, 0, kCGRenderingIntentDefault );
    }
  }

  // Draw the image at 0,0
  CGContextDrawImage( context, CGRectMake( x - 20, y, 40, 40 * height / width ), imageRef );
}
// Draw a line from (x1,y1) to (x2,y2) into an 8-bit pixmap using Bresenham's
// algorithm, honouring a plane mask and a repeating 8-bit line style.
//
//   color : pixel value to write (masked by pmask)
//   style : 8-bit pattern; bit N set -> Nth pixel along the line is drawn
//   pmask : plane mask -- only these bits of each destination byte change
//
// Out-of-bounds pixels are clipped per-dot by the DOT macro.
void dev_line(int x1, int y1, int x2, int y2, int color, int style, int pmask, PixMapHandle pixmap )
{
  int i, dx, dy, s, step;
  int rowBytes= (**pixmap).rowBytes & 0x1FFF;
  Ptr baseAdr= GetPixBaseAddr(pixmap);
  Rect bounds= (**pixmap).bounds;
  Point pt;
  static const int mask[8]={0x80,0x40,0x20,0x10, 0x08,0x04,0x02,0x01};
  int style_count=0;

// Plot one dot at (x,y): bounds-check, then dst = (dst & ~pmask) | col.
#define DOT(x,y,col) {Ptr p=&baseAdr[y*rowBytes+x]; pt.h=x;pt.v=y; \
  if(PtInRect(pt,&bounds)){(*p)&=~pmask; (*p)|=col;} }

  color &= pmask;
  // step is +1 when both coordinates increase together, else -1.
  step= ( (x1<x2)==(y1<y2) ) ? 1:-1;
  dx= abs(x2-x1); dy=abs(y2-y1);

  if( dx>dy ){
    // x-major: walk x, accumulate the y error in s.
    if( x1>x2 ){ x1=x2; y1=y2; }   // start from the leftmost endpoint
    if(style & mask[style_count]){ DOT(x1,y1,color); }
    //else { DOT(x1,y1,0); }
    style_count= (style_count+1)%8;
    s= dx/2;
    for(i=x1+1; i<x1+dx; i++){
      s-= dy;
      if( s<0 ){ s+=dx; y1+=step;}
      if(style & mask[style_count]){ DOT(i,y1,color); }
      //else{ DOT(i,y1,0); }
      style_count= (style_count+1)%8;
    }
  }else{
    // y-major: walk y, accumulate the x error in s.
    if( y1>y2 ){ x1=x2; y1=y2; }   // start from the topmost endpoint
    if(style & mask[style_count]){ DOT(x1,y1,color); }
    //else{ DOT(x1,y1,0); }
    style_count= (style_count+1)%8;
    s= dy/2;
    for(i=y1+1; i<y1+dy; i++){
      s-= dx;
      if( s<0 ){ s+=dy; x1+=step;}
      if(style & mask[style_count]){ DOT(x1,i,color); }
      //else{ DOT(x1,i,0); }
      style_count= (style_count+1)%8;
    }
  }
}
// QuickTime drawing-complete callback (refCon carries the offscreen GWorld).
// Copies the freshly drawn frame out of the GWorld into a new h x w x 3
// MATLAB uint8 array stored in the global `framedata`.
//
// NOTE(review): the shifts below read R from bits 8-15, G from 16-23 and
// B from 24-31 of each 32-bit pixel -- this matches a particular (big-endian
// xRGB) pixel layout; confirm against the GWorld's actual pixel format.
static pascal OSErr DrawCompleteProc (Movie theMovie, long refCon)
{
  long int h;
  long int w;
  int y, x;
  GWorldPtr offWorld = (GWorldPtr) refCon;
  Rect bounds;
  Ptr baseAddr;
  long rowBytes;
  uint8_t* imbuf;
  mwSize dims[3];

  GetPixBounds(GetGWorldPixMap(offWorld), &bounds);
  baseAddr = GetPixBaseAddr(GetGWorldPixMap(offWorld));
  rowBytes = GetPixRowBytes(GetGWorldPixMap(offWorld));

  h = rint(bounds.bottom - bounds.top);
  w = rint(bounds.right - bounds.left);

  dims[0] = h; dims[1] = w; dims[2] = 3;
  framedata = mxCreateNumericArray(3, dims, mxUINT8_CLASS, mxREAL);
  imbuf = (uint8_t*) mxGetData(framedata);

  // Retrieve the pixel data, unpack the RGB values and copy
  for (y = 0; y < h; ++y) {
    long *p;
    p = (long *) (baseAddr + rowBytes * (long) y);
    for (x = 0; x < w; ++x) {
      UInt32 color = *(long *)((long) p + 4 * (long) x);;
      long B = (color & 0xFF000000) >> 24;
      long G = (color & 0x00FF0000) >> 16;
      long R = (color & 0x0000FF00) >> 8;
      // MATLAB arrays are column-major: element (y, x, plane) lives at
      // y + x*h + plane*h*w.
      imbuf[y + x * h + 0 * (h * w)] = R;
      imbuf[y + x * h + 1 * (h * w)] = G;
      imbuf[y + x * h + 2 * (h * w)] = B;
    }
  }

  return noErr;
}
// Rasterise a PICT into a newly allocated 32-bit GWorld and return it
// (NULL when ph is NULL or NewGWorld fails). The caller owns the GWorld.
// MR/HxX are Executor's handle-dereference/field accessor macros.
PRIVATE GWorldPtr gworld_from_pict (PicHandle ph)
{
  GWorldPtr retval;

  retval = NULL;
  if (ph)
    {
      CGrafPtr save_port;
      GDHandle save_device;
      Rect r;
      OSErr err;

      // Remember the current port/device so they can be restored below.
      GetGWorld (&save_port, &save_device);
      save_port = MR (save_port);
      save_device = MR (save_device);

      r = HxX (ph, picFrame);   // the PICT's natural frame
      err = NewGWorld (&retval, 32, &r, NULL, NULL, keepLocal);
      if (retval)
        {
          PixMapHandle pm;

          retval = MR (retval);
          SetGWorld (retval, NULL);
          pm = GetGWorldPixMap (retval);
          LockPixels (pm);
          DrawPicture (ph, &r);
#if 0
#warning THIS INTERFERES WITH PICT PASTING
          // Debug scribble: blank the world, then fill its top half.
          {
            char *p;

            EraseRect (&r);
            p = GetPixBaseAddr (pm);
            memset (p, 0x00, 4 * RECT_HEIGHT(&r) * RECT_WIDTH (&r));
            memset (p, 0xFF, 4 * RECT_HEIGHT(&r) * RECT_WIDTH (&r) / 2);
          }
#endif
          UnlockPixels (pm);
        }
      SetGWorld (save_port, save_device);
    }
  return retval;
}
// Copy a GWorld's 32-bit pixels into a freshly allocated 32-bit SDL surface
// and return it (NULL when gp is NULL). The caller owns the surface.
//
// NOTE(review): the single memcpy assumes the pixmap has no row padding
// (rowBytes == 4 * pixels_per_line) and that the surface pitch matches --
// confirm for the GWorlds this is fed.
PRIVATE SDL_Surface * surface_from_gworld (GWorldPtr gp)
{
  SDL_Surface *retval;

  if (!gp)
    retval = NULL;
  else
    {
      int pixels_per_line;
      int n_lines;
      PixMapHandle pm;
      // Channel masks requested for the SDL surface.
      enum { A = 0x00000000, R = 0x0000FF00, G = 0x00FF0000, B = 0xFF000000 };
      mac_pixel32 *ip;
      sdl_pixel24 *op;
      Rect r;

      pm = GetGWorldPixMap (gp);
      LockPixels (pm);
      r = PIXMAP_BOUNDS (pm);
      n_lines = RECT_HEIGHT (&r);
      pixels_per_line = RECT_WIDTH (&r);
      retval = SDL_AllocSurface (SDL_SWSURFACE, pixels_per_line, n_lines, 32, R, G, B, A);
      SDL_LockSurface (retval);
      op = SDL_Surface_pixels (retval);
      ip = (typeof (ip)) GetPixBaseAddr (pm);
      memcpy (op, ip, n_lines * pixels_per_line * sizeof *ip);
#if 0
#warning THIS IS BROKEN
      memset (op, 0x00, 4 * n_lines * pixels_per_line);
      memset (op, 0xFF, 4 * n_lines * pixels_per_line / 2);
#endif
      SDL_UnlockSurface (retval);
      UnlockPixels (pm);
    }
  return retval;
}
// Fill a rectangle of an 8-bit pixmap with `color`, touching only the bits
// selected by `pmask`: each byte becomes (dst & ~pmask) | (color & pmask).
//
//   pixmap : destination; the caller must have the pixels locked
//   rect   : area to fill, clipped against the pixmap bounds below
//   color  : fill value (masked by pmask)
//   pmask  : bit-plane mask selecting which bits of each byte are written
void dev_box(PixMapHandle pixmap, Rect rect, int color, int pmask)
{
  int rowBytes= (**pixmap).rowBytes & 0x1FFF, x, y1, tmp;
  Ptr baseAdr= GetPixBaseAddr(pixmap);
  Rect bounds= (**pixmap).bounds;

  // Clip the request against the pixmap bounds; bail out when the rectangle
  // lies entirely outside.
  //chech src top
  if( rect.top<bounds.top ){ rect.top=bounds.top; }
  if( rect.top>bounds.bottom ) return;
  //check left
  if( rect.left <bounds.left ){ rect.left= bounds.left; }
  if( rect.left>bounds.right ) return;
  //chech src bottom
  if( rect.bottom>bounds.bottom ){ rect.bottom= bounds.bottom; }
  if( rect.bottom<bounds.top ) return;
  //check right
  if( rect.right >bounds.right ){ rect.right= bounds.right; }
  if( rect.right<bounds.left ) return;

  // (The original also computed width/hight here; they were never used,
  // so they are gone.)
  color &= pmask;
  for( y1=rect.top; y1<rect.bottom; y1++ ){
    for( x=rect.left; x<rect.right; x++){
      tmp=baseAdr[y1*rowBytes+x];
      tmp &= ~pmask;
      tmp |= color;
      baseAdr[y1*rowBytes+x]=tmp;
    }
  }
}
//------------------------------------------------------------------------ void pixel_map::draw(WindowRef window, const Rect *device_rect, const Rect *pmap_rect) const { if(m_pmap == nil || m_buf == NULL) return; PixMapHandle pm = GetGWorldPixMap (m_pmap); CGrafPtr port = GetWindowPort (window); Rect dest_rect; // Again, I used the Quicktime version. // Good old 'CopyBits' does better interpolation when scaling // but does not support all pixel depths. MacSetRect (&dest_rect, 0, 0, this->width(), this->height()); ImageDescriptionHandle image_description; MakeImageDescriptionForPixMap (pm, &image_description); if (image_description != nil) { DecompressImage (GetPixBaseAddr (pm), image_description, GetPortPixMap (port), nil, &dest_rect, ditherCopy, nil); DisposeHandle ((Handle) image_description); } }
// Scan an 8-bit GWorld and return the first colour index (0..255) that no
// pixel uses, or -1 when every index appears at least once.
// Not efficient enough to call often: it walks the whole pixmap and
// re-derives rowBytes/width/height on every call.
short FindGWUnusedCol(GWorldPtr theWorld)
{
  Ptr pixBase;
  short rowBytes,colour,xCount,yCount;
  // BUG FIX: there are 256 possible colour indices, but the original
  // declared colCount[255] and then indexed 0..255 below, overrunning the
  // array by one slot (stack corruption / bogus result for index 255).
  short colCount[256];
  short width,height;
  PixMapHandle thePixMap=GetGWorldPixMap(theWorld);

  LockPixels(thePixMap);
  pixBase=GetPixBaseAddr(thePixMap);
  rowBytes=(**thePixMap).rowBytes & 0x3fff;   // mask off pixmap flag bits
  width=(theWorld->portRect.right)-(theWorld->portRect.left);
  height=(theWorld->portRect.bottom)-(theWorld->portRect.top);

  // clear all the counts
  for(xCount=0; xCount<=255; xCount++)
    colCount[xCount]=0;

  // histogram every pixel (one byte per pixel -- assumes an 8-bit world)
  for (yCount=0; yCount<height; yCount++)
  {
    for(xCount=0; xCount<width; xCount++)
    {
      colour=(unsigned char)*(pixBase + yCount * rowBytes + xCount);
      colCount[colour]++;
    }
  }
  UnlockPixels(thePixMap);

  // scan through for a 0 use col
  for(xCount=0; xCount<=255; xCount++)
  {
    if (colCount[xCount]==0)
      return xCount;
  }
  return -1; // not found
}
short LoadMapPICT( PicHandle pict, unsigned long mapID, unsigned long mapSizeX, unsigned long mapSizeY, TQ3StoragePixmap *bMap) { unsigned long *textureMap; unsigned long *textureMapAddr; unsigned long *pictMap; unsigned long pictMapAddr; register unsigned long row; register unsigned long col; Rect rectGW; GWorldPtr pGWorld; PixMapHandle hPixMap; unsigned long pictRowBytes; QDErr err; GDHandle oldGD; GWorldPtr oldGW; short success; mapID; /* unused argument */ /* save current port */ GetGWorld(&oldGW, &oldGD); /* create the GWorld */ SetRect(&rectGW, 0, 0, (unsigned short)mapSizeX, (unsigned short)mapSizeY); err = NewGWorld(&pGWorld, 32, &rectGW, 0, 0, useTempMem); if (err != noErr) return 0; success = 1; hPixMap = GetGWorldPixMap(pGWorld); pictMapAddr = (unsigned long)GetPixBaseAddr (hPixMap); pictRowBytes = (unsigned long)(**hPixMap).rowBytes & 0x3fff; /* put the PICT into the window */ SetGWorld(pGWorld, nil); LockPixels(hPixMap); EraseRect(&rectGW); DrawPicture(pict, &rectGW); /* allocate an area of memory for the texture */ textureMap = (unsigned long *)malloc(mapSizeX * mapSizeY * sizeof(unsigned long)); if (textureMap == NULL) { success = 0; goto bail; } /* bMap->image = (char *)textureMap; */ /* copy the PICT into the texture */ textureMapAddr = textureMap; for (row = 0L; row < mapSizeY; row++) { pictMap = (unsigned long *)(pictMapAddr + (pictRowBytes * row)); for (col = 0L; col < mapSizeX; col++) { *textureMap++ = (*pictMap++ | 0xff000000L); } } bMap->image = Q3MemoryStorage_New((const unsigned char *)textureMapAddr, mapSizeX * mapSizeY * sizeof(unsigned long)); if (bMap->image == NULL) { /* error */ success = 0; goto bail; } UnlockPixels(hPixMap); bMap->width = mapSizeX; bMap->height = mapSizeY; bMap->rowBytes = bMap->width * 4; bMap->pixelSize = 32; bMap->pixelType = kQ3PixelTypeRGB32; bMap->bitOrder = kQ3EndianBig; bMap->byteOrder = kQ3EndianBig; /* Free junk */ bail: SetGWorld(oldGW, oldGD); DisposeGWorld(pGWorld); if (textureMapAddr != NULL) 
free(textureMapAddr); return success; }
// Software CopyBits replacement for 8-bit pixmaps with sherry-style transfer
// modes. Clips srcRect/dstRect against both pixmaps' bounds, then copies row
// by row, choosing top-down or bottom-up order so overlapping copies within
// the same buffer don't trample unread source rows.
//
//   mode  : 0 / 0x10 = plain copy; 0x11 = transparent copy (skip `trans`
//           pixels); 0x30 / 0x31 = masked + transparent (sherry op=0x62)
//   trans : transparent pixel value for the 0x11 / 0x3x modes
//   pmask : plane mask applied by the per-row blitters
//   maskx, masky, maskdata : 1-bit mask pattern, row-major, maskx bits wide
//
// dstRect.right/bottom are ignored -- the copy size comes from srcRect.
//
// NOTE(review): the source right-edge clip also shrinks srcBounds.right;
// srcBounds is a local copy so the pixmap itself is unharmed, but it differs
// from the other three clip cases and looks unintentional -- confirm.
void MyCopyBits(PixMapHandle srcPixmap, PixMapHandle dstPixmap, Rect srcRect, Rect dstRect, short mode, int trans, int pmask, int maskx, int masky, const uint8 maskdata[])
{
  //I ignore destRect.right,bottom
  int srcRowBytes= (**srcPixmap).rowBytes & 0x1FFF, destRowBytes= (**dstPixmap).rowBytes & 0x1FFF, y1, y2, width,hight, cut, dy, maskwidth;
  Ptr srcAdr= GetPixBaseAddr(srcPixmap), dstAdr= GetPixBaseAddr(dstPixmap);
  Rect srcBounds= (**srcPixmap).bounds, dstBounds= (**dstPixmap).bounds;

  //check params
  //chech src top
  if( srcRect.top<srcBounds.top ){ cut= srcBounds.top-srcRect.top; srcRect.top+=cut; dstRect.top+=cut; }
  if( srcRect.top>srcBounds.bottom ) return;
  //check left
  if( srcRect.left <srcBounds.left ){ cut= srcBounds.left-srcRect.left; srcRect.left+= cut; dstRect.left+=cut; }
  if( srcRect.left>srcBounds.right ) return;
  //chech src bottom
  if( srcRect.bottom>srcBounds.bottom ){ cut= srcRect.bottom-srcBounds.bottom; srcRect.bottom-= cut; dstRect.bottom-=cut; }
  if( srcRect.bottom<srcBounds.top ) return;
  //check right
  if( srcRect.right >srcBounds.right ){ cut= srcRect.right-srcBounds.right; srcRect.right-= cut; srcBounds.right-= cut; }
  if( srcRect.right<srcBounds.left ) return;

  width=srcRect.right-srcRect.left;
  hight=srcRect.bottom-srcRect.top;

  //check dest
  //check top
  if( dstRect.top <dstBounds.top ){ cut= dstBounds.top-dstRect.top; srcRect.top+=cut; dstRect.top+=cut; }
  if( dstRect.top>dstBounds.bottom ) return;
  //check hight
  if( dstRect.top+hight>dstBounds.bottom ){ hight=dstBounds.bottom-dstRect.top; srcRect.bottom=srcRect.top+hight; }
  //check left
  if( dstRect.left <dstBounds.left ){ cut= dstBounds.left-dstRect.left; srcRect.left+= cut; dstRect.left+=cut; }
  if( dstRect.left>dstBounds.right ) return;
  //check width
  if( dstRect.left+width>dstBounds.right ) width=dstBounds.right-dstRect.left;

  switch( mode ){
  case 0://srcCopy
  case 0x10:
    {
      // Plain row copy; BlockMoveData_gmode applies the plane mask.
      pascal void (*func)(const void* srcPtr, void * destPtr,Size byteCount);
      if( pmask==0xFF ) func=BlockMoveData;
      else func= BlockMoveData_gmode;
      if( srcRect.top >= dstRect.top ){
        // destination is above the source: copy top-down
        for( y1=srcRect.top, y2=dstRect.top; y1<srcRect.bottom; y1++,y2++ ){
          func( &(srcAdr[y1*srcRowBytes+srcRect.left]), &(dstAdr[y2*destRowBytes+dstRect.left]), width);
        }
      }else{
        // destination is below the source: copy bottom-up
        for( y1=srcRect.bottom-1, y2=dstRect.top+hight-1; y1>=srcRect.top; y1--, y2-- ){
          func( &(srcAdr[y1*srcRowBytes+srcRect.left]), &(dstAdr[y2*destRowBytes+dstRect.left]), width);
        }
      }
    }
    break;
  case 0x11://transparent
    if( srcRect.top >= dstRect.top ){
      for( y1=srcRect.top, y2=dstRect.top; y1<srcRect.bottom; y1++,y2++ ){
        BlockMoveData_transparent( &(srcAdr[y1*srcRowBytes+srcRect.left]), &(dstAdr[y2*destRowBytes+dstRect.left]), width, pmask, trans);
      }
    }else{
      for( y1=srcRect.bottom-1, y2=dstRect.top+hight-1; y1>=srcRect.top; y1--, y2-- ){
        BlockMoveData_transparent( &(srcAdr[y1*srcRowBytes+srcRect.left]), &(dstAdr[y2*destRowBytes+dstRect.left]), width, pmask, trans);
      }
    }
    break;
  case 0x30: case 0x31: // masking & transparent
    //sherry op=0x62
    if( maskx<=0 ) break;
    maskwidth= ((maskx+7)& ~0x07)/8; // mask row width rounded up to whole bytes
    if( srcRect.top >= dstRect.top ){
      // dy cycles 0..masky-1 to repeat the mask pattern vertically
      for( y1=srcRect.top, y2=dstRect.top, dy=0; y1<srcRect.bottom; y1++,y2++,dy++,dy%=masky ){
        BlockMoveData_masktrans( &(srcAdr[y1*srcRowBytes+srcRect.left]), &(dstAdr[y2*destRowBytes+dstRect.left]), width, trans, maskx, &maskdata[maskwidth*dy]);
      }
    }else{
      for( y1=srcRect.bottom-1, y2=dstRect.top+hight-1,dy=hight-1; y1>=srcRect.top; y1--, y2--,dy+=masky-1, dy%=masky ){
        BlockMoveData_masktrans( &(srcAdr[y1*srcRowBytes+srcRect.left]), &(dstAdr[y2*destRowBytes+dstRect.left]), width, trans, maskx, &maskdata[maskwidth*dy]);
      }
    }
    break;
  }
}
// Create (or resize) the Mac "ROM" video mode: either a fullscreen window
// backed directly by the display device's pixmap, or a normal window backed
// by an offscreen GWorld that ROM_WindowUpdate later blits to the screen.
// Returns `current` on success, NULL on failure (GWorld or GL init).
static SDL_Surface *ROM_SetVideoMode(_THIS, SDL_Surface *current, int width, int height, int bpp, Uint32 flags)
{
	Rect wrect, orect;
#if TARGET_API_MAC_CARBON
	Rect tmprect;
#endif

	/* Free any previous video mode */
	ROM_UnsetVideoMode(this, current);

	/* Create the ROM window and SDL video surface */
	current->flags = 0;		/* Clear flags */
	current->w = width;
	current->h = height;
	SetRect(&wrect, 0, 0, width, height);
	if ( SDL_Window ) {
		/* If we recreate the window, don't move it around */
#if TARGET_API_MAC_CARBON
		orect = *GetWindowPortBounds(SDL_Window, &tmprect);
#else
		orect = SDL_Window->portRect;
#endif
		OffsetRect(&wrect, orect.left, orect.top);
	} else {
		/* Center the window the first time we show it */
		OffsetRect(&wrect, (SDL_modelist[0]->w-width)/2, (SDL_modelist[0]->h-height)/2);
	}

#if defined(__MACOSX__) && !USE_QUICKTIME
	/* Hum.. fullscreen mode is broken */
	flags &= ~SDL_FULLSCREEN;
#endif
	if ( (flags & SDL_FULLSCREEN) == SDL_FULLSCREEN ) {
		/* Create the fullscreen window and use screen bits */
		current->flags |= SDL_HWSURFACE|SDL_FULLSCREEN;
		if ( SDL_Window ) {
			DisposeWindow(SDL_Window);
		}
#if USE_QUICKTIME
		BeginFullScreen(&fullscreen_ctx, nil, 0,0, &SDL_Window, nil, 0);
#else
		SDL_Window = NewCWindow(nil, &wrect, "\p", true, plainDBox, (WindowPtr)-1, false, 0);
		ROM_HideMenuBar(this);
#endif
		/* Point the surface straight at the display device's pixmap;
		   the 0x3FFF mask strips the rowBytes flag bits. */
		current->pitch = (**(**SDL_Display).gdPMap).rowBytes & 0x3FFF;
		current->pixels = (**(**SDL_Display).gdPMap).baseAddr;
		this->UpdateRects = ROM_DirectUpdate;
	} else {
		GWorldPtr memworld;
		PixMapHandle pixmap;
		int style;

		/* Pick the window proc matching the requested frame style. */
		style = noGrowDocProc;
		if ( flags & SDL_NOFRAME ) {
			style = plainDBox;
			current->flags |= SDL_NOFRAME;
		} else
		if ( flags & SDL_RESIZABLE ) {
			style = zoomDocProc;
			current->flags |= SDL_RESIZABLE;
		}
		if ( SDL_Window && (style == current_style) ) {
			/* Resize existing window, if necessary */
			if ( ((orect.right-orect.left) != width) || ((orect.bottom-orect.top) != height) ) {
				SizeWindow(SDL_Window, width, height, false);
			}
		} else {
			/* Recreate the window in the new style */
			if ( SDL_Window ) {
				DisposeWindow(SDL_Window);
			}
			SDL_Window = NewCWindow(nil, &wrect, "\p", true, style, (WindowPtr)-1, true, 0);

			/* Set the window title, if any */
			{ char *title;
				SDL_WM_GetCaption(&title, NULL);
				if ( title ) {
					Mac_SetCaption(this, title, NULL);
				}
			}
		}
		current_style = style;
		SetPalette(SDL_Window, SDL_CPal, false);
		ActivatePalette(SDL_Window);
		/* Back the window with an offscreen GWorld (depth 0 = screen depth). */
		if ( NewGWorld(&memworld, 0,
#if TARGET_API_MAC_CARBON
			GetWindowPortBounds(SDL_Window, &tmprect),
#else
			&SDL_Window->portRect,
#endif
			SDL_CTab, nil, 0) != noErr ) {
			SDL_SetError("NewGWorld() failed");
			return(NULL);
		}
		SetWRefCon(SDL_Window, (long)memworld);
		pixmap = GetGWorldPixMap(memworld);
		/* Pixels stay locked for the lifetime of this mode so
		   current->pixels remains valid. */
		LockPixels(pixmap);
		current->pitch = (**pixmap).rowBytes & 0x3FFF;
		current->pixels = GetPixBaseAddr(pixmap);
		this->UpdateRects = ROM_WindowUpdate;
	}
	SetPortWindowPort(SDL_Window);
	SelectWindow(SDL_Window);

	/* Handle OpenGL support */
	if ( flags & SDL_OPENGL ) {
		if ( Mac_GL_Init(this) == 0 ) {
			current->flags |= SDL_OPENGL;
		} else {
			current = NULL;
		}
	}

	if ( (flags & SDL_HWPALETTE) && (flags & SDL_FULLSCREEN) )
		current->flags |= SDL_HWPALETTE;

	/* We're live! */
	return(current);
}
// Copy one RGB frame into the movie GWorld, compress it, and append it to
// the movie's media with a duration derived from frameLengthInSecs.
//
//   data              : w*h packed RGB (3 bytes/pixel), row-major
//   frameLengthInSecs : how long this frame should play
//
// NOTE(review): the normal path returns without restoring the saved GWorld
// (SetGWorld(pSavedPort, ...) only happens at `bail`) -- confirm whether
// that is intentional; `bail` also disposes the movie GWorld/handles, which
// ends the recording session.
void ofQtVideoSaver::addFrame(unsigned char* data, float frameLengthInSecs){

  if (!bSetupForRecordingMovie) return;

  /* Save the current GWorld and set the offscreen GWorld as current.
  ================================================================ */
  GetGWorld (&pSavedPort, &hSavedDevice);
  SetGWorld (pMovieGWorld, NULL);

  Ptr gwAddress, gwAddressBase;
  long gwWidth;

  gwAddressBase = GetPixBaseAddr( GetGWorldPixMap( pMovieGWorld ) );   /* Head address of the offscreen pixels */
  gwWidth = ( **GetGWorldPixMap( pMovieGWorld ) ).rowBytes & 0x3fff;   /* Bytes per row of the offscreen */
  ///gwAddress = gwAddressBase + ( x * 3 ) + ( y * gwWidth );  /* Get adress for current pixel */

  int myWidth = w*3;                  // source row width: 3 bytes per pixel
  unsigned char * myData = data;

  #ifdef TARGET_OSX
    //---------------------------------------------------------------
    // mac's have 32 bit no matter what, so we do it like this:
    // copy the 3 RGB bytes into bytes 1..3 of each 4-byte pixel,
    // leaving byte 0 (alpha/pad) untouched.
    for (int i = 0; i < h; i++){
      gwAddress = gwAddressBase + i * gwWidth;
      myData = data + i * myWidth;
      for (int j = 0; j < w; j++){
        memcpy(gwAddress+1, myData, 3);
        /*gwAddress[1] = myData[2];
        gwAddress[2] = myData[1];
        gwAddress[3] = myData[0];*/
        gwAddress+= 4;
        myData+= 3;
      }
    }
  #endif
  #ifdef TARGET_WIN32
    // 24-bit offscreen: rows can be copied wholesale.
    for (int i = 0; i < h; i++){
      gwAddress = gwAddressBase + i * gwWidth;
      myData = data + i * myWidth;
      memcpy(gwAddress, myData, myWidth);
    }
  #endif

  /* Compress the pixel map that has just been drawn on. Also resize
  and fill in the image description. Resulting image size can be
  discovered by consulting the image description field dataSize.
  ================================================================ */
  osErr = CompressImage (
    pixMapHandle,            /* the pixel map of the offscreen img */
    &rect,                   /* portion of the image to compress */
    codecQualityLevel,       /* quality as set via default or #defines */
    codecType,               /* same codec specifier as above */
    hImageDescription,       /* the created image description. */
    pCompressedData          /* ptr to bufr that receives cmp image*/
  );
  if (osErr != noErr)
  {
    printf ("CompressImage failed %d\n", osErr);
    goto bail;
  }

  /* Add the compressed image to the movie.
  ====================================== */
  // converting frame length to a time duration;
  float timeForQt = 1 / frameLengthInSecs;
  osErr = AddMediaSample (
    media,                   /* the media to add the image to. */
    hCompressedData,         /* the compressed image to add. */
    0,                       /* byte offs into data to begin readg */
    (**hImageDescription).dataSize, /* num bytes to be copied into media. */
    600 / timeForQt,         /* duration of the frame (media time) */
    (SampleDescriptionHandle) hImageDescription, /* image desc cast to */
                             /* a sample description since both */
                             /* both structures start with same */
                             /* fields. */
    1,                       /* num samples in the data buffer. */
    0,                       /* default flags */
    NULL                     /* ptr to receive media time in which */
                             /* the image was added. */
  );
  if (osErr != noErr)
  {
    printf ("AddMediaSample failed %d\n", osErr);
    //goto bail;
  }

  return;

bail:
  // Error path: restore the port and tear the recording objects down.
  SetGWorld (pSavedPort, hSavedDevice);
  if (hImageDescription != NULL) DisposeHandle ((Handle) hImageDescription);
  if (hCompressedData != NULL) DisposeHandle (hCompressedData);
  if (pMovieGWorld != NULL) DisposeGWorld (pMovieGWorld);
}
// Render one page into a printing context: draw the form bitmap (converting
// sub-32-bit forms through a 32-bit GWorld first) as a CGImage, then, on
// Carbon, append any accumulated PostScript via the EPS data-provider path.
//
//   printJob        : page geometry, form bitmap, depth, optional PostScript
//   printingContext : destination Quartz context supplied by the print loop
//
// Uses file-level globals stPixMap / stColorTable / gProviderCallbacks.
// The colorspace is created once and cached for the process lifetime.
OSStatus DrawPage(PrintingLogicPtr printJob,CGContextRef printingContext)
{
	OSStatus status = noErr;
	Rect dstRect = { 0, 0, 0, 0 };
	Rect srcRect = { 0, 0, 0, 0 };
	static CGColorSpaceRef colorspace = NULL;

	if (colorspace == NULL) {
		// Get the Systems Profile for the main display
		CMProfileRef sysprof = NULL;
		if (CMGetSystemProfile(&sysprof) == noErr) {
			// Create a colorspace with the systems profile
			colorspace = CGColorSpaceCreateWithPlatformColorSpace(sysprof);
			CMCloseProfile(sysprof);
		} else
			colorspace = CGColorSpaceCreateDeviceRGB();
	}

	// Destination rect: scaled page image offset into the paper.
	dstRect.top = printJob->offsetHeight;
	dstRect.left = printJob->offsetWidth;
	dstRect.right = printJob->width*printJob->scaleW + printJob->offsetWidth;
	dstRect.bottom = printJob->height*printJob->scaleH + printJob->offsetHeight;
	if (printJob->formBitMap != nil) {
		srcRect.right = printJob->width;
		srcRect.bottom = printJob->height;
		// Point the shared PixMap at the form bits; 0x8000 flags it as a
		// pixmap, the row calculation pads rows to 32-bit boundaries.
		HLock((Handle)stPixMap);
		(*stPixMap)->baseAddr = (void *) printJob->formBitMap;
		(*stPixMap)->rowBytes = (((((printJob->width * printJob->depth) + 31) / 32) * 4) & 0x1FFF) | 0x8000;
		(*stPixMap)->bounds = srcRect;
		(*stPixMap)->pixelSize = printJob->depth;
		if (printJob->depth<=8) {
			(*stPixMap)->cmpSize = printJob->depth;
			(*stPixMap)->cmpCount = 1;
		} else if (printJob->depth==16) {
			(*stPixMap)->cmpSize = 5;
			(*stPixMap)->cmpCount = 3;
		} else if (printJob->depth==32) {
			(*stPixMap)->cmpSize = 8;
			(*stPixMap)->cmpCount = 3;
		}
		{
			PixMapHandle thePix;
			int pitch;
			CGDataProviderRef provider;
			CGImageRef image;
			CGRect clip;
			Ptr baseAddr;

			if (printJob->depth == 32) {
				// 32-bit forms can be handed to Quartz directly.
				pitch = (((((printJob->width * printJob->depth) + 31) / 32) * 4) & 0x1FFF);
				baseAddr = (void *) printJob->formBitMap;
			} else {
				// Shallower forms: convert through a lazily created
				// 32-bit GWorld via CopyBits.
				if (printJob->aGWorld == NULL)
					NewGWorld(&printJob->aGWorld, 32, &srcRect, stColorTable, NULL, keepLocal+useTempMem+pixelsLocked);
				thePix = GetGWorldPixMap (printJob->aGWorld);
				CopyBits((BitMap *) *stPixMap, (BitMap *) *thePix, &srcRect, &srcRect, srcCopy, NULL);
				pitch = GetPixRowBytes(thePix);
				baseAddr = GetPixBaseAddr(thePix);
			}
			provider = CGDataProviderCreateDirectAccess((void*)baseAddr, pitch * (srcRect.bottom-srcRect.top), &gProviderCallbacks);
			image = CGImageCreate( srcRect.right-srcRect.left, srcRect.bottom-srcRect.top, 8 /* bitsPerComponent */, 32 /* bitsPerPixel */, pitch, colorspace, kCGImageAlphaNoneSkipFirst | (printJob->depth==32 ? kCGBitmapByteOrder32Host : 0), provider, NULL, 0, kCGRenderingIntentDefault);
			// Flip into Quartz's bottom-left origin within the page margins.
			clip = CGRectMake(dstRect.left+(printJob->pageRect.left-printJob->paperRect.left), (printJob->paperRect.bottom-printJob->pageRect.bottom) + (printJob->pageRect.bottom - printJob->pageRect.top) - (dstRect.bottom-dstRect.top) - dstRect.top, dstRect.right-dstRect.left, dstRect.bottom-dstRect.top);
			CGContextDrawImage(printingContext, clip, image);
			CGContextFlush(printingContext);
			CGImageRelease(image);
			CGDataProviderRelease(provider);
		}
		HUnlock((Handle)stPixMap);
	} else {
	}
#if TARGET_API_MAC_CARBON
	if (printJob->allowPostscript && printJob->postscriptLength > 0) {
		CGDataProviderRef provider,providerFakeImage;
		CGImageRef image,imageFake;
		CGRect clip;
		static long dirt=0xBBBBBBBB;   // 4 bytes of dummy pixel data for the fake image
		//PMPrinter currentPrinter = NULL;
		//CFArrayRef mimeTypes;
		//status = PMSessionGetCurrentPrinter(printJob->printSession,&currentPrinter);
		//status = PMPrinterGetMimeTypes(currentPrinter,printJob->printSettings,&mimeTypes);
		provider = CGDataProviderCreateDirectAccess((void*)printJob->postscript,printJob->postscriptLength, &gProviderCallbacks);
		providerFakeImage = CGDataProviderCreateDirectAccess((void*)&dirt,4, &gProviderCallbacks);
		//OK make fake image using tiny bit of data
		// PMCGImageCreateWithEPSDataProvider needs a preview CGImage; a 1x1
		// placeholder satisfies it.
		imageFake = CGImageCreate(1, 1, 8 /* bitsPerComponent */, 32 /* bitsPerPixel */, 4, colorspace, kCGImageAlphaNoneSkipFirst , providerFakeImage, NULL, 0, kCGRenderingIntentDefault);
		image = PMCGImageCreateWithEPSDataProvider(provider,imageFake);
		dstRect.top = 0;
		dstRect.left = 0;
		dstRect.bottom = CGImageGetHeight(image);
		dstRect.right = CGImageGetWidth(image);
		clip = CGRectMake(dstRect.left+(printJob->pageRect.left-printJob->paperRect.left), (printJob->paperRect.bottom-printJob->pageRect.bottom) + (printJob->pageRect.bottom - printJob->pageRect.top) - (dstRect.bottom-dstRect.top) - dstRect.top, dstRect.right-dstRect.left, dstRect.bottom-dstRect.top);
		//PMPrinterPrintWithProvider
		CGContextDrawImage(printingContext, clip, image);
		CGContextFlush(printingContext);
		CGImageRelease(image);
		CGImageRelease(imageFake);
		CGDataProviderRelease(provider);
		CGDataProviderRelease(providerFakeImage);
	}
#else
	return PrError();
#endif
	return status;
} // DrawPage
bool macvdCamera::initCamera(int width, int height, bool colour) { if (cameraID < 0) return false; this->width =width; this->height=height; this->colour=colour; this->bytes =(colour?3:1); OSErr err; if(err = vdgRequestSettings(pVdg)) { printf("camera setup cancelled\n"); //printf("vdgRequestSettings err=%d\n", err); return false; } long nameLength = 256; if (err = vdgGetDeviceNameAndFlags(pVdg, cameraName, &nameLength, NULL)) { sprintf(cameraName,"unknown camera"); } long milliSecPerFrame; Fixed framerate; long bytesPerSecond; if (err = vdgGetDataRate(pVdg, &milliSecPerFrame, &framerate, &bytesPerSecond)) { fps = 30; } else fps = (int)(framerate/65536); //fps = vdgGetFrameRate(pVdg); //printf("%d\n",fps); if(err = vdgPreflightGrabbing(pVdg)) { //printf("vdgPreflightGrabbing err=%d\n", err); return false; } vdImageDesc = (ImageDescriptionHandle)NewHandle(0); if (err = vdgGetImageDescription( pVdg, vdImageDesc)) { //printf("vdgGetImageDescription err=%d\n", err); return false; } this->width = (*vdImageDesc)->width; this->height = (*vdImageDesc)->height; //printf("%dx%d\n",this->width,this->height); dstPortBounds.left = 0; dstPortBounds.right = this->width; dstPortBounds.top = 0; dstPortBounds.bottom = this->height; if (err = createOffscreenGWorld( &dstPort, //k8IndexedGrayPixelFormat, //kYVYU422PixelFormat, //k24RGBPixelFormat, //kYUV420CodecType, k422YpCbCr8CodecType, &dstPortBounds)) { printf("createOffscreenGWorld err=%d\n", err); return false; } // Get buffer from GWorld pDstData = GetPixBaseAddr(GetGWorldPixMap(dstPort)); dstDataSize = GetPixRowBytes(GetGWorldPixMap(dstPort)) * (dstPortBounds.bottom - dstPortBounds.top); dstDisplayBounds = dstPortBounds; // Set the decompression destination to the offscreen GWorld if (err = vdgSetDestination( pVdg, dstPort )) { //printf("vdgSetDestination err=%d\n", err); return false; } buffer = new unsigned char[width*height*bytes]; return true; }
/*
 * gworld_from_surface -- create a new QuickDraw GWorld with the same
 * dimensions and depth as the given SDL surface and copy the surface's
 * pixels into it.  Only 8-bit indexed and 32-bit direct surfaces are
 * supported; any other depth logs a warning and yields NULL.
 *
 * Returns the new GWorld, or NULL on failure (unsupported depth,
 * NewGWorld error, or surfp == NULL).
 */
PRIVATE GWorldPtr
gworld_from_surface (SDL_Surface *surfp)
{
  GWorldPtr retval;

  retval = NULL;
  if (surfp)
    {
      QDErr err;
      int surf_depth;
      CTabHandle ctab;

      ctab = NULL;
      surf_depth = SDL_Surface_depth (surfp);
      switch (surf_depth)
        {
        case 8:
          /* Indexed color: the GWorld needs a color table matching the
             surface's palette. */
          ctab = ctab_from_surface (surfp);
          break;
        case 32:
          break;
        default:
          warning_unexpected ("surf_depth = %d", surf_depth);
          surf_depth = 0;	/* unsupported -> skip GWorld creation */
          break;
        }
      if (surf_depth)
        {
          int n_lines;
          int pixels_per_line;
          Rect r;

          n_lines = SDL_Surface_height (surfp);
          pixels_per_line = SDL_Surface_width (surfp);
          /* NOTE(review): CW/CWC appear to convert host values to the
             emulated (big-endian) byte order -- CWC for constants, CW for
             variables -- and MR converts handles/pointers back; these are
             Executor-style macros, confirm their exact semantics. */
          r.top = CWC (0);
          r.left = CWC (0);
          r.bottom = CW (n_lines);
          r.right = CW (pixels_per_line);

          {
            CGrafPtr save_port;
            GDHandle save_device;

            /* NewGWorld can disturb the current port/device; save and
               restore them around the call. */
            GetGWorld (&save_port, &save_device);
            save_port = MR (save_port);
            save_device = MR (save_device);
            err = NewGWorld (&retval, surf_depth, &r, ctab, NULL, keepLocal);
            SetGWorld (save_port, save_device);
          }
          if (retval)
            {
              PixMapHandle pm;

              retval = MR (retval);
              pm = GetGWorldPixMap (retval);
              LockPixels (pm);
              SDL_LockSurface (surfp);
              switch (surf_depth)
                {
                case 8:
                  {
                    uint8 *ip, *eip;
                    uint8 *op;
                    int rowbytes;
                    int pitch;

                    /* Copy row by row: source advances by the surface pitch,
                       destination by the PixMap rowbytes.  NOTE(review):
                       each memcpy copies `rowbytes` bytes from the *source*
                       row, which assumes rowbytes <= pitch -- confirm. */
                    pitch = SDL_Surface_pitch (surfp);
                    rowbytes = PIXMAP_ROWBYTES (pm);
                    ip = SDL_Surface_pixels (surfp);
                    op = (typeof (op)) GetPixBaseAddr (pm);
                    eip = ip + n_lines * pitch;
                    for (; ip != eip; ip += pitch, op += rowbytes)
                      memcpy (op, ip, rowbytes);
                    break;
                  }
                case 32:
                  {
                    sdl_pixel24 *ip;
                    mac_pixel32 *op;

                    /* Single flat copy.  NOTE(review): assumes the surface
                       is tightly packed (pitch == width * 4) and that the
                       GWorld's rowbytes match exactly -- confirm. */
                    op = (typeof (op)) GetPixBaseAddr (pm);
                    ip = SDL_Surface_pixels (surfp);
                    memcpy (op, ip, n_lines * pixels_per_line * sizeof *op);
                    break;
                  }
                default:
                  warning_unexpected ("surf_depth = %d", surf_depth);
                  break;
                }
              SDL_UnlockSurface (surfp);
              UnlockPixels (pm);
            }
        }
    }
  return retval;
}
//=============================================================================
//	QutTexture_CreateTextureObjectFromPixmap : Create a QD3D texture object
//	from a QuickDraw PixMap.
//-----------------------------------------------------------------------------
//	thePixMap   : source pixmap (locked and marked non-purgeable here).
//	pixelType   : QD3D pixel type describing the pixmap's pixel layout.
//	wantMipMaps : if kQ3True, create a pixmap texture so QD3D auto-generates
//	              mip-maps; otherwise build a single-level mipmap by hand.
//
//	For kQ3PixelTypeARGB16 sources, the alpha bit of every non-black pixel
//	is forced on (black is treated as transparent) -- note this mutates the
//	caller's pixel data in place.
//
//	Returns the new texture object, or NULL on failure.
//-----------------------------------------------------------------------------
TQ3TextureObject QutTexture_CreateTextureObjectFromPixmap(PixMapHandle thePixMap, TQ3PixelType pixelType, TQ3Boolean wantMipMaps)
{
	TQ3Uns32 x, y, theWidth, theHeight, rowBytes, pixelBytes;
	TQ3TextureObject qd3dTextureObject = NULL;
	TQ3StorageObject qd3dMemoryStorage;
	TQ3StoragePixmap qd3dPixMap;
	TQ3Mipmap qd3dMipMap;
	UInt16 *pixelPtr;
	UInt8 *baseAddr;
	OSType pixelFormat;
	TQ3Endian byteOrder;

	// Get the details we need from the PixMap
	NoPurgePixels(thePixMap);
	LockPixels(thePixMap);

	theWidth   = (*thePixMap)->bounds.right - (*thePixMap)->bounds.left;
	theHeight  = (*thePixMap)->bounds.bottom - (*thePixMap)->bounds.top;
	rowBytes   = (*thePixMap)->rowBytes & 0x7FFF;	// mask off the "is a PixMap" flag bit
	pixelBytes = (*thePixMap)->pixelSize / 8;
	baseAddr   = (UInt8 *) GetPixBaseAddr(thePixMap);
#if OLDPIXMAPSTRUCT
	pixelFormat = 0;
#else
	pixelFormat = (*thePixMap)->pixelFormat;
#endif

	// If this is a 16 bit alpha channel texture, set the alpha bits.
	// We assume that black is transparent.
	if (pixelType == kQ3PixelTypeARGB16)
		{
		for (y = 0; y < theHeight; y++)
			{
			for (x = 0; x < theWidth; x++)
				{
				pixelPtr = (UInt16 *) (baseAddr + (y * rowBytes) + (x * 2));
				if (*pixelPtr != 0x0000)
					*pixelPtr |= (1 << 15);	// bit 15 is the 1-bit alpha of 1-5-5-5 ARGB
				}
			}
		}

	// Set the byte order
	if ( (pixelFormat == k32BGRAPixelFormat) || (pixelFormat == k16LE555PixelFormat) )
		{
		byteOrder = kQ3EndianLittle;
		}
	else
		{
		byteOrder = kQ3EndianBig;
		}

	// Create a storage object based on the GWorld
	qd3dMemoryStorage = Q3MemoryStorage_New(baseAddr, theHeight * rowBytes);
	if (qd3dMemoryStorage != NULL)
		{
		// Create the appropriate type of texture. Note that if mip-maps are
		// required, we create a QD3D PixMap. This might seem back to front,
		// but QD3D automatically creates mip-maps for PixMaps.
		//
		// If mip-maps are not required, we create a QD3D MipMap by hand and
		// signal that we don't want any other mip-maps to be created.
		//
		// We need to work around a bug in Q3MipmapTexture_New: if the rowByte
		// for the pixel exactly matches the size of each row, the texture is
		// distorted - we can fix this by using Q3PixmapTexture_New instead.
		if (wantMipMaps || (rowBytes == theWidth * pixelBytes))
			{
			// Create a PixMap from the GWorld data
			qd3dPixMap.image     = qd3dMemoryStorage;
			qd3dPixMap.width     = theWidth;
			qd3dPixMap.height    = theHeight;
			qd3dPixMap.rowBytes  = rowBytes;
			qd3dPixMap.pixelSize = (pixelType == kQ3PixelTypeARGB32 || pixelType == kQ3PixelTypeRGB32) ? 32 : 16;
			qd3dPixMap.pixelType = pixelType;
			qd3dPixMap.bitOrder  = byteOrder;
			qd3dPixMap.byteOrder = byteOrder;

			qd3dTextureObject = Q3PixmapTexture_New(&qd3dPixMap);
			}
		else
			{
			// Create a MipMap from the GWorld data
			qd3dMipMap.image         = qd3dMemoryStorage;
			qd3dMipMap.useMipmapping = kQ3False;	// single level only
			qd3dMipMap.pixelType     = pixelType;
			qd3dMipMap.bitOrder      = byteOrder;
			qd3dMipMap.byteOrder     = byteOrder;
			qd3dMipMap.reserved      = 0;

			qd3dMipMap.mipmaps[0].width    = theWidth;
			qd3dMipMap.mipmaps[0].height   = theHeight;
			qd3dMipMap.mipmaps[0].rowBytes = rowBytes;
			qd3dMipMap.mipmaps[0].offset   = 0;

			qd3dTextureObject = Q3MipmapTexture_New(&qd3dMipMap);
			}

		// The texture holds its own reference to the storage.
		Q3Object_Dispose(qd3dMemoryStorage);
		}

	// Clean up and return
	UnlockPixels(thePixMap);

	return(qd3dTextureObject);
}
/* SGDataProc callback: invoked by the QuickTime sequence grabber each time a
 * compressed frame arrives.  Decompresses the frame into self->world (the
 * offscreen GWorld whose decompression sequence self->dec_seq was configured
 * elsewhere), copies the raw pixels into a fresh GstBuffer stored in
 * self->buffer (replacing any unconsumed previous frame), and timestamps it
 * against the element clock.  Returns noErr, or the decompression error. */
static OSErr
data_proc (SGChannel c, Ptr p, long len, long *offset, long chRefCon, TimeValue time, short writeType, long refCon)
{
  GstOSXVideoSrc *self;
  gint fps_n, fps_d;
  GstClockTime duration, timestamp, latency;
  CodecFlags flags;
  ComponentResult err;
  PixMapHandle hPixMap;
  Rect portRect;
  int pix_rowBytes;
  void *pix_ptr;
  int pix_height;
  int pix_size;

  self = GST_OSX_VIDEO_SRC (refCon);

  /* Drop any previous frame the pipeline has not consumed yet. */
  if (self->buffer != NULL) {
    gst_buffer_unref (self->buffer);
    self->buffer = NULL;
  }

  /* Decompress the captured data into the offscreen GWorld. */
  err = DecompressSequenceFrameS (self->dec_seq, p, len, 0, &flags, NULL);
  if (err != noErr) {
    GST_ERROR_OBJECT (self, "DecompressSequenceFrameS returned %d", (int) err);
    return err;
  }

  /* Compute the raw frame size from the GWorld geometry (rowBytes * rows). */
  hPixMap = GetGWorldPixMap (self->world);
  LockPixels (hPixMap);
  GetPortBounds (self->world, &portRect);
  pix_rowBytes = (int) GetPixRowBytes (hPixMap);
  pix_ptr = GetPixBaseAddr (hPixMap);
  pix_height = (portRect.bottom - portRect.top);
  pix_size = pix_rowBytes * pix_height;

  GST_DEBUG_OBJECT (self, "num=%5d, height=%d, rowBytes=%d, size=%d",
      self->seq_num, pix_height, pix_rowBytes, pix_size);

  /* Timestamp = element running time minus one frame of latency, clamped
   * at zero.  FRAMERATE is treated as a constant frame rate here.
   * NOTE(review): assumes the element has a clock set
   * (GST_ELEMENT_CAST (self)->clock non-NULL) -- confirm caller/pipeline
   * guarantees this when capture is running. */
  fps_n = FRAMERATE;
  fps_d = 1;

  duration = gst_util_uint64_scale_int (GST_SECOND, fps_d, fps_n);
  latency = duration;

  timestamp = gst_clock_get_time (GST_ELEMENT_CAST (self)->clock);
  timestamp -= gst_element_get_base_time (GST_ELEMENT_CAST (self));
  if (timestamp > latency)
    timestamp -= latency;
  else
    timestamp = 0;

  /* Copy the raw pixels out of the GWorld into a new GstBuffer. */
  self->buffer = gst_buffer_new_and_alloc (pix_size);
  GST_BUFFER_OFFSET (self->buffer) = self->seq_num;
  GST_BUFFER_TIMESTAMP (self->buffer) = timestamp;
  memcpy (GST_BUFFER_DATA (self->buffer), pix_ptr, pix_size);

  self->seq_num++;

  UnlockPixels (hPixMap);

  return noErr;
}
//////////////////////////////////////////////////////////////////////////////// // virtual bool LLMediaImplQuickTime::sizeChanged() { if ( ! mMovieHandle ) return false; // sanitize size of movie Rect movie_rect; setMovieBoxEnhanced( &movie_rect ); // we need this later int width = ( movie_rect.right - movie_rect.left ); int height = ( movie_rect.bottom - movie_rect.top ); std::cout << "LLMEDIA> size changed to " << width << " x " << height << std::endl; setMediaSize( width, height ); // media depth won't change int depth_bits = getMediaDepth() * 8; GWorldPtr old_gworld_handle = mGWorldHandle; if (old_gworld_handle) { GWorldFlags result = UpdateGWorld( &mGWorldHandle, depth_bits, &movie_rect, NULL, NULL, 0 ); if ( gwFlagErr == result ) { // TODO: unrecoverable?? throw exception? return something? return false; } } else { OSErr result = NewGWorld( &mGWorldHandle, depth_bits, &movie_rect, NULL, NULL, keepLocal | pixelsLocked ); if ( noErr != result ) { // ATODO: unrecoverable?? throw exception? return something? return false; } // clear memory in GWorld to avoid random screen visual fuzz from uninitialized texture data if ( mGWorldHandle ) { PixMapHandle pix_map_handle = GetGWorldPixMap( mGWorldHandle ); unsigned char* ptr = ( unsigned char* )GetPixBaseAddr( pix_map_handle ); memset( ptr, 0x00, height * QTGetPixMapHandleRowBytes( pix_map_handle ) ); } } // point movie at GWorld if it's new if ( mMovieHandle && ! old_gworld_handle ) { SetMovieGWorld( mMovieHandle, mGWorldHandle, GetGWorldDevice ( mGWorldHandle ) ); } // update movie controller if ( mMovieController ) { MCSetControllerPort( mMovieController, mGWorldHandle ); MCPositionController( mMovieController, &movie_rect, &movie_rect, mcTopLeftMovie | mcPositionDontInvalidate ); MCMovieChanged( mMovieController, mMovieHandle ); } // Emit event with size change so the calling app knows about it too LLMediaEvent event( this ); mEventEmitter.update( &LLMediaObserver::onMediaSizeChange, event ); return true; }
/*
 *  PsychQTGetTextureFromMovie() -- Create an OpenGL texture map from a specific videoframe from given movie object.
 *
 *  win = Window pointer of onscreen window for which a OpenGL texture should be created.
 *  moviehandle = Handle to the movie object.
 *  checkForImage = true == Just check if new image available, false == really retrieve the image, blocking if necessary.
 *  timeindex = When not in playback mode, this allows specification of a requested frame by presentation time.
 *              If set to -1, or if in realtime playback mode, this parameter is ignored and the next video frame is returned.
 *  out_texture = Pointer to the Psychtoolbox texture-record where the new texture should be stored.
 *  presentation_timestamp = A ptr to a double variable, where the presentation timestamp of the returned frame should be stored.
 *
 *  Returns true (1) on success, false (0) if no new image available, -1 if no new image available and there won't be any in future.
 */
int PsychQTGetTextureFromMovie(PsychWindowRecordType *win, int moviehandle, int checkForImage, double timeindex, PsychWindowRecordType *out_texture, double *presentation_timestamp)
{
    // These statics cache 1st-pass (checkForImage) results for the 2nd-pass
    // fetch call that immediately follows -- see the "2nd pass" comment below.
    static TimeValue myNextTimeCached = -2;
    static TimeValue nextFramesTimeCached = -2;
    TimeValue	myCurrTime;
    TimeValue	myNextTime;
    TimeValue   nextFramesTime=0;
    short		myFlags;
    OSType		myTypes[1];
    OSErr		error = noErr;
    Movie		theMovie;
    CVOpenGLTextureRef newImage = NULL;
    QTVisualContextRef  theMoviecontext;
    unsigned int failcount=0;
    float lowerLeft[2];
    float lowerRight[2];
    float upperRight[2];
    float upperLeft[2];
    GLuint texid;
    Rect rect;
    float rate;
    double targetdelta, realdelta, frames;
    PsychRectType outRect;

    if (!PsychIsOnscreenWindow(win)) {
        PsychErrorExitMsg(PsychError_user, "Need onscreen window ptr!!!");
    }

    // Activate OpenGL context of target window:
    PsychSetGLContext(win);

    // Explicitely disable Apple's Client storage extensions. For now they are not really useful to us.
    glPixelStorei(GL_UNPACK_CLIENT_STORAGE_APPLE, GL_FALSE);

    if (moviehandle < 0 || moviehandle >= PSYCH_MAX_MOVIES) {
        PsychErrorExitMsg(PsychError_user, "Invalid moviehandle provided.");
    }

    if ((timeindex!=-1) && (timeindex < 0 || timeindex >= 10000.0)) {
        PsychErrorExitMsg(PsychError_user, "Invalid timeindex provided.");
    }

    if (NULL == out_texture && !checkForImage) {
        PsychErrorExitMsg(PsychError_internal, "NULL-Ptr instead of out_texture ptr passed!!!");
    }

    // Fetch references to objects we need:
    theMovie = movieRecordBANK[moviehandle].theMovie;
    theMoviecontext = movieRecordBANK[moviehandle].QTMovieContext;

    if (theMovie == NULL) {
        PsychErrorExitMsg(PsychError_user, "Invalid moviehandle provided. No movie associated with this handle.");
    }

    // Check if end of movie is reached. Rewind, if so...
    if (IsMovieDone(theMovie) && movieRecordBANK[moviehandle].loopflag > 0) {
        if (GetMovieRate(theMovie)>0) {
            GoToBeginningOfMovie(theMovie);
        } else {
            GoToEndOfMovie(theMovie);
        }
    }

    // Is movie actively playing (automatic async playback, possibly with synced sound)?
    // If so, then we ignore the 'timeindex' parameter, because the automatic playback
    // process determines which frames should be delivered to PTB when. This function will
    // simply wait or poll for arrival/presence of a new frame that hasn't been fetched
    // in previous calls.
    if (0 == GetMovieRate(theMovie)) {
        // Movie playback inactive. We are in "manual" mode: No automatic async playback,
        // no synced audio output. The user just wants to manually fetch movie frames into
        // textures for manual playback in a standard Matlab-loop.

        // First pass - checking for new image?
        if (checkForImage) {
            // Image for specific point in time requested?
            if (timeindex >= 0) {
                // Yes. We try to retrieve the next possible image for requested timeindex.
                myCurrTime = (TimeValue) ((timeindex * (double) GetMovieTimeScale(theMovie)) + 0.5f);
            }
            else {
                // No. We just retrieve the next frame, given the current movie time.
                myCurrTime = GetMovieTime(theMovie, NULL);
            }

            // Retrieve timeindex of the closest image sample after myCurrTime:
            myFlags = nextTimeStep + nextTimeEdgeOK;	// We want the next frame in the movie's media.
            myTypes[0] = VisualMediaCharacteristic;		// We want video samples.
            GetMovieNextInterestingTime(theMovie, myFlags, 1, myTypes, myCurrTime, FloatToFixed(1), &myNextTime, &nextFramesTime);
            error = GetMoviesError();
            if (error != noErr) {
                PsychErrorExitMsg(PsychError_internal, "Failed to fetch texture from movie for given timeindex!");
            }

            // Found useful event?
            if (myNextTime == -1) {
                if (PsychPrefStateGet_Verbosity() > 3) printf("PTB-WARNING: Bogus timevalue in movie track for movie %i. Trying to keep going.\n", moviehandle);

                // No. Just push timestamp to current time plus a little bit in the hope
                // this will get us unstuck:
                myNextTime = myCurrTime + (TimeValue) 1;
                nextFramesTime = (TimeValue) 0;
            }

            if (myNextTime != myNextTimeCached) {
                // Set movies current time to myNextTime, so the next frame will be fetched from there:
                SetMovieTimeValue(theMovie, myNextTime);

                // nextFramesTime is the timeindex to which we need to advance for retrieval of next frame: (see code below)
                nextFramesTime=myNextTime + nextFramesTime;

                if (PsychPrefStateGet_Verbosity() > 5) printf("PTB-DEBUG: Current timevalue in movie track for movie %i is %lf secs.\n", moviehandle, (double) myNextTime / (double) GetMovieTimeScale(theMovie));
                if (PsychPrefStateGet_Verbosity() > 5) printf("PTB-DEBUG: Next timevalue in movie track for movie %i is %lf secs.\n", moviehandle, (double) nextFramesTime / (double) GetMovieTimeScale(theMovie));

                // Cache values for 2nd pass:
                myNextTimeCached = myNextTime;
                nextFramesTimeCached = nextFramesTime;
            }
            else {
                // Somehow got stuck? Do nothing...
                if (PsychPrefStateGet_Verbosity() > 5) printf("PTB-DEBUG: Seem to be a bit stuck at timevalue [for movie %i] of %lf secs. Nudging a bit forward...\n", moviehandle, (double) myNextTime / (double) GetMovieTimeScale(theMovie));
                // Nudge the timeindex a bit forware in the hope that this helps:
                SetMovieTimeValue(theMovie, GetMovieTime(theMovie, NULL) + 1);
            }
        }
        else {
            // This is the 2nd pass: Image fetching. Use cached values from first pass:
            // Caching in a static works because we're always called immediately for 2nd
            // pass after successfull return from 1st pass, and we're not multi-threaded,
            // i.e., don't need to be reentrant or thread-safe here:
            myNextTime = myNextTimeCached;
            nextFramesTime = nextFramesTimeCached;
            myNextTimeCached = -2;
        }
    }
    else {
        // myNextTime unavailable if in autoplayback-mode:
        myNextTime=-1;
    }

    // Presentation timestamp requested?
    if (presentation_timestamp) {
        // Already available?
        if (myNextTime==-1) {
            // Retrieve the exact presentation timestamp of the retrieved frame (in movietime):
            myFlags = nextTimeStep + nextTimeEdgeOK;	// We want the next frame in the movie's media.
            myTypes[0] = VisualMediaCharacteristic;		// We want video samples.
            // We search backward for the closest available image for the current time. Either we get the current time
            // if we happen to fetch a frame exactly when it becomes ready, or we get a bit earlier timestamp, which is
            // the optimal presentation timestamp for this frame:
            GetMovieNextInterestingTime(theMovie, myFlags, 1, myTypes, GetMovieTime(theMovie, NULL), FloatToFixed(-1), &myNextTime, NULL);
        }
        // Convert pts (in Quicktime ticks) to pts in seconds since start of movie and return it:
        *presentation_timestamp = (double) myNextTime / (double) GetMovieTimeScale(theMovie);
    }

    // Allow quicktime visual context task to do its internal bookkeeping and cleanup work:
    if (theMoviecontext) QTVisualContextTask(theMoviecontext);

    // Perform decompress-operation:
    if (checkForImage) MoviesTask(theMovie, 0);

    // Should we just check for new image? If so, just return availability status:
    if (checkForImage) {
        if (PSYCH_USE_QT_GWORLDS) {
            // We use GWorlds. In this case we either succeed immediately due to the
            // synchronous nature of GWorld rendering, or we fail completely at end
            // of non-looping movie:
            if (IsMovieDone(theMovie) && movieRecordBANK[moviehandle].loopflag == 0) {
                // No new frame available and there won't be any in the future, because this is a non-looping
                // movie that has reached its end.
                return(-1);
            }

            // Is this the special case of a movie without video, but only sound? In that case,
            // we always return a 'false' because there ain't no image to return.
            if (movieRecordBANK[moviehandle].QTMovieGWorld == NULL) return(false);

            // Success!
            return(true);
        }

        // Code which uses QTVisualContextTasks...
        if (QTVisualContextIsNewImageAvailable(theMoviecontext, NULL)) {
            // New frame ready!
            return(true);
        }
        else if (IsMovieDone(theMovie) && movieRecordBANK[moviehandle].loopflag == 0) {
            // No new frame available and there won't be any in the future, because this is a non-looping
            // movie that has reached its end.
            return(-1);
        }
        else {
            // No new frame available yet:
            return(false);
        }
    }

    if (!PSYCH_USE_QT_GWORLDS) {
        // Blocking wait-code for non-GWorld mode:
        // Try up to 1000 iterations for arrival of requested image data in wait-mode:
        failcount=0;
        while ((failcount < 1000) && !QTVisualContextIsNewImageAvailable(theMoviecontext, NULL)) {
            PsychWaitIntervalSeconds(0.005);
            MoviesTask(theMovie, 0);
            failcount++;
        }

        // No new frame available and there won't be any in the future, because this is a non-looping
        // movie that has reached its end.
        if ((failcount>=1000) && IsMovieDone(theMovie) && (movieRecordBANK[moviehandle].loopflag == 0)) {
            return(-1);
        }

        // Fetch new OpenGL texture with the new movie image frame:
        error = QTVisualContextCopyImageForTime(theMoviecontext, kCFAllocatorDefault, NULL, &newImage);
        if ((error!=noErr) || newImage == NULL) {
            PsychErrorExitMsg(PsychError_internal, "OpenGL<->Quicktime texture fetch failed!!!");
        }

        // Disable client storage, if it was enabled:
        glPixelStorei(GL_UNPACK_CLIENT_STORAGE_APPLE, GL_FALSE);

        // Build a standard PTB texture record:
        CVOpenGLTextureGetCleanTexCoords (newImage, lowerLeft, lowerRight, upperRight, upperLeft);
        texid = CVOpenGLTextureGetName(newImage);

        // Assign texture rectangle:
        PsychMakeRect(outRect, upperLeft[0], upperLeft[1], lowerRight[0], lowerRight[1]);

        // Set texture orientation as if it were an inverted Offscreen window: Upside-down.
        out_texture->textureOrientation = (CVOpenGLTextureIsFlipped(newImage)) ? 3 : 4;

        // Assign OpenGL texture id:
        out_texture->textureNumber = texid;

        // Store special texture object as part of the PTB texture record:
        out_texture->targetSpecific.QuickTimeGLTexture = newImage;
    }
    else {
        // Synchronous texture fetch code for GWorld rendering mode:
        // At this point, the GWorld should contain the source image for creating a
        // standard OpenGL texture:

        // Disable client storage, if it was enabled:
        glPixelStorei(GL_UNPACK_CLIENT_STORAGE_APPLE, GL_FALSE);

        // Build a standard PTB texture record:

        // Assign texture rectangle:
        GetMovieBox(theMovie, &rect);

        // Hack: Need to extend rect by 4 pixels, because GWorlds are 4 pixels-aligned via
        // image row padding:
        rect.right = rect.right + 4;
        PsychMakeRect(out_texture->rect, rect.left, rect.top, rect.right, rect.bottom);

        // Set NULL - special texture object as part of the PTB texture record:
        out_texture->targetSpecific.QuickTimeGLTexture = NULL;

        // Set texture orientation as if it were an inverted Offscreen window: Upside-down.
        out_texture->textureOrientation = 3;

        // Setup a pointer to our GWorld as texture data pointer:
        out_texture->textureMemorySizeBytes = 0;

        // Quicktime textures are aligned on 4 Byte boundaries:
        out_texture->textureByteAligned = 4;

        // Lock GWorld:
        if(!LockPixels(GetGWorldPixMap(movieRecordBANK[moviehandle].QTMovieGWorld))) {
            // Locking surface failed! We abort.
            PsychErrorExitMsg(PsychError_internal, "PsychQTGetTextureFromMovie(): Locking GWorld pixmap surface failed!!!");
        }

        // This will retrieve an OpenGL compatible pointer to the GWorlds pixel data and assign it to our texmemptr:
        out_texture->textureMemory = (GLuint*) GetPixBaseAddr(GetGWorldPixMap(movieRecordBANK[moviehandle].QTMovieGWorld));

        // Let PsychCreateTexture() do the rest of the job of creating, setting up and
        // filling an OpenGL texture with GWorlds content:
        PsychCreateTexture(out_texture);

        // Undo hack from above after texture creation: Now we need the real width of the
        // texture for proper texture coordinate assignments in drawing code et al.
        rect.right = rect.right - 4;
        PsychMakeRect(outRect, rect.left, rect.top, rect.right, rect.bottom);

        // Unlock GWorld surface. We do a glFinish() before, for safety reasons...
        //glFinish();
        UnlockPixels(GetGWorldPixMap(movieRecordBANK[moviehandle].QTMovieGWorld));

        // Ready to use the texture... We're done.
    }

    // Normalize texture rectangle and assign it:
    PsychNormalizeRect(outRect, out_texture->rect);

    rate = FixedToFloat(GetMovieRate(theMovie));

    // Detection of dropped frames: This is a heuristic. We'll see how well it works out...
    if (rate && presentation_timestamp) {
        // Try to check for dropped frames in playback mode:

        // Expected delta between successive presentation timestamps:
        targetdelta = 1.0f / (movieRecordBANK[moviehandle].fps * rate);

        // Compute real delta, given rate and playback direction:
        if (rate>0) {
            realdelta = *presentation_timestamp - movieRecordBANK[moviehandle].last_pts;
            if (realdelta<0) realdelta = 0;
        }
        else {
            realdelta = -1.0 * (*presentation_timestamp - movieRecordBANK[moviehandle].last_pts);
            if (realdelta<0) realdelta = 0;
        }

        frames = realdelta / targetdelta;
        // Dropped frames?
        if (frames > 1 && movieRecordBANK[moviehandle].last_pts>=0) {
            movieRecordBANK[moviehandle].nr_droppedframes += (int) (frames - 1 + 0.5);
        }

        movieRecordBANK[moviehandle].last_pts = *presentation_timestamp;
    }

    // Manually advance movie time, if in fetch mode:
    if (0 == GetMovieRate(theMovie)) {
        // We are in manual fetch mode: Need to manually advance movie time to next
        // media sample:
        if (nextFramesTime == myNextTime) {
            // Invalid value? Try to hack something that gets us unstuck:
            myNextTime = GetMovieTime(theMovie, NULL);
            nextFramesTime = myNextTime + (TimeValue) 1;
        }

        SetMovieTimeValue(theMovie, nextFramesTime);
    }

    // Check if end of movie is reached. Rewind, if so...
    if (IsMovieDone(theMovie) && movieRecordBANK[moviehandle].loopflag > 0) {
        if (GetMovieRate(theMovie)>0) {
            GoToBeginningOfMovie(theMovie);
        } else {
            GoToEndOfMovie(theMovie);
        }
    }

    return(TRUE);
}
// Render the emulated screen (GFX.Screen) into a 16-bit GWorld, scale it to
// the requested destination size (with dithering when shrinking), and return
// the result recorded as a PICT handle. Caller owns the returned handle.
PicHandle GetScreenAsPicHandle(int width, int height, int destWidth, int destHeight)
{
	Rect srcRect, dstRect;
	GWorldPtr srcWorld, dstWorld;
	PicHandle pict;

	SetRect(&srcRect, 0, 0, width, height);
	SetRect(&dstRect, 0, 0, destWidth, destHeight);

	InitGWorld(&srcWorld, &srcRect, 16);
	InitGWorld(&dstWorld, &dstRect, 16);

	// The source stride depends on the active drawing method: the software
	// blitters render into a fixed 512-pixel-wide frame buffer, while the
	// OpenGL path uses a tightly packed one.
	Byte *src = (Byte *) GFX.Screen;
	Byte *dst = (Byte *) GetPixBaseAddr(GetGWorldPixMap(srcWorld));
	int dstStride = GetPixRowBytes(GetGWorldPixMap(srcWorld));
	int method = directDisplay ? drawingMethod : lastDrawingMethod;
	int srcStride = (method != kDrawingOpenGL) ? 512 * 2 : width * 2;

	for (int y = 0; y < height; y++)
	{
		memcpy(dst, src, width * 2);
		src += srcStride;
		dst += dstStride;
	}

	if ((dstRect.right * dstRect.bottom) < (srcRect.right * srcRect.bottom))
	{
		// Shrinking: dither-scale into the destination GWorld first, then
		// record the picture from the already-scaled pixels.
		PrepareForGDrawing(srcWorld);
		CopyBits(GetPortBitMapForCopyBits(srcWorld), GetPortBitMapForCopyBits(dstWorld), &srcRect, &dstRect, srcCopy | ditherCopy, nil);
		FinishGDrawing(srcWorld);

		PrepareForGDrawing(dstWorld);
		pict = OpenPicture(&dstRect);
		CopyBits(GetPortBitMapForCopyBits(dstWorld), GetPortBitMapForCopyBits(dstWorld), &dstRect, &dstRect, srcCopy, nil);
		ClosePicture();
		FinishGDrawing(dstWorld);
	}
	else
	{
		// Same size or enlarging: record the scaling blit directly.
		PrepareForGDrawing(dstWorld);
		pict = OpenPicture(&dstRect);
		CopyBits(GetPortBitMapForCopyBits(srcWorld), GetPortBitMapForCopyBits(dstWorld), &srcRect, &dstRect, srcCopy, nil);
		ClosePicture();
		FinishGDrawing(dstWorld);
	}

	DisposeGWorld(srcWorld);
	DisposeGWorld(dstWorld);

	return pict;
}
// Append one frame to the 3GP movie being written.  On the first call the
// QuickTime track/media/GWorld/compression buffers are created lazily
// (m_initDone); subsequent calls compress the raster and add it as a media
// sample.  Throws TImageException on any QuickTime/memory failure.
//
// NOTE(review): the repeated idiom `(err = GetMoviesError() != noErr)`
// assigns the *comparison result* (0/1) to err due to precedence, not the
// error code; control flow is unaffected but err never holds the real code.
// NOTE(review): on the non-WIN32 path m_pixmap is set to NULL (the #if 0
// branch that would initialize it is disabled), yet rowBytes below
// dereferences it -- looks like a guaranteed crash on that path; confirm
// which configuration is actually built.
void TLevelWriter3gp::save(const TImageP &img, int frameIndex)
{
	if (m_cancelled)
		return;

	TRasterImageP image(img);
	int lx = image->getRaster()->getLx();
	int ly = image->getRaster()->getLy();
	//void *buffer = image->getRaster()->getRawData();
	int pixSize = image->getRaster()->getPixelSize();
	if (pixSize != 4)
		throw TImageException(getFilePath(), "Unsupported pixel type");

	// Serialize writers: QuickTime session state is shared.
	QMutexLocker sl(&m_mutex);

	if (!m_properties)
		m_properties = new Tiio::MovWriterProperties();

	Tiio::MovWriterProperties *prop = (Tiio::MovWriterProperties *)(m_properties);

	//CodecType compression = StandardCompressionType; prop->getCurrentCodec();
	//CodecQ quality = StandardQualityType; prop->getCurrentQuality();

	// One-time setup of track, media, exporter, GWorld and buffers.
	if (!m_initDone) {
		//FSSpec fspec;
		Rect frame;
		long max_compressed_size;
		QDErr err;

		m_videoTrack = NewMovieTrack(m_movie, FixRatio((short)lx, 1), FixRatio((short)ly, 1), kNoVolume);

		if ((err = GetMoviesError() != noErr))
			throw TImageException(getFilePath(), "can't create video track");

		m_dataRef = nil;
		m_hMovieData = NewHandle(0);

		// Construct the Handle data reference
		err = PtrToHand(&m_hMovieData, &m_dataRef, sizeof(Handle));

		if ((err = GetMoviesError() != noErr))
			throw TImageException(getFilePath(), "can't create Data Ref");

		m_videoMedia = NewTrackMedia(m_videoTrack, VideoMediaType, (TINT32)m_frameRate, m_dataRef, HandleDataHandlerSubType);

		OpenADefaultComponent(MovieExportType, '3gpp', &m_myExporter);

		//  err = (short)MovieExportDoUserDialog(m_myExporter, m_movie, 0, 0, 0, &m_cancelled);

		//  if (m_cancelled)
		//	  throw TImageException(getFilePath(), "User abort of 3GP render");

		if ((err = GetMoviesError() != noErr))
			throw TImageException(getFilePath(), "can't create video media");
		if ((err = BeginMediaEdits(m_videoMedia)) != noErr)
			throw TImageException(getFilePath(), "can't begin edit video media");

		frame.left = 0;
		frame.top = 0;
		frame.right = lx;
		frame.bottom = ly;

#if 0
		if ((err = NewGWorld(&(m_gworld), pixSize * 8, &frame, 0, 0, 0))!=noErr)
#else /* Mac OSX 10.7 later */
		if ((err = QTNewGWorld(&(m_gworld), pixSize * 8, &frame, 0, 0, 0)) != noErr)
#endif
			throw TImageException(getFilePath(), "can't create movie buffer");

#ifdef WIN32
		LockPixels(m_gworld->portPixMap);
		if ((err = GetMaxCompressionSize(m_gworld->portPixMap, &frame, 0, quality, compression, anyCodec, &max_compressed_size)) != noErr)
			throw TImageException(getFilePath(), "can't get max compression size");
#else
#if 0
		PixMapHandle pixmapH = GetPortPixMap (m_gworld);
		LockPixels(pixmapH);
#else
		PixMapHandle pixmapH = NULL;
#endif
		// Mac path: GetMaxCompressionSize is disabled, use a generous
		// fixed estimate instead (20x the raw frame size).
		max_compressed_size = lx * ly * 4 * 20;
		/*if ((err = GetMaxCompressionSize(pixmapH, &frame, 0, quality, compression,anyCodec, &max_compressed_size))!=noErr)
			throw TImageException(getFilePath(), "can't get max compression size");*/
#endif

		m_compressedData = NewHandle(max_compressed_size);

		if ((err = MemError()) != noErr)
			throw TImageException(getFilePath(), "can't allocate compressed data for movie");

		MoveHHi(m_compressedData);
		HLock(m_compressedData);

		if ((err = MemError()) != noErr)
			throw TImageException(getFilePath(), "can't allocate img handle");

#if 0
		m_pixmap = GetGWorldPixMap(m_gworld);

		if (!LockPixels(m_pixmap))
			throw TImageException(getFilePath(), "can't lock pixels");
		buf = (PixelXRGB*) GetPixBaseAddr(m_pixmap);
#else
		m_pixmap = NULL;
		buf = NULL;
#endif
		buf_lx = lx;
		buf_ly = ly;

		m_initDone = true;
	}

	// Per-frame path: compute the pixmap stride (mask off the two QuickDraw
	// flag bits of rowBytes), copy the raster into the GWorld buffer,
	// compress, and append the sample to the media.
	unsigned short rowBytes = (unsigned short)(((short)(*(m_pixmap))->rowBytes & ~(3 << 14)));

	Rect frame;
	ImageDescriptionHandle img_descr;
	Ptr compressed_data_ptr;
	QDErr err;

	frame.left = 0;
	frame.top = 0;
	frame.right = lx;
	frame.bottom = ly;

	TRasterP ras = image->getRaster();

#ifdef WIN32
	compressed_data_ptr = StripAddress(*(m_compressedData));
	copy(ras, buf, buf_lx, buf_ly);
#else
	compressed_data_ptr = *m_compressedData;
	copy(ras, buf, buf_lx, buf_ly, rowBytes);
#endif

	img_descr = (ImageDescriptionHandle)NewHandle(4);

#ifdef WIN32
	if ((err = CompressImage(m_gworld->portPixMap, &frame, quality, compression, img_descr, compressed_data_ptr)) != noErr)
		throw TImageException(getFilePath(), "can't compress image");
#else
#if 0
	PixMapHandle pixmapH = GetPortPixMap (m_gworld);
	if ((err = CompressImage(pixmapH, &frame, codecNormalQuality, kJPEGCodecType, img_descr, compressed_data_ptr))!=noErr) {
		throw TImageException(getFilePath(), "can't compress image");
	}
#endif
#endif

	if ((err = AddMediaSample(m_videoMedia, m_compressedData, 0, (*img_descr)->dataSize, 1, (SampleDescriptionHandle)img_descr, 1, 0, 0)) != noErr)
		throw TImageException(getFilePath(), "can't add image to movie media");

	DisposeHandle((Handle)img_descr);
}