bool PBImage::writePBI(string fileName, const YUVImage& image, int pan, int tilt) { protobuf::ProtoBufFrame frame; protobuf::ProtoBufFrame_Camera* camera = frame.add_camera(); protobuf::ProtoBufFrame_Camera_CameraType* camera_type = camera->mutable_type(); camera_type->set_location(protobuf::HEAD_LEFT); camera_type->set_name("unknown"); camera_type->set_sensor_size_width(1600.0 * 0.0022); camera_type->set_sensor_size_height(1200.0 * 0.0022); protobuf::Vector* camera_position = camera->mutable_camera_position(); camera_position->set_x(pan); camera_position->set_y(tilt); camera_position->set_z(0); protobuf::ImageData* imageData = camera->mutable_image_data(); imageData->set_format(protobuf::YUV422_IMAGE); imageData->set_data(image.getImage(), (size_t) (uint32_t) (image.getHeight() * image.getWidth() * 2)); imageData->set_compressed(false); imageData->set_width(image.getWidth()); imageData->set_height(image.getHeight()); fstream output(fileName.c_str(), ios::out | ios::trunc | ios::binary); if (!output.good()) { Debugger::ERR("PBImage", "Could not open output stream"); } if (!frame.IsInitialized()) { Debugger::ERR("PBImage", "ProtoBuf is not initialized!"); string missing = frame.InitializationErrorString(); if (missing != "") { Debugger::ERR("PBImage", "Missing field in annotation: %s", missing.c_str()); } } size_t datasize = (size_t) (uint32_t) (frame.ByteSize()); char* data = (char*) malloc(datasize); if (!frame.SerializeToArray(data, (int) datasize)) { Debugger::ERR("PBImage", "Failed to write PBI!"); free(data); output.flush(); output.close(); return false; } output.write(data, (streamsize) datasize); free(data); output.flush(); output.close(); Debugger::INFO("PBImage", "Successfully wrote PBI %s!", fileName.c_str()); return true; }
bool ShadowImageLayerOGL::Init(const SharedImage& aFront) { if (aFront.type() == SharedImage::TSurfaceDescriptor) { SurfaceDescriptor surface = aFront.get_SurfaceDescriptor(); if (surface.type() == SurfaceDescriptor::TSharedTextureDescriptor) { SharedTextureDescriptor texture = surface.get_SharedTextureDescriptor(); mSize = texture.size(); mSharedHandle = texture.handle(); mShareType = texture.shareType(); mInverted = texture.inverted(); } else { AutoOpenSurface autoSurf(OPEN_READ_ONLY, surface); mSize = autoSurf.Size(); mTexImage = gl()->CreateTextureImage(nsIntSize(mSize.width, mSize.height), autoSurf.ContentType(), LOCAL_GL_CLAMP_TO_EDGE, mForceSingleTile ? TextureImage::ForceSingleTile : TextureImage::NoFlags); } } else { YUVImage yuv = aFront.get_YUVImage(); AutoOpenSurface surfY(OPEN_READ_ONLY, yuv.Ydata()); AutoOpenSurface surfU(OPEN_READ_ONLY, yuv.Udata()); mSize = surfY.Size(); mCbCrSize = surfU.Size(); if (!mYUVTexture[0].IsAllocated()) { mYUVTexture[0].Allocate(gl()); mYUVTexture[1].Allocate(gl()); mYUVTexture[2].Allocate(gl()); } NS_ASSERTION(mYUVTexture[0].IsAllocated() && mYUVTexture[1].IsAllocated() && mYUVTexture[2].IsAllocated(), "Texture allocation failed!"); gl()->MakeCurrent(); SetClamping(gl(), mYUVTexture[0].GetTextureID()); SetClamping(gl(), mYUVTexture[1].GetTextureID()); SetClamping(gl(), mYUVTexture[2].GetTextureID()); return true; } return false; }
bool ShadowImageLayerOGL::Init(const SharedImage& aFront) { if (aFront.type() == SharedImage::TSurfaceDescriptor) { SurfaceDescriptor desc = aFront.get_SurfaceDescriptor(); nsRefPtr<gfxASurface> surf = ShadowLayerForwarder::OpenDescriptor(desc); mSize = surf->GetSize(); mTexImage = gl()->CreateTextureImage(nsIntSize(mSize.width, mSize.height), surf->GetContentType(), LOCAL_GL_CLAMP_TO_EDGE, mForceSingleTile ? TextureImage::ForceSingleTile : TextureImage::NoFlags); return true; } else { YUVImage yuv = aFront.get_YUVImage(); nsRefPtr<gfxSharedImageSurface> surfY = gfxSharedImageSurface::Open(yuv.Ydata()); nsRefPtr<gfxSharedImageSurface> surfU = gfxSharedImageSurface::Open(yuv.Udata()); nsRefPtr<gfxSharedImageSurface> surfV = gfxSharedImageSurface::Open(yuv.Vdata()); mSize = surfY->GetSize(); mCbCrSize = surfU->GetSize(); if (!mYUVTexture[0].IsAllocated()) { mYUVTexture[0].Allocate(gl()); mYUVTexture[1].Allocate(gl()); mYUVTexture[2].Allocate(gl()); } NS_ASSERTION(mYUVTexture[0].IsAllocated() && mYUVTexture[1].IsAllocated() && mYUVTexture[2].IsAllocated(), "Texture allocation failed!"); gl()->MakeCurrent(); SetClamping(gl(), mYUVTexture[0].GetTextureID()); SetClamping(gl(), mYUVTexture[1].GetTextureID()); SetClamping(gl(), mYUVTexture[2].GetTextureID()); return true; } return false; }
void ShadowImageLayerOGL::UploadSharedYUVToTexture(const YUVImage& yuv) { AutoOpenSurface asurfY(OPEN_READ_ONLY, yuv.Ydata()); AutoOpenSurface asurfU(OPEN_READ_ONLY, yuv.Udata()); AutoOpenSurface asurfV(OPEN_READ_ONLY, yuv.Vdata()); nsRefPtr<gfxImageSurface> surfY = asurfY.GetAsImage(); nsRefPtr<gfxImageSurface> surfU = asurfU.GetAsImage(); nsRefPtr<gfxImageSurface> surfV = asurfV.GetAsImage(); mPictureRect = yuv.picture(); gfxIntSize size = surfY->GetSize(); gfxIntSize CbCrSize = surfU->GetSize(); if (size != mSize || mCbCrSize != CbCrSize || !mYUVTexture[0].IsAllocated()) { mSize = surfY->GetSize(); mCbCrSize = surfU->GetSize(); if (!mYUVTexture[0].IsAllocated()) { mYUVTexture[0].Allocate(gl()); mYUVTexture[1].Allocate(gl()); mYUVTexture[2].Allocate(gl()); } NS_ASSERTION(mYUVTexture[0].IsAllocated() && mYUVTexture[1].IsAllocated() && mYUVTexture[2].IsAllocated(), "Texture allocation failed!"); gl()->MakeCurrent(); SetClamping(gl(), mYUVTexture[0].GetTextureID()); SetClamping(gl(), mYUVTexture[1].GetTextureID()); SetClamping(gl(), mYUVTexture[2].GetTextureID()); } PlanarYCbCrImage::Data data; data.mYChannel = surfY->Data(); data.mYStride = surfY->Stride(); data.mYSize = surfY->GetSize(); data.mCbChannel = surfU->Data(); data.mCrChannel = surfV->Data(); data.mCbCrStride = surfU->Stride(); data.mCbCrSize = surfU->GetSize(); UploadYUVToTexture(gl(), data, &mYUVTexture[0], &mYUVTexture[1], &mYUVTexture[2]); }
// Builds per-channel (Y/U/V) histograms from the given image, quantizing the
// 0..255 channel values down to `_colorRange` bins, then normalizes each bin
// relative to the total pixel count.
void Histogram::build(YUVImage& yuvImage, u16 _colorRange) {
    setColorRange(_colorRange);
    orgWidth(yuvImage.orgWidth());
    orgHeight(yuvImage.orgHeight());

    // Clear histogram.
    // BUGFIX: the original comment promised a clear, but the bins were never
    // reset, so successive build() calls accumulated counts from previous
    // images. (Harmless if setColorRange() already zeroes the bins — this
    // makes the reset explicit either way.)
    for (size_t i = 0; i < colorRange_; i++) {
        y_[i] = 0;
        u_[i] = 0;
        v_[i] = 0;
    }

    // Accumulate: map each 0..255 channel value into one of colorRange_ bins.
    for (size_t i = 0; i < YUVImage::nPixels; i++) {
        y_[yuvImage.y(i) * _colorRange / 256]++;
        u_[yuvImage.u(i) * _colorRange / 256]++;
        v_[yuvImage.v(i) * _colorRange / 256]++;
    }

    // Normalize bins to a fixed-point fraction of the total pixel count.
    for (size_t i = 0; i < colorRange_; i++) {
        y_[i] = y_[i] * 256 * 256 / YUVImage::nPixels;
        u_[i] = u_[i] * 256 * 256 / YUVImage::nPixels;
        v_[i] = v_[i] * 256 * 256 / YUVImage::nPixels;
    }
}
// Restores a YUVImage's dimensions from a protobuf-encoded stream.
// NOTE(review): ParseFromZeroCopyStream's return value is ignored — on a
// malformed stream, width/height are whatever the partial parse produced;
// consider checking it.
// NOTE(review): the pixel payload (img.data(), written by serialize()) is
// discarded and NULL is passed instead — presumably updateImage(NULL, w, h)
// only (re)allocates/resizes the buffer. TODO confirm this is intentional.
void Serializer<YUVImage>::deserialize(istream& stream, YUVImage& representation) {
    protobuf::YUVImage img;
    // Adapter so protobuf can read directly from the istream.
    IstreamInputStream buf(&stream);
    img.ParseFromZeroCopyStream(&buf);
    int width = img.width();
    int height = img.height();
    representation.updateImage(NULL, width, height);
}
// Serializes a YUVImage into a protobuf message written to `stream`.
// Images without a pixel buffer are silently skipped (nothing is written).
void Serializer<YUVImage>::serialize(const YUVImage& representation, ostream& stream) {
    // Guard clause: no pixel buffer, nothing to serialize.
    if (representation.getImage() == NULL) {
        return;
    }

    protobuf::YUVImage img;
    img.set_height(representation.getHeight());
    img.set_width(representation.getWidth());

    // YUV422 payload size: width * height * bytes-per-pixel.
    const size_t byteCount = (size_t) (uint32_t)
        (representation.getWidth() * representation.getHeight() * SIZE_OF_YUV422_PIXEL);
    img.set_data(representation.getImage(), byteCount);

    // Adapter so protobuf can write directly to the ostream.
    OstreamOutputStream buf(&stream);
    img.SerializeToZeroCopyStream(&buf);
}
size_t MJPEGStreamer::compress_yuyv_to_jpeg(const YUVImage& src, uint8_t* buffer, size_t size, int quality) const { struct jpeg_compress_struct cinfo; struct jpeg_error_mgr jerr; JSAMPROW row_pointer[1]; uint8_t *line_buffer; const uint8_t *yuyv; int z; static size_t written; line_buffer = (uint8_t*)calloc((size_t)(uint32_t)(src.getWidth() * 3), (size_t)1); if (line_buffer == NULL) { return 0; } yuyv = (const uint8_t*)src.getImage(); cinfo.err = jpeg_std_error (&jerr); jpeg_create_compress (&cinfo); /* jpeg_stdio_dest (&cinfo, file); */ dest_buffer(&cinfo, buffer, size, &written); cinfo.image_width = (uint32_t)src.getWidth(); cinfo.image_height = (uint32_t)src.getHeight(); cinfo.input_components = 3; cinfo.in_color_space = JCS_RGB; jpeg_set_defaults (&cinfo); jpeg_set_quality (&cinfo, quality, TRUE); jpeg_start_compress (&cinfo, TRUE); z = 0; while (cinfo.next_scanline < (uint32_t)src.getHeight()) { int x; uint8_t *ptr = line_buffer; for (x = 0; x < src.getWidth(); x++) { int r, g, b; int y, u, v; if (!z) y = yuyv[0] << 8; else y = yuyv[2] << 8; u = yuyv[1] - 128; v = yuyv[3] - 128; //lint -e{702} r = (y + (359 * v)) >> 8; //lint -e{702} g = ((y - (88 * u)) - (183 * v)) >> 8; //lint -e{702} b = (y + (454 * u)) >> 8; *(ptr++) = (uint8_t)((r > 255) ? 255 : ((r < 0) ? 0 : r)); *(ptr++) = (uint8_t)((g > 255) ? 255 : ((g < 0) ? 0 : g)); *(ptr++) = (uint8_t)((b > 255) ? 255 : ((b < 0) ? 0 : b)); if (z++) { z = 0; yuyv += 4; } } row_pointer[0] = line_buffer; jpeg_write_scanlines (&cinfo, row_pointer, 1); } jpeg_finish_compress (&cinfo); jpeg_destroy_compress (&cinfo); free (line_buffer); return (written); }