// Wraps the pixel data of a cairo image surface in a freshly created Evas
// image object on the given canvas. Returns 0 (null) if the surface is
// invalid, has an unsupported type/format/geometry, or if Evas cannot
// mirror cairo's row stride.
PassRefPtr<Evas_Object> evasObjectFromCairoImageSurface(Evas* canvas, cairo_surface_t* surface)
{
    EINA_SAFETY_ON_NULL_RETURN_VAL(canvas, 0);
    EINA_SAFETY_ON_NULL_RETURN_VAL(surface, 0);

    // The surface must be in a valid state before its data is touched.
    const cairo_status_t surfaceStatus = cairo_surface_status(surface);
    if (surfaceStatus != CAIRO_STATUS_SUCCESS) {
        EINA_LOG_ERR("cairo surface is invalid: %s", cairo_status_to_string(surfaceStatus));
        return 0;
    }

    // Only raster (image) surfaces expose a pixel buffer we can copy.
    const cairo_surface_type_t surfaceType = cairo_surface_get_type(surface);
    if (surfaceType != CAIRO_SURFACE_TYPE_IMAGE) {
        EINA_LOG_ERR("unknown surface type %d, required %d (CAIRO_SURFACE_TYPE_IMAGE).", surfaceType, CAIRO_SURFACE_TYPE_IMAGE);
        return 0;
    }

    const cairo_format_t pixelFormat = cairo_image_surface_get_format(surface);
    if (pixelFormat != CAIRO_FORMAT_ARGB32 && pixelFormat != CAIRO_FORMAT_RGB24) {
        EINA_LOG_ERR("unknown surface format %d, expected %d or %d.", pixelFormat, CAIRO_FORMAT_ARGB32, CAIRO_FORMAT_RGB24);
        return 0;
    }

    const int surfaceWidth = cairo_image_surface_get_width(surface);
    const int surfaceHeight = cairo_image_surface_get_height(surface);
    const int surfaceStride = cairo_image_surface_get_stride(surface);
    if (surfaceWidth <= 0 || surfaceHeight <= 0 || surfaceStride <= 0) {
        EINA_LOG_ERR("invalid image size %dx%d, stride=%d", surfaceWidth, surfaceHeight, surfaceStride);
        return 0;
    }

    void* pixelData = cairo_image_surface_get_data(surface);
    if (!pixelData) {
        EINA_LOG_ERR("could not get source data.");
        return 0;
    }

    RefPtr<Evas_Object> evasImage = adoptRef(evas_object_image_filled_add(canvas));
    if (!evasImage) {
        EINA_LOG_ERR("could not add image to canvas.");
        return 0;
    }

    evas_object_image_colorspace_set(evasImage.get(), EVAS_COLORSPACE_ARGB8888);
    evas_object_image_size_set(evasImage.get(), surfaceWidth, surfaceHeight);
    // Only ARGB32 carries an alpha channel; RGB24 is treated as opaque.
    evas_object_image_alpha_set(evasImage.get(), pixelFormat == CAIRO_FORMAT_ARGB32);

    // Evas chooses its own row stride after size/colorspace are set; a plain
    // buffer copy is only valid when it matches cairo's.
    const int evasStride = evas_object_image_stride_get(evasImage.get());
    if (evasStride != surfaceStride) {
        EINA_LOG_ERR("evas' stride %d diverges from cairo's %d.", evasStride, surfaceStride);
        return 0;
    }

    evas_object_image_data_copy_set(evasImage.get(), pixelData);
    return evasImage.release();
}
/* Resizes a buffer-engine Ecore_Evas: records the requested size (clamped
 * to at least 1x1), updates the Evas output geometry, rebinds the engine's
 * destination buffer to a pixel block of the new size, and marks the whole
 * canvas damaged so it gets redrawn. No-op when the size is unchanged. */
static void _ecore_evas_resize(Ecore_Evas *ee, int w, int h)
{
   Evas_Engine_Info_Buffer *einfo;
   Ecore_Evas_Engine_Buffer_Data *bdata = ee->engine.data;
   int stride = 0;

   /* Clamp before storing: req.* therefore holds the clamped size, not the
    * raw caller-supplied one. */
   if (w < 1) w = 1;
   if (h < 1) h = 1;
   ee->req.w = w;
   ee->req.h = h;
   if ((w == ee->w) && (h == ee->h)) return;
   ee->w = w;
   ee->h = h;
   evas_output_size_set(ee->evas, ee->w, ee->h);
   evas_output_viewport_set(ee->evas, 0, 0, ee->w, ee->h);
   /* Everything must be repainted at the new size. */
   evas_damage_rectangle_add(ee->evas, 0, 0, ee->w, ee->h);

   if (bdata->image)
     {
        /* Backed by an Evas image object: borrow its (writable) pixel
         * buffer and use the stride Evas picked for it. */
        bdata->pixels = evas_object_image_data_get(bdata->image, 1);
        stride = evas_object_image_stride_get(bdata->image);
     }
   else
     {
        /* Self-managed buffer: free the old one (if any) through the
         * user-provided free callback and allocate a new 32bpp block.
         * NOTE(review): stride assumes tightly packed rows of 4-byte
         * pixels (ee->w * sizeof(int)). */
        if (bdata->pixels) bdata->free_func(bdata->data, bdata->pixels);
        bdata->pixels = bdata->alloc_func(bdata->data, ee->w * ee->h * sizeof(int));
        stride = ee->w * sizeof(int);
     }

   /* Point the buffer engine at the (possibly new) destination memory. */
   einfo = (Evas_Engine_Info_Buffer *)evas_engine_info_get(ee->evas);
   if (einfo)
     {
        if (ee->alpha)
          einfo->info.depth_type = EVAS_ENGINE_BUFFER_DEPTH_ARGB32;
        else
          einfo->info.depth_type = EVAS_ENGINE_BUFFER_DEPTH_RGB32;
        einfo->info.dest_buffer = bdata->pixels;
        einfo->info.dest_buffer_row_bytes = stride;
        einfo->info.use_color_key = 0;
        einfo->info.alpha_threshold = 0;
        einfo->info.func.new_update_region = NULL;
        einfo->info.func.free_update_region = NULL;
        if (!evas_engine_info_set(ee->evas, (Evas_Engine_Info *)einfo))
          {
             ERR("evas_engine_info_set() for engine '%s' failed.", ee->driver);
          }
     }

   /* Balance the evas_object_image_data_get() above; otherwise just flag
    * that the buffer was resized. */
   if (bdata->image)
     evas_object_image_data_set(bdata->image, bdata->pixels);
   else
     bdata->resized = 1;
}
// Builds a grayscale ARGB image visualizing per-pixel differences: each byte
// of `buffer` (one per pixel) becomes an opaque gray pixel. Aborts via
// abortWithErrorMessage() on failure, so it never returns null.
static Evas_Object* differenceImageFromDifferenceBuffer(Evas* evas, unsigned char* buffer, int width, int height)
{
    Evas_Object* image = evas_object_image_filled_add(evas);
    if (!image)
        abortWithErrorMessage("could not create difference image");

    evas_object_image_size_set(image, width, height);
    evas_object_image_colorspace_set(image, EVAS_COLORSPACE_ARGB8888);

    unsigned char* diffPixels = static_cast<unsigned char*>(evas_object_image_data_get(image, EINA_TRUE));
    // Fix: evas_object_image_data_get() can return null (e.g. if the pixel
    // buffer could not be allocated); previously this was dereferenced
    // unconditionally below and would crash instead of reporting the error.
    if (!diffPixels)
        abortWithErrorMessage("could not get difference image data");
    const int rowStride = evas_object_image_stride_get(image);

    // Note: the x-outer / y-inner traversal is intentional — `buffer` is
    // filled in this same column-major sequence by its producer, so the
    // consumption order here must match it.
    for (int x = 0; x < width; x++) {
        for (int y = 0; y < height; y++) {
            unsigned char* diffPixel = pixelFromImageData(diffPixels, rowStride, x, y);
            // Equal R/G/B gives gray; fully opaque alpha.
            diffPixel[Red] = diffPixel[Green] = diffPixel[Blue] = *buffer++;
            diffPixel[Alpha] = 0xff;
        }
    }

    // Balance the evas_object_image_data_get() call above.
    evas_object_image_data_set(image, diffPixels);
    return image;
}
// Compares an actual test image against a baseline of the same size and
// returns a difference percentage (0 when within gTolerance, 100 when the
// sizes differ). On a reported difference, also produces a grayscale
// visualization image via differenceImageFromDifferenceBuffer() and stores
// it in `differenceImage`.
static float calculateDifference(Evas_Object* baselineImage, Evas_Object* actualImage, RefPtr<Evas_Object>& differenceImage)
{
    int width, height, baselineWidth, baselineHeight;
    evas_object_image_size_get(actualImage, &width, &height);
    evas_object_image_size_get(baselineImage, &baselineWidth, &baselineHeight);

    if (width != baselineWidth || height != baselineHeight) {
        printf("Error, test and reference image have different sizes.\n");
        return 100; // Completely different.
    }

    // One byte per pixel: the per-pixel distance scaled to 0..255. This is
    // later consumed by differenceImageFromDifferenceBuffer(), which reads
    // it in the same x-outer/y-inner order it is written in below.
    OwnArrayPtr<unsigned char> diffBuffer = adoptArrayPtr(new unsigned char[width * height]);
    if (!diffBuffer)
        abortWithErrorMessage("could not create difference buffer");

    const int actualRowStride = evas_object_image_stride_get(actualImage);
    const int baseRowStride = evas_object_image_stride_get(baselineImage);
    unsigned numberOfDifferentPixels = 0;
    float totalDistance = 0;
    // NOTE(review): maxDistance is tracked but never read in this function.
    float maxDistance = 0;
    // Read-only access (EINA_FALSE): pixels are compared, not modified.
    unsigned char* actualPixels = static_cast<unsigned char*>(evas_object_image_data_get(actualImage, EINA_FALSE));
    unsigned char* basePixels = static_cast<unsigned char*>(evas_object_image_data_get(baselineImage, EINA_FALSE));
    unsigned char* currentDiffPixel = diffBuffer.get();

    for (int x = 0; x < width; x++) {
        for (int y = 0; y < height; y++) {
            unsigned char* actualPixel = pixelFromImageData(actualPixels, actualRowStride, x, y);
            unsigned char* basePixel = pixelFromImageData(basePixels, baseRowStride, x, y);

            // Normalized distance in [0, 1]; always recorded in the buffer,
            // but only distances of at least one 1/255 step count as a
            // "different" pixel for the statistics.
            const float distance = calculatePixelDifference(basePixel, actualPixel);
            *currentDiffPixel++ = static_cast<unsigned char>(distance * 255.0f);

            if (distance >= 1.0f / 255.0f) {
                ++numberOfDifferentPixels;
                totalDistance += distance;
                maxDistance = std::max<float>(maxDistance, distance);
            }
        }
    }

    // When using evas_object_image_data_get(), a complementary
    // evas_object_image_data_set() must be issued to balance the reference
    // count, even if the image hasn't been changed.
    evas_object_image_data_set(baselineImage, basePixels);
    evas_object_image_data_set(actualImage, actualPixels);

    // Compute the difference as a percentage combining both the number of
    // different pixels and their difference amount i.e. the average distance
    // over the entire image
    float difference = 0;
    if (numberOfDifferentPixels)
        difference = 100.0f * totalDistance / (height * width);

    if (difference <= gTolerance)
        difference = 0;
    else {
        difference = roundf(difference * 100.0f) / 100.0f;
        difference = std::max(difference, 0.01f); // round to 2 decimal places

        differenceImage = adoptRef(differenceImageFromDifferenceBuffer(evas_object_evas_get(baselineImage), diffBuffer.get(), width, height));
    }

    return difference;
}