/* Flush pending drawing for the surface: if no CPU fallback is active,
 * simply flush the pipe driver's render cache; otherwise tear down the
 * fallback image and its mapping, propagating any error it recorded. */
static cairo_status_t
gallium_surface_flush (void *abstract_surface)
{
    gallium_surface_t *surface = abstract_surface;
    gallium_device_t *device = gallium_device (surface);
    cairo_status_t status;

    if (surface->fallback == NULL) {
        /* No outstanding CPU-side pixels: a hardware flush suffices. */
        device->pipe->flush (device->pipe,
                             PIPE_FLUSH_RENDER_CACHE,
                             NULL);
        return CAIRO_STATUS_SUCCESS;
    }

    /* kill any outstanding maps */
    cairo_surface_finish (surface->fallback);

    status = cairo_device_acquire (&device->drm.base);
    if (likely (status == CAIRO_STATUS_SUCCESS)) {
        /* Unmap and destroy the transfer that backed the fallback image,
         * holding the device lock around the pipe calls. */
        device->pipe->transfer_unmap (device->pipe,
                                      surface->map_transfer);
        device->pipe->transfer_destroy (device->pipe,
                                        surface->map_transfer);
        surface->map_transfer = NULL;
        cairo_device_release (&device->drm.base);
    }

    /* Capture any error from the fallback before dropping our reference. */
    status = cairo_surface_status (surface->fallback);
    cairo_surface_destroy (surface->fallback);
    surface->fallback = NULL;

    return status;
}
/* Ruby binding for cairo_device_release(): drops the lock taken by the
 * matching acquire on the wrapped device, then checks the device status
 * (cr_device_check_status presumably raises on error — verify against
 * the binding's helpers). Returns self for method chaining. */
static VALUE
cr_device_release (VALUE self)
{
    cairo_device_release (_SELF);
    cr_device_check_status (_SELF);
    return self;
}
/* Provide a CPU-accessible image view of the surface for use as a source.
 *
 * If a fallback image already exists it is referenced and returned.
 * Otherwise the texture is read back through a pipe transfer; the live
 * transfer is handed to the caller via *image_extra and must be torn
 * down in the matching release hook.
 *
 * BUGFIXES over the original:
 *  - pipe_get_transfer() and transfer_map() results are now checked, so a
 *    failed mapping returns CAIRO_STATUS_NO_MEMORY instead of handing out
 *    an image backed by a NULL pointer;
 *  - the transfer no longer leaks when image creation fails. */
static cairo_status_t
gallium_surface_acquire_source_image (void *abstract_surface,
                                      cairo_image_surface_t **image_out,
                                      void **image_extra)
{
    gallium_surface_t *surface = abstract_surface;
    gallium_device_t *device = gallium_device (surface);
    cairo_format_t format;
    cairo_surface_t *image;
    cairo_status_t status;
    struct pipe_transfer *transfer;
    void *ptr;

    /* A fallback image already owns CPU pixels: just add a reference. */
    if (surface->fallback != NULL) {
        *image_out = (cairo_image_surface_t *)
            cairo_surface_reference (surface->fallback);
        *image_extra = NULL;
        return CAIRO_STATUS_SUCCESS;
    }

    /* Degenerate surfaces need no GPU readback. */
    if (unlikely (surface->drm.width == 0 || surface->drm.height == 0)) {
        image = cairo_image_surface_create (surface->drm.format, 0, 0);
        if (unlikely (image->status))
            return image->status;

        *image_out = (cairo_image_surface_t *) image;
        *image_extra = NULL;
        return CAIRO_STATUS_SUCCESS;
    }

    format = _cairo_format_from_pipe_format (surface->pipe_format);
    if (format == CAIRO_FORMAT_INVALID)
        return CAIRO_INT_STATUS_UNSUPPORTED;

    status = cairo_device_acquire (&device->drm.base);
    if (unlikely (status))
        return status;

    transfer = pipe_get_transfer (device->pipe,
                                  surface->texture, 0, 0, 0,
                                  PIPE_TRANSFER_READ,
                                  0, 0,
                                  surface->drm.width,
                                  surface->drm.height);
    if (unlikely (transfer == NULL)) {
        cairo_device_release (&device->drm.base);
        return _cairo_error (CAIRO_STATUS_NO_MEMORY);
    }

    ptr = device->pipe->transfer_map (device->pipe, transfer);
    if (unlikely (ptr == NULL)) {
        /* Destroy the unusable transfer while we still hold the device. */
        device->pipe->transfer_destroy (device->pipe, transfer);
        cairo_device_release (&device->drm.base);
        return _cairo_error (CAIRO_STATUS_NO_MEMORY);
    }
    cairo_device_release (&device->drm.base);

    image = cairo_image_surface_create_for_data (ptr, format,
                                                 surface->drm.width,
                                                 surface->drm.height,
                                                 surface->drm.stride);
    if (unlikely (image->status)) {
        /* Previously the transfer leaked on this path. */
        status = image->status;
        if (cairo_device_acquire (&device->drm.base) == CAIRO_STATUS_SUCCESS) {
            device->pipe->transfer_unmap (device->pipe, transfer);
            device->pipe->transfer_destroy (device->pipe, transfer);
            cairo_device_release (&device->drm.base);
        }
        return status;
    }

    *image_out = (cairo_image_surface_t *) image;
    *image_extra = transfer;
    return CAIRO_STATUS_SUCCESS;
}
static cairo_surface_t * display_create_drm_surface_from_file(struct display *display, const char *filename, struct rectangle *rect) { cairo_surface_t *surface; GdkPixbuf *pixbuf; GError *error = NULL; int stride, i; unsigned char *pixels, *p, *end; struct drm_surface_data *data; pixbuf = gdk_pixbuf_new_from_file_at_scale(filename, rect->width, rect->height, FALSE, &error); if (error != NULL) return NULL; if (!gdk_pixbuf_get_has_alpha(pixbuf) || gdk_pixbuf_get_n_channels(pixbuf) != 4) { gdk_pixbuf_unref(pixbuf); return NULL; } stride = gdk_pixbuf_get_rowstride(pixbuf); pixels = gdk_pixbuf_get_pixels(pixbuf); for (i = 0; i < rect->height; i++) { p = pixels + i * stride; end = p + rect->width * 4; while (p < end) { unsigned int t; MULT(p[0], p[0], p[3], t); MULT(p[1], p[1], p[3], t); MULT(p[2], p[2], p[3], t); p += 4; } } surface = display_create_drm_surface(display, rect); data = cairo_surface_get_user_data(surface, &surface_data_key); cairo_device_acquire(display->device); glBindTexture(GL_TEXTURE_2D, data->texture); glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, rect->width, rect->height, GL_RGBA, GL_UNSIGNED_BYTE, pixels); cairo_device_release(display->device); gdk_pixbuf_unref(pixbuf); return surface; }
static void _cairo_boilerplate_egl_synchronize (void *closure) { egl_target_closure_t *gltc = closure; if (cairo_device_acquire (gltc->device)) return; glFinish (); cairo_device_release (gltc->device); }
/* Create a cairo GL surface backed by an EGL DRM image that is shared
 * with the compositor via a wl_drm buffer. Ownership of the per-surface
 * bookkeeping (texture, image, buffer) is attached as user data and
 * released by drm_surface_data_destroy when the surface dies. */
static cairo_surface_t *
display_create_drm_surface(struct display *display,
                           struct rectangle *rectangle)
{
    struct drm_surface_data *data;
    EGLDisplay dpy = display->dpy;
    cairo_surface_t *surface;
    struct wl_visual *visual;
    EGLint name, stride;
    EGLint image_attribs[] = {
        EGL_WIDTH, 0,
        EGL_HEIGHT, 0,
        EGL_DRM_BUFFER_FORMAT_MESA, EGL_DRM_BUFFER_FORMAT_ARGB32_MESA,
        EGL_DRM_BUFFER_USE_MESA, EGL_DRM_BUFFER_USE_SCANOUT_MESA,
        EGL_NONE
    };

    data = malloc(sizeof *data);
    if (data == NULL)
        return NULL;

    data->display = display;

    /* Patch the width/height slots of the attribute list before use. */
    image_attribs[1] = rectangle->width;
    image_attribs[3] = rectangle->height;
    data->image = eglCreateDRMImageMESA(dpy, image_attribs);
    /* NOTE(review): eglCreateDRMImageMESA can fail and return a no-image
     * handle; that case is not checked here — confirm callers tolerate it. */

    /* Bind the EGL image to a new GL texture under the device lock. */
    cairo_device_acquire(display->device);
    glGenTextures(1, &data->texture);
    glBindTexture(GL_TEXTURE_2D, data->texture);
    glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, data->image);
    cairo_device_release(display->device);

    /* Export a DRM buffer name so the compositor can reference the pixels. */
    eglExportDRMImageMESA(display->dpy, data->image, &name, NULL, &stride);

    visual = wl_display_get_premultiplied_argb_visual(display->display);
    data->data.buffer = wl_drm_create_buffer(display->drm, name,
                                             rectangle->width,
                                             rectangle->height,
                                             stride, visual);

    surface = cairo_gl_surface_create_for_texture(display->device,
                                                  CAIRO_CONTENT_COLOR_ALPHA,
                                                  data->texture,
                                                  rectangle->width,
                                                  rectangle->height);

    cairo_surface_set_user_data (surface, &surface_data_key,
                                 data, drm_surface_data_destroy);

    return surface;
}
/* User-data destructor for DRM-backed surfaces: release the GL texture
 * (under the device lock), the EGL image, and the wayland buffer that
 * display_create_drm_surface() created. */
static void
drm_surface_data_destroy(void *p)
{
    struct drm_surface_data *data = p;
    struct display *display = data->display;

    /* GL calls must run with the cairo device acquired. */
    cairo_device_acquire(display->device);
    glDeleteTextures(1, &data->texture);
    cairo_device_release(display->device);

    eglDestroyImageKHR(display->dpy, data->image);
    wl_buffer_destroy(data->data.buffer);
}
static cairo_status_t gallium_surface_finish (void *abstract_surface) { gallium_surface_t *surface = abstract_surface; gallium_device_t *device = gallium_device (surface); cairo_status_t status; status = cairo_device_acquire (&device->drm.base); if (likely (status == CAIRO_STATUS_SUCCESS)) { pipe_resource_reference (&surface->texture, NULL); cairo_device_release (&device->drm.base); } return _cairo_drm_surface_finish (&surface->drm); }
/* Composite span coverage produced by draw_func onto dst through
 * op/pattern, using the i965 span shaders. Cleanup runs through the
 * goto labels in strict reverse-acquisition order. */
cairo_status_t
i965_clip_and_composite_spans (i965_surface_t *dst,
                               cairo_operator_t op,
                               const cairo_pattern_t *pattern,
                               cairo_antialias_t antialias,
                               i965_spans_func_t draw_func,
                               void *draw_closure,
                               const cairo_composite_rectangles_t *extents,
                               cairo_clip_t *clip)
{
    i965_spans_t spans;
    i965_device_t *device;
    cairo_status_t status;

    /* CLEAR is rewritten as DEST_OUT with a white source. */
    if (op == CAIRO_OPERATOR_CLEAR) {
        pattern = &_cairo_pattern_white.base;
        op = CAIRO_OPERATOR_DEST_OUT;
    }

    status = i965_spans_init (&spans, dst, op, pattern, antialias, clip, extents);
    if (unlikely (status))
        return status;

    /* Spans carry per-pixel alpha coverage through the mask channel. */
    spans.shader.mask.base.content = CAIRO_CONTENT_ALPHA;
    spans.shader.mask.type.fragment = FS_SPANS;
    spans.shader.mask.type.vertex = VS_SPANS;
    spans.shader.mask.type.pattern = PATTERN_BASE;

    status = cairo_device_acquire (dst->intel.drm.base.device);
    if (unlikely (status))
        goto CLEANUP_SPANS;

    device = i965_device (dst);
    status = i965_shader_commit (&spans.shader, device);
    if (unlikely (status))
        goto CLEANUP_DEVICE;

    /* Let the caller's span generator emit its geometry. */
    status = draw_func (draw_closure, &spans.renderer, spans.extents);

    /* Replay accumulated vertices against the clip region, if any. */
    if (spans.clip_region != NULL && status == CAIRO_STATUS_SUCCESS)
        i965_clipped_vertices (device, &spans.head, spans.clip_region);

  CLEANUP_DEVICE:
    cairo_device_release (dst->intel.drm.base.device);
  CLEANUP_SPANS:
    i965_spans_fini (&spans);

    return status;
}
/* Build a one-line "<vendor> <renderer> <version>" description of the GL
 * implementation behind this target. Returns a freshly allocated string,
 * or NULL if the device cannot be acquired. */
static char *
_cairo_boilerplate_gl_describe (void *closure)
{
    gl_target_closure_t *gltc = closure;
    const GLubyte *gl_strings[3];
    char *description;

    if (cairo_device_acquire (gltc->device))
        return NULL;

    /* glGetString must be called with the context current, i.e. while
     * the device is acquired. */
    gl_strings[0] = glGetString (GL_VENDOR);
    gl_strings[1] = glGetString (GL_RENDERER);
    gl_strings[2] = glGetString (GL_VERSION);
    xasprintf (&description, "%s %s %s",
               gl_strings[0], gl_strings[1], gl_strings[2]);

    cairo_device_release (gltc->device);

    return description;
}
/* Map the surface's texture for direct CPU read/write and wrap the
 * mapping in an image surface. The live transfer is stashed in
 * surface->map_transfer for later unmap/destroy (see flush).
 *
 * BUGFIX over the original: on mapping failure, the transfer was
 * destroyed *after* cairo_device_release(); every other pipe call in
 * this backend runs under the acquired device, so the cleanup now
 * happens before the release. */
static cairo_surface_t *
gallium_surface_map_to_image (gallium_surface_t *surface)
{
    gallium_device_t *device = gallium_device (surface);
    cairo_status_t status;
    void *ptr = NULL;

    status = cairo_device_acquire (&device->drm.base);
    if (unlikely (status))
        return _cairo_surface_create_in_error (status);

    surface->map_transfer =
        pipe_get_transfer (device->pipe,
                           surface->texture, 0, 0, 0,
                           PIPE_TRANSFER_MAP_DIRECTLY |
                           PIPE_TRANSFER_READ_WRITE,
                           0, 0,
                           surface->drm.width,
                           surface->drm.height);
    if (likely (surface->map_transfer != NULL))
        ptr = device->pipe->transfer_map (device->pipe,
                                          surface->map_transfer);

    if (unlikely (ptr == NULL)) {
        /* Tear down the failed transfer while the device is still held. */
        if (surface->map_transfer != NULL) {
            device->pipe->transfer_destroy (device->pipe,
                                            surface->map_transfer);
            surface->map_transfer = NULL;
        }
        cairo_device_release (&device->drm.base);
        return _cairo_surface_create_in_error (_cairo_error (CAIRO_STATUS_NO_MEMORY));
    }

    cairo_device_release (&device->drm.base);

    return cairo_image_surface_create_for_data (ptr,
                                                surface->drm.format,
                                                surface->drm.width,
                                                surface->drm.height,
                                                surface->map_transfer->stride);
}
/* Create a surface similar to @abstract_src with the given content and
 * size. Returns NULL (without setting an error) when the device cannot
 * host it — oversized dimensions or an unsupported destination format —
 * so the caller can fall back; returns an error surface only when the
 * device itself cannot be acquired. */
static cairo_surface_t *
gallium_surface_create_similar (void *abstract_src,
                                cairo_content_t content,
                                int width, int height)
{
    gallium_surface_t *other = abstract_src;
    gallium_device_t *device = gallium_device (other);
    enum pipe_format pipe_format;
    cairo_surface_t *similar = NULL;
    cairo_status_t status;

    status = cairo_device_acquire (&device->drm.base);
    if (unlikely (status))
        return _cairo_surface_create_in_error (status);

    if (MAX (width, height) <= device->max_size) {
        /* Reuse the source's format when the content matches. */
        if (content == other->drm.base.content)
            pipe_format = other->pipe_format;
        else
            pipe_format = pipe_format_from_content (content);

        if (format_is_supported_destination (device, pipe_format)) {
            similar = gallium_surface_create_internal (device,
                                                       pipe_format,
                                                       width, height);
        }
    }

    cairo_device_release (&device->drm.base);

    return similar;
}
/* Render a run of glyphs onto the surface. When glyphs overlap or the
 * operator is unbounded, coverage is first accumulated into an A8 (or
 * ARGB32 for subpixel AA) mask surface with OPERATOR_ADD and composited
 * in a second pass via i965_surface_mask_internal; otherwise glyphs are
 * drawn directly. Falls back to path filling for glyphs the cache
 * cannot handle (CAIRO_INT_STATUS_UNSUPPORTED). */
cairo_int_status_t
i965_surface_glyphs (void *abstract_surface,
                     cairo_operator_t op,
                     const cairo_pattern_t *source,
                     cairo_glyph_t *g,
                     int num_glyphs,
                     cairo_scaled_font_t *scaled_font,
                     cairo_clip_t *clip,
                     int *num_remaining)
{
    i965_surface_t *surface = abstract_surface;
    i965_surface_t *mask = NULL;
    i965_device_t *device;
    i965_glyphs_t glyphs;
    cairo_composite_rectangles_t extents;
    cairo_clip_t local_clip;
    cairo_bool_t have_clip = FALSE;
    cairo_bool_t overlap;
    cairo_region_t *clip_region = NULL;
    intel_bo_t *last_bo = NULL;
    cairo_scaled_glyph_t *glyph_cache[64];
    cairo_status_t status;
    int mask_x = 0, mask_y = 0;
    int i = 0;

    *num_remaining = 0;
    status = _cairo_composite_rectangles_init_for_glyphs (&extents,
                                                          surface->intel.drm.width,
                                                          surface->intel.drm.height,
                                                          op, source,
                                                          scaled_font,
                                                          g, num_glyphs,
                                                          clip,
                                                          &overlap);
    if (unlikely (status))
        return status;

    /* A clip that fully contains the mask extents is a no-op. */
    if (clip != NULL && _cairo_clip_contains_rectangle (clip, &extents.mask))
        clip = NULL;

    if (clip != NULL && extents.is_bounded) {
        clip = _cairo_clip_init_copy (&local_clip, clip);
        status = _cairo_clip_rectangle (clip, &extents.bounded);
        if (unlikely (status))
            return status;

        have_clip = TRUE;
    }

    if (overlap || ! extents.is_bounded) {
        cairo_format_t format;

        /* Overlapping or unbounded: accumulate coverage into a mask. */
        format = CAIRO_FORMAT_A8;
        if (scaled_font->options.antialias == CAIRO_ANTIALIAS_SUBPIXEL)
            format = CAIRO_FORMAT_ARGB32;

        mask = (i965_surface_t *)
            i965_surface_create_internal (&i965_device (surface)->intel.base,
                                          format,
                                          extents.bounded.width,
                                          extents.bounded.height,
                                          I965_TILING_DEFAULT,
                                          TRUE);
        if (unlikely (mask->intel.drm.base.status))
            return mask->intel.drm.base.status;

        status = _cairo_surface_paint (&mask->intel.drm.base,
                                       CAIRO_OPERATOR_CLEAR,
                                       &_cairo_pattern_clear.base,
                                       NULL);
        if (unlikely (status)) {
            cairo_surface_destroy (&mask->intel.drm.base);
            return status;
        }

        /* Glyph coverage is ADDed into the mask with a white source. */
        i965_shader_init (&glyphs.shader, mask, CAIRO_OPERATOR_ADD);

        status = i965_shader_acquire_pattern (&glyphs.shader,
                                              &glyphs.shader.source,
                                              &_cairo_pattern_white.base,
                                              &extents.bounded);
        if (unlikely (status)) {
            cairo_surface_destroy (&mask->intel.drm.base);
            return status;
        }

        /* Glyph coordinates are shifted into mask-local space. */
        mask_x = -extents.bounded.x;
        mask_y = -extents.bounded.y;
    } else {
        i965_shader_init (&glyphs.shader, surface, op);

        status = i965_shader_acquire_pattern (&glyphs.shader,
                                              &glyphs.shader.source,
                                              source,
                                              &extents.bounded);
        if (unlikely (status))
            return status;

        if (clip != NULL) {
            status = _cairo_clip_get_region (clip, &clip_region);
            assert (status == CAIRO_STATUS_SUCCESS ||
                    status == CAIRO_INT_STATUS_UNSUPPORTED);

            if (status == CAIRO_INT_STATUS_UNSUPPORTED)
                i965_shader_set_clip (&glyphs.shader, clip);
        }
    }

    glyphs.head.next = NULL;
    glyphs.head.bo = NULL;
    glyphs.head.count = 0;
    glyphs.tail = &glyphs.head;

    device = i965_device (surface);
    if (mask != NULL || clip_region == NULL) {
        glyphs.get_rectangle = i965_glyphs_emit_rectangle;
    } else {
        /* Clipped direct rendering: accumulate vertices into our own bo
         * so they can be replayed per clip rectangle afterwards. */
        glyphs.get_rectangle = i965_glyphs_accumulate_rectangle;
        glyphs.head.bo = intel_bo_create (&device->intel,
                                          I965_VERTEX_SIZE, I965_VERTEX_SIZE,
                                          FALSE, I915_TILING_NONE, 0);
        if (unlikely (glyphs.head.bo == NULL))
            return _cairo_error (CAIRO_STATUS_NO_MEMORY);

        glyphs.vbo_base = intel_bo_map (&device->intel, glyphs.head.bo);
    }
    glyphs.vbo_offset = 0;

    status = cairo_device_acquire (&device->intel.base.base);
    if (unlikely (status))
        goto CLEANUP_GLYPHS;

    _cairo_scaled_font_freeze_cache (scaled_font);
    //private = _cairo_scaled_font_get_device (scaled_font, device);
    if (scaled_font->surface_private == NULL) {
        /* Register the font with this device so its glyph caches can be
         * reclaimed on device destruction. */
        scaled_font->surface_private = device;
        scaled_font->surface_backend = surface->intel.drm.base.backend;
        cairo_list_add (&scaled_font->link, &device->intel.fonts);
    }

    memset (glyph_cache, 0, sizeof (glyph_cache));

    for (i = 0; i < num_glyphs; i++) {
        cairo_scaled_glyph_t *scaled_glyph;
        int x, y, x1, x2, y1, y2;
        int cache_index = g[i].index % ARRAY_LENGTH (glyph_cache);
        intel_glyph_t *glyph;

        /* Small direct-mapped cache to avoid repeated glyph lookups. */
        scaled_glyph = glyph_cache[cache_index];
        if (scaled_glyph == NULL ||
            _cairo_scaled_glyph_index (scaled_glyph) != g[i].index)
        {
            status = _cairo_scaled_glyph_lookup (scaled_font,
                                                 g[i].index,
                                                 CAIRO_SCALED_GLYPH_INFO_METRICS,
                                                 &scaled_glyph);
            if (unlikely (status))
                goto FINISH;

            glyph_cache[cache_index] = scaled_glyph;
        }

        /* Skip blank glyphs (e.g. spaces). */
        if (unlikely (scaled_glyph->metrics.width == 0 ||
                      scaled_glyph->metrics.height == 0))
        {
            continue;
        }

        /* XXX glyph images are snapped to pixel locations */
        x = _cairo_lround (g[i].x);
        y = _cairo_lround (g[i].y);

        x1 = x + _cairo_fixed_integer_floor (scaled_glyph->bbox.p1.x);
        y1 = y + _cairo_fixed_integer_floor (scaled_glyph->bbox.p1.y);
        x2 = x + _cairo_fixed_integer_ceil (scaled_glyph->bbox.p2.x);
        y2 = y + _cairo_fixed_integer_ceil (scaled_glyph->bbox.p2.y);

        /* Cull glyphs entirely outside the bounded extents. */
        if (x2 < extents.bounded.x ||
            y2 < extents.bounded.y ||
            x1 > extents.bounded.x + extents.bounded.width ||
            y1 > extents.bounded.y + extents.bounded.height)
        {
            continue;
        }

        if (scaled_glyph->surface_private == NULL) {
            status = intel_get_glyph (&device->intel, scaled_font, scaled_glyph);
            if (unlikely (status == CAIRO_INT_STATUS_NOTHING_TO_DO)) {
                status = CAIRO_STATUS_SUCCESS;
                continue;
            }
            if (unlikely (status))
                goto FINISH;
        }
        glyph = intel_glyph_pin (scaled_glyph->surface_private);

        /* Recommit the shader whenever the glyph atlas bo changes. */
        if (glyph->cache->buffer.bo != last_bo) {
            intel_buffer_cache_t *cache = glyph->cache;

            glyphs.shader.mask.type.vertex = VS_GLYPHS;
            glyphs.shader.mask.type.fragment = FS_GLYPHS;
            glyphs.shader.mask.type.pattern = PATTERN_BASE;

            glyphs.shader.mask.base.bo = cache->buffer.bo;
            glyphs.shader.mask.base.format = cache->buffer.format;
            glyphs.shader.mask.base.width = cache->buffer.width;
            glyphs.shader.mask.base.height = cache->buffer.height;
            glyphs.shader.mask.base.stride = cache->buffer.stride;
            glyphs.shader.mask.base.filter = i965_filter (CAIRO_FILTER_NEAREST);
            glyphs.shader.mask.base.extend = i965_extend (CAIRO_EXTEND_NONE);
            glyphs.shader.mask.base.content = CAIRO_CONTENT_ALPHA; /* XXX */

            glyphs.shader.committed = FALSE;
            status = i965_shader_commit (&glyphs.shader, device);
            if (unlikely (status))
                goto FINISH;

            last_bo = cache->buffer.bo;
        }

        x2 = x1 + glyph->width;
        y2 = y1 + glyph->height;

        if (mask_x)
            x1 += mask_x, x2 += mask_x;
        if (mask_y)
            y1 += mask_y, y2 += mask_y;

        i965_add_glyph_rectangle (&glyphs, x1, y1, x2, y2, glyph);
    }

    if (mask != NULL && clip_region != NULL)
        i965_clipped_vertices (device, &glyphs.head, clip_region);

    status = CAIRO_STATUS_SUCCESS;
  FINISH:
    _cairo_scaled_font_thaw_cache (scaled_font);
    cairo_device_release (surface->intel.drm.base.device);
  CLEANUP_GLYPHS:
    i965_shader_fini (&glyphs.shader);

    /* Free the accumulated vertex buffer chain, if we created one. */
    if (glyphs.head.bo != NULL) {
        struct i965_vbo *vbo, *next;

        intel_bo_destroy (&device->intel, glyphs.head.bo);
        for (vbo = glyphs.head.next; vbo != NULL; vbo = next) {
            next = vbo->next;
            intel_bo_destroy (&device->intel, vbo->bo);
            free (vbo);
        }
    }

    if (unlikely (status == CAIRO_INT_STATUS_UNSUPPORTED)) {
        cairo_path_fixed_t path;

        /* Fallback: fill the outline path of the remaining glyphs. */
        _cairo_path_fixed_init (&path);
        status = _cairo_scaled_font_glyph_path (scaled_font,
                                                g + i, num_glyphs - i,
                                                &path);
        if (mask_x | mask_y) {
            _cairo_path_fixed_translate (&path,
                                         _cairo_fixed_from_int (mask_x),
                                         _cairo_fixed_from_int (mask_y));
        }
        if (likely (status == CAIRO_STATUS_SUCCESS)) {
            status = surface->intel.drm.base.backend->fill (glyphs.shader.target,
                                                            glyphs.shader.op,
                                                            mask != NULL ? &_cairo_pattern_white.base : source,
                                                            &path,
                                                            CAIRO_FILL_RULE_WINDING,
                                                            0,
                                                            scaled_font->options.antialias,
                                                            clip);
        }
        _cairo_path_fixed_fini (&path);
    }

    if (mask != NULL) {
        /* Second pass: composite the accumulated mask onto the target. */
        if (likely (status == CAIRO_STATUS_SUCCESS)) {
            status = i965_surface_mask_internal (surface, op, source, mask,
                                                 clip, &extents);
        }
        cairo_surface_finish (&mask->intel.drm.base);
        cairo_surface_destroy (&mask->intel.drm.base);
    }

    if (have_clip)
        _cairo_clip_fini (&local_clip);

    return status;
}
/* Composite @source through @mask onto @dst with @op, restricted to the
 * given extents/clip, then fix up exterior pixels for unbounded
 * operators. Used as the second pass of masked glyph rendering. */
static cairo_status_t
i965_surface_mask_internal (i965_surface_t *dst,
                            cairo_operator_t op,
                            const cairo_pattern_t *source,
                            i965_surface_t *mask,
                            cairo_clip_t *clip,
                            const cairo_composite_rectangles_t *extents)
{
    i965_device_t *device;
    i965_shader_t shader;
    cairo_region_t *clip_region = NULL;
    cairo_status_t status;

    i965_shader_init (&shader, dst, op);

    status = i965_shader_acquire_pattern (&shader, &shader.source,
                                          source, &extents->bounded);
    if (unlikely (status))
        return status;

    /* Sample the mask surface as an untransformed texture. */
    shader.mask.type.vertex = VS_NONE;
    shader.mask.type.fragment = FS_SURFACE;
    shader.mask.base.content = mask->intel.drm.base.content;
    shader.mask.base.filter = i965_filter (CAIRO_FILTER_NEAREST);
    shader.mask.base.extend = i965_extend (CAIRO_EXTEND_NONE);

    /* Map destination device coordinates into the mask's [0,1] texture
     * space: translate by the bounded origin, then scale by size. */
    cairo_matrix_init_translate (&shader.mask.base.matrix,
                                 -extents->bounded.x,
                                 -extents->bounded.y);
    cairo_matrix_scale (&shader.mask.base.matrix,
                        1. / mask->intel.drm.width,
                        1. / mask->intel.drm.height);

    shader.mask.base.bo = to_intel_bo (mask->intel.drm.bo);
    shader.mask.base.format = mask->intel.drm.format;
    shader.mask.base.width = mask->intel.drm.width;
    shader.mask.base.height = mask->intel.drm.height;
    shader.mask.base.stride = mask->intel.drm.stride;

    if (clip != NULL) {
        status = _cairo_clip_get_region (clip, &clip_region);
        assert (status == CAIRO_STATUS_SUCCESS ||
                status == CAIRO_INT_STATUS_UNSUPPORTED);

        /* A single-rectangle region is dropped — presumably already
         * covered by the bounded extents; verify against callers. */
        if (clip_region != NULL && cairo_region_num_rectangles (clip_region) == 1)
            clip_region = NULL;

        if (status == CAIRO_INT_STATUS_UNSUPPORTED)
            i965_shader_set_clip (&shader, clip);
    }

    status = cairo_device_acquire (dst->intel.drm.base.device);
    if (unlikely (status))
        goto CLEANUP_SHADER;

    device = i965_device (dst);
    status = i965_shader_commit (&shader, device);
    if (unlikely (status))
        goto CLEANUP_DEVICE;

    if (clip_region != NULL) {
        unsigned int n, num_rectangles;

        /* Emit one composite rectangle per clip rectangle. */
        num_rectangles = cairo_region_num_rectangles (clip_region);
        for (n = 0; n < num_rectangles; n++) {
            cairo_rectangle_int_t rect;

            cairo_region_get_rectangle (clip_region, n, &rect);

            i965_shader_add_rectangle (&shader,
                                       rect.x, rect.y,
                                       rect.width, rect.height);
        }
    } else {
        i965_shader_add_rectangle (&shader,
                                   extents->bounded.x,
                                   extents->bounded.y,
                                   extents->bounded.width,
                                   extents->bounded.height);
    }

    if (! extents->is_bounded)
        status = i965_fixup_unbounded (dst, extents, clip);

  CLEANUP_DEVICE:
    cairo_device_release (&device->intel.base.base);
  CLEANUP_SHADER:
    i965_shader_fini (&shader);

    return status;
}