void *dm_pool_alloc_aligned(struct dm_pool *p, size_t s, unsigned alignment)
{
    struct chunk *c = p->chunk;
    void *r;

    /* realign begin */
    if (c)
        _align_chunk(c, alignment);

    /* have we got room ? */
    if (!c || (c->begin > c->end) || (c->end - c->begin < s)) {
        /* allocate new chunk */
        size_t needed = s + alignment + sizeof(struct chunk);
        c = _new_chunk(p, (needed > p->chunk_size) ? needed : p->chunk_size);

        if (!c)
            return NULL;

        _align_chunk(c, alignment);
    }

    r = c->begin;
    c->begin += s;

#ifdef VALGRIND_POOL
    VALGRIND_MAKE_MEM_UNDEFINED(r, s);
#endif

    return r;
}
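/*
 * _align_chunk() is referenced above but not shown in these snippets.  A
 * minimal sketch of what it is assumed to do: round the chunk's begin
 * pointer up to the requested alignment (taken to be a power of two).
 * This is an illustrative reconstruction, not the libdevmapper source.
 */
static void _align_chunk(struct chunk *c, unsigned alignment)
{
    uintptr_t begin = (uintptr_t) c->begin;
    uintptr_t mask = (uintptr_t) alignment - 1;

    c->begin = (char *) ((begin + mask) & ~mask);
}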
void flush_xroute(struct xroute *xroute)
{
    int i;

    i = xroute - xroutes;
    assert(i >= 0 && i < numxroutes);

    local_notify_xroute(xroute, LOCAL_FLUSH);

    if(i != numxroutes - 1)
        memcpy(xroutes + i, xroutes + numxroutes - 1, sizeof(struct xroute));
    numxroutes--;
    VALGRIND_MAKE_MEM_UNDEFINED(xroutes + numxroutes, sizeof(struct xroute));

    if(numxroutes == 0) {
        free(xroutes);
        xroutes = NULL;
        maxxroutes = 0;
    } else if(maxxroutes > 8 && numxroutes < maxxroutes / 4) {
        struct xroute *new_xroutes;
        int n = maxxroutes / 2;
        new_xroutes = realloc(xroutes, n * sizeof(struct xroute));
        if(new_xroutes == NULL)
            return;
        xroutes = new_xroutes;
        maxxroutes = n;
    }
}
void
_cairo_region_fini (cairo_region_t *region)
{
    assert (! CAIRO_REFERENCE_COUNT_HAS_REFERENCE (&region->ref_count));

    pixman_region32_fini (&region->rgn);
    VG (VALGRIND_MAKE_MEM_UNDEFINED (region, sizeof (cairo_region_t)));
}
cairo_status_t
_cairo_pen_init_copy (cairo_pen_t *pen, const cairo_pen_t *other)
{
    VG (VALGRIND_MAKE_MEM_UNDEFINED (pen, sizeof (cairo_pen_t)));

    *pen = *other;

    if (CAIRO_INJECT_FAULT ())
        return _cairo_error (CAIRO_STATUS_NO_MEMORY);

    pen->vertices = pen->vertices_embedded;
    if (pen->num_vertices) {
        if (pen->num_vertices > ARRAY_LENGTH (pen->vertices_embedded)) {
            pen->vertices = _cairo_malloc_ab (pen->num_vertices,
                                              sizeof (cairo_pen_vertex_t));
            if (unlikely (pen->vertices == NULL))
                return _cairo_error (CAIRO_STATUS_NO_MEMORY);
        }

        memcpy (pen->vertices, other->vertices,
                pen->num_vertices * sizeof (cairo_pen_vertex_t));
    }

    return CAIRO_STATUS_SUCCESS;
}
/* Case 3 - memory definedness doesn't survive remapping */
static void test3()
{
    char *m = mm(0, pgsz * 5, PROT_READ|PROT_WRITE);

    VALGRIND_MAKE_MEM_UNDEFINED(&m[pgsz], pgsz);
    mm(&m[pgsz], pgsz, PROT_READ);
    VALGRIND_CHECK_MEM_IS_DEFINED(&m[pgsz], pgsz);  /* OK */
}
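/*
 * The mm() helper used by these test cases is not shown here.  A minimal
 * sketch under the assumption that it is a thin mmap() wrapper: it maps
 * anonymous memory, uses MAP_FIXED when a non-NULL address is supplied (as
 * in the remapping in test3 above), and aborts on failure.
 */
#include <stdio.h>
#include <stdlib.h>
#include <sys/mman.h>

static char *mm(char *addr, size_t size, int prot)
{
    int flags = MAP_PRIVATE | MAP_ANONYMOUS;
    void *ret;

    if (addr != NULL)
        flags |= MAP_FIXED;   /* replace the existing mapping at addr */

    ret = mmap(addr, size, prot, flags, -1, 0);
    if (ret == MAP_FAILED) {
        perror("mmap");
        exit(1);
    }
    return ret;
}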
void
_cairo_traps_fini (cairo_traps_t *traps)
{
    if (traps->traps != traps->traps_embedded)
        free (traps->traps);

    VG (VALGRIND_MAKE_MEM_UNDEFINED (traps, sizeof (cairo_traps_t)));
}
/* Restores a previously saved frame onto the associated stack. */
static void restore_frame(struct cocore *target)
{
    struct stack *stack = target->stack;
    void *frame_start = FRAME_START(stack->stack_base, target->frame);

    VALGRIND_MAKE_MEM_UNDEFINED(frame_start, target->saved_length);
    memcpy(frame_start, target->saved_frame, target->saved_length);
    stack->current = target;
}
void valgrindMakeMemUndefined(uintptr_t address, uintptr_t size)
{
#if defined(VALGRIND_REQUEST_LOGS)
    VALGRIND_PRINTF_BACKTRACE("Marking an area as undefined at 0x%lx of size %lu\n",
                              address, size);
#endif /* defined(VALGRIND_REQUEST_LOGS) */
    VALGRIND_MAKE_MEM_UNDEFINED(address, size);
}
void base_node_dealloc(extent_node_t *node)
{
    VALGRIND_MAKE_MEM_UNDEFINED(node, sizeof(extent_node_t));
    malloc_mutex_lock(&base_mtx);
    *(extent_node_t **)node = base_nodes;
    base_nodes = node;
    malloc_mutex_unlock(&base_mtx);
}
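/*
 * A plausible counterpart to base_node_dealloc() above, sketched for
 * illustration only (the body is an assumption, not verbatim allocator
 * source, and base_alloc() is a hypothetical fallback allocator): pop a
 * recycled node off the intrusive free list and re-mark it as undefined,
 * since its previous contents are stale.
 */
extent_node_t *base_node_alloc(void)
{
    extent_node_t *ret;

    malloc_mutex_lock(&base_mtx);
    if (base_nodes != NULL) {
        ret = base_nodes;
        base_nodes = *(extent_node_t **)ret;  /* next pointer is stored in-place */
        malloc_mutex_unlock(&base_mtx);
        VALGRIND_MAKE_MEM_UNDEFINED(ret, sizeof(extent_node_t));
    } else {
        malloc_mutex_unlock(&base_mtx);
        ret = (extent_node_t *)base_alloc(sizeof(extent_node_t));
    }
    return ret;
}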
void
_cairo_pen_fini (cairo_pen_t *pen)
{
    if (pen->vertices != pen->vertices_embedded)
        free (pen->vertices);

    VG (VALGRIND_MAKE_MEM_UNDEFINED (pen, sizeof (cairo_pen_t)));
}
void
_cairo_region_init (cairo_region_t *region)
{
    VG (VALGRIND_MAKE_MEM_UNDEFINED (region, sizeof (cairo_region_t)));

    region->status = CAIRO_STATUS_SUCCESS;
    CAIRO_REFERENCE_COUNT_INIT (&region->ref_count, 0);
    pixman_region32_init (&region->rgn);
}
int main (int argc, char *argv[])
{
    size_t def_size = 1<<20;
    char *p;
    char *new_p;

    if (argc > 10000) def_size = def_size * 2;

    {
        size_t size = def_size;
        (void) VALGRIND_MAKE_MEM_UNDEFINED(&size, 1);
        p = malloc(size);
    }

    (void) VALGRIND_MAKE_MEM_UNDEFINED(&p, 1);
    new_p = realloc(p, def_size);

    (void) VALGRIND_MAKE_MEM_UNDEFINED(&new_p, 1);
    new_p = realloc(new_p, def_size);

    (void) VALGRIND_MAKE_MEM_UNDEFINED(&new_p, 1);
    free (new_p);

    {
        size_t nmemb = 1;
        (void) VALGRIND_MAKE_MEM_UNDEFINED(&nmemb, 1);
        new_p = calloc(nmemb, def_size);
        free (new_p);
    }

#if 0
    {
        size_t alignment = 1;
        (void) VALGRIND_MAKE_MEM_UNDEFINED(&alignment, 1);
        new_p = memalign(alignment, def_size);
        free(new_p);
    }

    {
        size_t nmemb = 16;
        size_t size = def_size;
        (void) VALGRIND_MAKE_MEM_UNDEFINED(&size, 1);
        new_p = memalign(nmemb, size);
        free(new_p);
    }

    {
        size_t size = def_size;
        (void) VALGRIND_MAKE_MEM_UNDEFINED(&size, 1);
        new_p = valloc(size);
        free (new_p);
    }
#endif

    return 0;
}
void
_cairo_freelist_free (cairo_freelist_t *freelist, void *voidnode)
{
    cairo_freelist_node_t *node = voidnode;

    if (node) {
        node->next = freelist->first_free_node;
        freelist->first_free_node = node;
        VG (VALGRIND_MAKE_MEM_UNDEFINED (node, freelist->nodesize));
    }
}
void
_cairo_stroke_style_fini (cairo_stroke_style_t *style)
{
    free (style->dash);
    style->dash = NULL;

    style->num_dashes = 0;

    VG (VALGRIND_MAKE_MEM_UNDEFINED (style, sizeof (cairo_stroke_style_t)));
}
cairo_status_t
_cairo_pen_init (cairo_pen_t *pen,
                 double radius,
                 double tolerance,
                 const cairo_matrix_t *ctm)
{
    int i;
    int reflect;

    if (CAIRO_INJECT_FAULT ())
        return _cairo_error (CAIRO_STATUS_NO_MEMORY);

    VG (VALGRIND_MAKE_MEM_UNDEFINED (pen, sizeof (cairo_pen_t)));

    pen->radius = radius;
    pen->tolerance = tolerance;

    reflect = _cairo_matrix_compute_determinant (ctm) < 0.;

    pen->num_vertices = _cairo_pen_vertices_needed (tolerance, radius, ctm);

    if (pen->num_vertices > ARRAY_LENGTH (pen->vertices_embedded)) {
        pen->vertices = _cairo_malloc_ab (pen->num_vertices,
                                          sizeof (cairo_pen_vertex_t));
        if (unlikely (pen->vertices == NULL))
            return _cairo_error (CAIRO_STATUS_NO_MEMORY);
    } else {
        pen->vertices = pen->vertices_embedded;
    }

    /*
     * Compute pen coordinates. To generate the right ellipse, compute points
     * around a circle in user space and transform them to device space. To
     * get a consistent orientation in device space, flip the pen if the
     * transformation matrix is reflecting.
     */
    for (i = 0; i < pen->num_vertices; i++) {
        cairo_pen_vertex_t *v = &pen->vertices[i];
        double theta = 2 * M_PI * i / (double) pen->num_vertices, dx, dy;

        if (reflect)
            theta = -theta;
        dx = radius * cos (theta);
        dy = radius * sin (theta);
        cairo_matrix_transform_distance (ctm, &dx, &dy);
        v->point.x = _cairo_fixed_from_double (dx);
        v->point.y = _cairo_fixed_from_double (dy);
    }

    _cairo_pen_compute_slopes (pen);

    return CAIRO_STATUS_SUCCESS;
}
void flush_route(struct babel_route *route)
{
    int i;
    struct source *src;
    unsigned oldmetric;
    int lost = 0;

    oldmetric = route_metric(route);
    src = route->src;

    if(route->installed) {
        uninstall_route(route);
        lost = 1;
    }

    i = find_route_slot(route->src->prefix, route->src->plen,
                        route->src->src_prefix, route->src->src_plen, NULL);
    assert(i >= 0 && i < route_slots);

    local_notify_route(route, LOCAL_FLUSH);

    if(route == routes[i]) {
        routes[i] = route->next;
        route->next = NULL;
        destroy_route(route);

        if(routes[i] == NULL) {
            if(i < route_slots - 1)
                memmove(routes + i, routes + i + 1,
                        (route_slots - i - 1) * sizeof(struct babel_route*));
            routes[route_slots - 1] = NULL;
            route_slots--;
            VALGRIND_MAKE_MEM_UNDEFINED(routes + route_slots,
                                        sizeof(struct babel_route*));
        }

        if(route_slots == 0)
            resize_route_table(0);
        else if(max_route_slots > 8 && route_slots < max_route_slots / 4)
            resize_route_table(max_route_slots / 2);
    } else {
        struct babel_route *r = routes[i];
        while(r->next != route)
            r = r->next;
        r->next = route->next;
        route->next = NULL;
        destroy_route(route);
    }

    if(lost)
        route_lost(src, oldmetric);

    release_source(src);
}
void
_cairo_region_init_rectangle (cairo_region_t *region,
                              const cairo_rectangle_int_t *rectangle)
{
    VG (VALGRIND_MAKE_MEM_UNDEFINED (region, sizeof (cairo_region_t)));

    region->status = CAIRO_STATUS_SUCCESS;
    CAIRO_REFERENCE_COUNT_INIT (&region->ref_count, 0);
    pixman_region32_init_rect (&region->rgn,
                               rectangle->x, rectangle->y,
                               rectangle->width, rectangle->height);
}
void reuse_valgrind_stack(stk_seg *stk, uint8_t *sp)
{
#ifndef NVALGRIND
    // Establish that the stack is accessible. This must be done when reusing
    // old stack segments, since the act of popping the stack previously
    // caused valgrind to consider the whole thing inaccessible.
    assert(sp >= stk->data && sp <= (uint8_t*) stk->end &&
           "Stack pointer must be inside stack segment");
    size_t sz = stk->end - (uintptr_t)sp;
    VALGRIND_MAKE_MEM_UNDEFINED(sp, sz);
#endif
}
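/*
 * Sketches of the companion helpers assumed to exist around
 * reuse_valgrind_stack(): a freshly allocated segment is registered with
 * Valgrind so the range is treated as a stack, and deregistered before the
 * segment is freed.  The valgrind_id field is an assumption of this
 * illustration; VALGRIND_STACK_REGISTER/DEREGISTER are the standard client
 * requests from valgrind.h.
 */
void register_valgrind_stack(stk_seg *stk)
{
#ifndef NVALGRIND
    stk->valgrind_id = VALGRIND_STACK_REGISTER(&stk->data[0],
                                               (uint8_t *) stk->end);
#endif
}

void deregister_valgrind_stack(stk_seg *stk)
{
#ifndef NVALGRIND
    VALGRIND_STACK_DEREGISTER(stk->valgrind_id);
#endif
}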
int dm_pool_grow_object(struct dm_pool *p, const void *extra, size_t delta)
{
    struct chunk *c = p->chunk, *nc;

    if (!delta)
        delta = strlen(extra);

    if (c->end - (c->begin + p->object_len) < delta) {
        /* move into a new chunk */
        if (p->object_len + delta > (p->chunk_size / 2))
            nc = _new_chunk(p, (p->object_len + delta) * 2);
        else
            nc = _new_chunk(p, p->chunk_size);

        if (!nc)
            return 0;

        _align_chunk(p->chunk, p->object_alignment);

#ifdef VALGRIND_POOL
        VALGRIND_MAKE_MEM_UNDEFINED(p->chunk->begin, p->object_len);
#endif

        memcpy(p->chunk->begin, c->begin, p->object_len);

#ifdef VALGRIND_POOL
        VALGRIND_MAKE_MEM_NOACCESS(c->begin, p->object_len);
#endif

        c = p->chunk;
    }

#ifdef VALGRIND_POOL
    VALGRIND_MAKE_MEM_UNDEFINED(p->chunk->begin + p->object_len, delta);
#endif

    memcpy(c->begin + p->object_len, extra, delta);
    p->object_len += delta;

    return 1;
}
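/*
 * _new_chunk() is likewise not shown.  A simplified sketch of what it is
 * assumed to do (ignoring libdevmapper's spare-chunk recycling and its
 * VALGRIND_POOL bookkeeping): allocate a block of at least s bytes, link it
 * onto the pool as the current chunk, and point begin at the payload that
 * follows the chunk header.  The prev field is an assumption of this sketch.
 */
static struct chunk *_new_chunk(struct dm_pool *p, size_t s)
{
    struct chunk *c = malloc(s);

    if (!c)
        return NULL;

    c->prev = p->chunk;              /* chunks form a singly linked list */
    c->begin = (char *) (c + 1);     /* payload starts after the header */
    c->end = (char *) c + s;
    p->chunk = c;

    return c;
}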
static inline void
sec_clear_noaccess (void *memory, size_t from, size_t to)
{
    char *ptr = memory;
    ASSERT (from <= to);

#ifdef WITH_VALGRIND
    VALGRIND_MAKE_MEM_UNDEFINED (ptr + from, to - from);
#endif

    memset (ptr + from, 0, to - from);

#ifdef WITH_VALGRIND
    VALGRIND_MAKE_MEM_NOACCESS (ptr + from, to - from);
#endif
}
int main (int argc, char *argv[])
{
    char *main_name __attribute__((unused)) = "main name";
    pthread_t ebbr, egll, zzzz;
    int i = 1234;
    char undef = '?';
    char *some_mem __attribute__((unused)) = malloc(100);

    VALGRIND_MAKE_MEM_UNDEFINED(&undef, 1);
    int len = strlen(undefined);
    breakme(__LINE__);
    for (i = len-1; i >= 0; i = i-2)
        undefined[i] = undef;
    *(char*)&int_und = undef;
    breakme(__LINE__);

    if (argc > 1)
        sleeps = atoi(argv[1]);

    level();
    make_error ("called from main");

    pthread_create(&ebbr, NULL, brussels_fn, NULL);
    pthread_create(&egll, NULL, london_fn, NULL);
    pthread_create(&zzzz, NULL, petaouchnok_fn, NULL);

    loopmain = 1;
    while (! (loopt1 && loopt2 && loopmain))
        loopmain++;
    for (i = 0; i < LOOPS; i++) {
        loopmain++;
        if (loopmain == 10000)
            make_error ("in main loop");
    }

    pthread_join(ebbr, NULL);
    make_error ("called from main (the end, before joining t3)");
    pthread_join(zzzz, NULL);

    if (argc > 2) {
        for (i = 0; i < 100; i++)
            if ((*(&undef + i*4000) == 0) || (*(&undef - i*4000) == 0)) {
                printf ("there are some null bytes here and there %d\n", i);
                fflush(stdout);
            }
    }

    exit(0);
}
void
_cairo_stroke_style_init (cairo_stroke_style_t *style)
{
    VG (VALGRIND_MAKE_MEM_UNDEFINED (style, sizeof (cairo_stroke_style_t)));

    style->line_width = CAIRO_GSTATE_LINE_WIDTH_DEFAULT;
    style->line_cap = CAIRO_GSTATE_LINE_CAP_DEFAULT;
    style->line_join = CAIRO_GSTATE_LINE_JOIN_DEFAULT;
    style->miter_limit = CAIRO_GSTATE_MITER_LIMIT_DEFAULT;

    style->dash = NULL;
    style->num_dashes = 0;
    style->dash_offset = 0.0;
}
void examsgRngInit(exa_ringbuf_t *rng, size_t size)
{
    memset(rng, 0, size);

#ifdef HAVE_VALGRIND_MEMCHECK_H
    VALGRIND_MAKE_MEM_UNDEFINED(rng, size);
#endif

    rng->magic = EXAMSG_RNG_MAGIC;
    rng->pRd = 0;
    rng->pWr = 0;
    rng->size = size - sizeof(exa_ringbuf_t);

    examsgRngStatsReset(rng);
}
void local_socket_destroy(int i)
{
    if(i < 0 || i >= num_local_sockets) {
        fprintf(stderr, "Internal error: closing unknown local socket.\n");
        return;
    }

    free(local_sockets[i].buf);
    close(local_sockets[i].fd);
    local_sockets[i] = local_sockets[--num_local_sockets];
    VALGRIND_MAKE_MEM_UNDEFINED(local_sockets + num_local_sockets,
                                sizeof(struct local_socket));
}
int __recvpath
psmi_mq_handle_data(psm_mq_req_t req, psm_epaddr_t epaddr,
                    const void *buf, uint32_t nbytes)
{
    psm_mq_t mq = req->mq;
    int rc;

    if (req->state == MQ_STATE_MATCHED)
        rc = MQ_RET_MATCH_OK;
    else {
        psmi_assert(req->state == MQ_STATE_UNEXP);
        rc = MQ_RET_UNEXP_OK;
    }

    psmi_mq_req_copy(req, epaddr, buf, nbytes);

    if (req->send_msgoff == req->send_msglen) {
        if (req->type & MQE_TYPE_EGRLONG) {
            int flowid = req->egrid.egr_flowid;
            psmi_assert(STAILQ_FIRST(&epaddr->egrlong[flowid]) == req);
            STAILQ_REMOVE_HEAD(&epaddr->egrlong[flowid], nextq);
        }

        /* Whatever is leftover in the posted message should now be marked
         * as undefined.
         * XXX Sends not supported yet. */
#if 0
#ifdef PSM_VALGRIND
        if (req->send_msglen < req->buf_len)
            VALGRIND_MAKE_MEM_UNDEFINED(
                (void *) ((uintptr_t) req->buf + req->send_msglen),
                req->buf_len - req->send_msglen);
#endif
#endif

        if (req->state == MQ_STATE_MATCHED) {
            req->state = MQ_STATE_COMPLETE;
            mq_qq_append(&mq->completed_q, req);
        } else { /* MQ_STATE_UNEXP */
            req->state = MQ_STATE_COMPLETE;
        }

        _IPATH_VDBG("epaddr=%s completed %d byte send, state=%d\n",
                    psmi_epaddr_get_name(epaddr->epid),
                    (int)req->send_msglen, req->state);
    }

    return rc;
}
void
_cairo_freepool_init (cairo_freepool_t *freepool, unsigned nodesize)
{
    freepool->first_free_node = NULL;
    freepool->pools = &freepool->embedded_pool;
    freepool->freepools = NULL;
    freepool->nodesize = nodesize;

    freepool->embedded_pool.next = NULL;
    freepool->embedded_pool.size = sizeof (freepool->embedded_data);
    freepool->embedded_pool.rem = sizeof (freepool->embedded_data);
    freepool->embedded_pool.data = freepool->embedded_data;

    VG (VALGRIND_MAKE_MEM_UNDEFINED (freepool->embedded_data,
                                     sizeof (freepool->embedded_data)));
}
/* Case 5 - mprotect doesn't affect definedness */
static void test5()
{
    char *m = mm(0, pgsz * 5, PROT_READ|PROT_WRITE);

    VALGRIND_MAKE_MEM_UNDEFINED(m, pgsz*5);
    memset(m, 'x', 10);
    VALGRIND_CHECK_MEM_IS_DEFINED(m, 10);     /* OK */
    VALGRIND_CHECK_MEM_IS_DEFINED(m+10, 10);  /* BAD */

    mprotect(m, pgsz*5, PROT_NONE);
    mprotect(m, pgsz*5, PROT_READ);

    VALGRIND_CHECK_MEM_IS_DEFINED(m, 10);     /* still OK */
    VALGRIND_CHECK_MEM_IS_DEFINED(m+20, 10);  /* BAD */
}
static void _free_chunk(struct chunk *c)
{
#ifdef VALGRIND_POOL
#  ifdef DEBUG_MEM
    if (c)
        VALGRIND_MAKE_MEM_UNDEFINED(c + 1, c->end - (char *) (c + 1));
#  endif
#endif

#ifdef DEBUG_ENFORCE_POOL_LOCKING
    /* since DEBUG_MEM is using own memory list */
    free(c); /* for posix_memalign() */
#else
    dm_free(c);
#endif
}
void *
_cairo_freelist_alloc (cairo_freelist_t *freelist)
{
    if (freelist->first_free_node) {
        cairo_freelist_node_t *node;

        node = freelist->first_free_node;
        VG (VALGRIND_MAKE_MEM_DEFINED (node, sizeof (node->next)));
        freelist->first_free_node = node->next;
        VG (VALGRIND_MAKE_MEM_UNDEFINED (node, freelist->nodesize));

        return node;
    }

    return malloc (freelist->nodesize);
}
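/*
 * Illustrative usage of the freelist pair above (a sketch: my_node_t and the
 * surrounding calls are assumptions for this example, using cairo's
 * _cairo_freelist_init/_cairo_freelist_fini).  The UNDEFINED annotation in
 * _cairo_freelist_free() makes memcheck flag any read of a recycled node
 * before it is rewritten, even though the memory was never returned to
 * malloc.
 */
typedef struct { int x, y; } my_node_t;

static void freelist_example (void)
{
    cairo_freelist_t pool;
    my_node_t *n;

    _cairo_freelist_init (&pool, sizeof (my_node_t));

    n = _cairo_freelist_alloc (&pool);
    n->x = 1;
    n->y = 2;
    _cairo_freelist_free (&pool, n);    /* node is now "undefined" to memcheck */

    n = _cairo_freelist_alloc (&pool);  /* recycled: reading n->y here before
                                           writing it would be reported */
    n->x = 3;
    n->y = 4;
    _cairo_freelist_free (&pool, n);

    _cairo_freelist_fini (&pool);
}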
cairo_status_t
_cairo_polygon_init_boxes (cairo_polygon_t *polygon, const cairo_boxes_t *boxes)
{
    const struct _cairo_boxes_chunk *chunk;
    int i;

    VG (VALGRIND_MAKE_MEM_UNDEFINED (polygon, sizeof (cairo_polygon_t)));

    polygon->status = CAIRO_STATUS_SUCCESS;

    polygon->num_edges = 0;

    polygon->edges = polygon->edges_embedded;
    polygon->edges_size = ARRAY_LENGTH (polygon->edges_embedded);
    if (boxes->num_boxes > ARRAY_LENGTH (polygon->edges_embedded)/2) {
        polygon->edges_size = 2 * boxes->num_boxes;
        polygon->edges = _cairo_malloc_ab (polygon->edges_size,
                                           2*sizeof(cairo_edge_t));
        if (unlikely (polygon->edges == NULL))
            return polygon->status = _cairo_error (CAIRO_STATUS_NO_MEMORY);
    }

    polygon->extents.p1.x = polygon->extents.p1.y = INT32_MAX;
    polygon->extents.p2.x = polygon->extents.p2.y = INT32_MIN;

    polygon->limits = NULL;
    polygon->num_limits = 0;

    for (chunk = &boxes->chunks; chunk != NULL; chunk = chunk->next) {
        for (i = 0; i < chunk->count; i++) {
            cairo_point_t p1, p2;

            p1 = chunk->base[i].p1;
            p2.x = p1.x;
            p2.y = chunk->base[i].p2.y;
            _cairo_polygon_add_edge (polygon, &p1, &p2, 1);

            p1 = chunk->base[i].p2;
            p2.x = p1.x;
            p2.y = chunk->base[i].p1.y;
            _cairo_polygon_add_edge (polygon, &p1, &p2, 1);
        }
    }

    return polygon->status;
}