/* Compute value of PTR and put it into address RES.
   PTR has to be ADDR_EXPR.  */
static void
chkp_collect_addr_value (tree ptr, address_t &res)
{
  tree obj = TREE_OPERAND (ptr, 0);
  /* Scratch address used to accumulate offset terms before they are
     folded into RES.  */
  address_t part;

  switch (TREE_CODE (obj))
    {
    case INDIRECT_REF:
      /* &*p is just p.  */
      chkp_collect_value (TREE_OPERAND (obj, 0), res);
      break;

    case MEM_REF:
      /* Base pointer plus the MEM_REF offset operand.  */
      chkp_collect_value (TREE_OPERAND (obj, 0), res);
      part.pol.create (0);
      chkp_collect_value (TREE_OPERAND (obj, 1), part);
      chkp_add_addr_addr (res, part);
      part.pol.release ();
      break;

    case ARRAY_REF:
      /* Address of the array plus the index scaled by element size.  */
      chkp_collect_value (build_fold_addr_expr (TREE_OPERAND (obj, 0)), res);
      part.pol.create (0);
      chkp_collect_value (TREE_OPERAND (obj, 1), part);
      chkp_mult_addr (part, array_ref_element_size (obj));
      chkp_add_addr_addr (res, part);
      part.pol.release ();
      break;

    case COMPONENT_REF:
      {
	tree str = TREE_OPERAND (obj, 0);
	tree field = TREE_OPERAND (obj, 1);

	/* Address of the enclosing structure ...  */
	chkp_collect_value (build_fold_addr_expr (str), res);

	/* ... plus the variable part of the field offset ...  */
	part.pol.create (0);
	chkp_collect_value (component_ref_field_offset (obj), part);
	chkp_add_addr_addr (res, part);
	part.pol.release ();

	/* ... plus the constant bit offset converted to bytes.  */
	if (DECL_FIELD_BIT_OFFSET (field))
	  {
	    part.pol.create (0);
	    chkp_collect_value (fold_build2 (TRUNC_DIV_EXPR, size_type_node,
					     DECL_FIELD_BIT_OFFSET (field),
					     size_int (BITS_PER_UNIT)),
				part);
	    chkp_add_addr_addr (res, part);
	    part.pol.release ();
	  }
      }
      break;

    default:
      /* Object we cannot decompose; keep the whole PTR as a single
	 opaque term with coefficient one.  */
      chkp_add_addr_item (res, integer_one_node, ptr);
      break;
    }
}
/* Callback analyzing one index *INDEX of a memory reference with base
   BASE.  DATA points to a struct ar_data describing the loop and
   statement being analyzed.  On success, fold the linear evolution of
   the index (step and constant delta, scaled by element size for array
   references) into the accumulators in DATA, replace *INDEX by its
   variable part, and return true.  Return false when the index cannot
   be represented this way.  */
static bool
idx_analyze_ref (tree base, tree *index, void *data)
{
  struct ar_data *ar_data = data;
  tree iv_base, iv_step, elt_size;
  HOST_WIDE_INT step_val, delta_val = 0, scale = 1;
  affine_iv iv;

  /* Misaligned and aligned indirect references are not handled.  */
  if (TREE_CODE (base) == MISALIGNED_INDIRECT_REF
      || TREE_CODE (base) == ALIGN_INDIRECT_REF)
    return false;

  /* The index must be a simple induction variable of this loop.  */
  if (!simple_iv (ar_data->loop, ar_data->stmt, *index, &iv, false))
    return false;

  iv_base = iv.base;
  iv_step = iv.step;

  if (zero_p (iv_step))
    step_val = 0;
  else
    {
      /* A non-constant step cannot be accumulated.  */
      if (!cst_and_fits_in_hwi (iv_step))
	return false;
      step_val = int_cst_value (iv_step);
    }

  /* Peel a constant addend off the base, if any.  */
  if (TREE_CODE (iv_base) == PLUS_EXPR
      && cst_and_fits_in_hwi (TREE_OPERAND (iv_base, 1)))
    {
      delta_val = int_cst_value (TREE_OPERAND (iv_base, 1));
      iv_base = TREE_OPERAND (iv_base, 0);
    }

  /* A fully constant base moves entirely into the delta.  */
  if (cst_and_fits_in_hwi (iv_base))
    {
      delta_val += int_cst_value (iv_base);
      iv_base = build_int_cst (TREE_TYPE (iv_base), 0);
    }

  /* For array references, scale step and delta by the element size.  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      elt_size = array_ref_element_size (base);
      if (!cst_and_fits_in_hwi (elt_size))
	return false;
      scale = int_cst_value (elt_size);
      step_val *= scale;
      delta_val *= scale;
    }

  *ar_data->step += step_val;
  *ar_data->delta += delta_val;
  *index = iv_base;

  return true;
}
/* Compute the byte offset of EXPR within the object VAR, as a sizetype
   tree.  Return error_mark_node when the offset cannot be determined.  */
static tree
compute_object_offset (const_tree expr, const_tree var)
{
  /* How the inner offset and this level's offset combine; flipped to
     MINUS_EXPR for negative constant array indices.  */
  enum tree_code combine = PLUS_EXPR;
  tree inner, delta, op;

  /* Reached the object itself: offset is zero.  */
  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      inner = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (inner == error_mark_node)
	return inner;

      /* Field offset is DECL_FIELD_OFFSET plus the byte portion of
	 DECL_FIELD_BIT_OFFSET.  */
      op = TREE_OPERAND (expr, 1);
      delta = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (op),
			  size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (op))
				    / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* These wrappers do not change the offset.  */
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      inner = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (inner == error_mark_node)
	return inner;

      /* The imaginary part sits one part-size past the real part.  */
      delta = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      inner = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (inner == error_mark_node)
	return inner;

      op = TREE_OPERAND (expr, 1);
      tree lo, unit;
      lo = array_ref_low_bound (CONST_CAST_TREE (expr));
      unit = array_ref_element_size (CONST_CAST_TREE (expr));

      /* Bias the index by a non-zero low bound.  */
      if (! integer_zerop (lo))
	op = fold_build2 (MINUS_EXPR, TREE_TYPE (op), op, lo);

      /* A negative constant index subtracts its magnitude instead,
	 keeping the arithmetic in unsigned sizetype.  */
      if (TREE_CODE (op) == INTEGER_CST && tree_int_cst_sgn (op) < 0)
	{
	  combine = MINUS_EXPR;
	  op = fold_build1 (NEGATE_EXPR, TREE_TYPE (op), op);
	}

      op = fold_convert (sizetype, op);
      delta = size_binop (MULT_EXPR, unit, op);
      break;

    case MEM_REF:
      /* Only constant-address MEM_REFs are expected here.  */
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return wide_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (combine, inner, delta);
}