tree
cplus_expand_constant (tree cst)
{
  switch (TREE_CODE (cst))
    {
    case PTRMEM_CST:
      {
        tree type = TREE_TYPE (cst);
        tree member;

        /* Find the member.  */
        member = PTRMEM_CST_MEMBER (cst);

        /* We can't lower this until the class is complete.  */
        if (!COMPLETE_TYPE_P (DECL_CONTEXT (member)))
          return cst;

        if (TREE_CODE (member) == FIELD_DECL)
          {
            /* Find the offset for the field.  */
            cst = byte_position (member);
            while (!same_type_p (DECL_CONTEXT (member),
                                 TYPE_PTRMEM_CLASS_TYPE (type)))
              {
                /* The MEMBER must have been nestled within an
                   anonymous aggregate contained in TYPE.  Find the
                   anonymous aggregate.  */
                member = lookup_anon_field (TYPE_PTRMEM_CLASS_TYPE (type),
                                            DECL_CONTEXT (member));
                cst = size_binop (PLUS_EXPR, cst, byte_position (member));
              }
            cst = fold (build_nop (type, cst));
          }
        else
          {
            tree delta;
            tree pfn;

            expand_ptrmemfunc_cst (cst, &delta, &pfn);
            cst = build_ptrmemfunc1 (type, delta, pfn);
          }
      }
      break;

    case CONSTRUCTOR:
      {
        constructor_elt *elt;
        unsigned HOST_WIDE_INT idx;
        FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (cst), idx, elt)
          elt->value = cplus_expand_constant (elt->value);
      }

    default:
      /* There's nothing to do.  */
      break;
    }

  return cst;
}
tree
cplus_expand_constant (tree cst)
{
  switch (TREE_CODE (cst))
    {
    case PTRMEM_CST:
      {
        tree type = TREE_TYPE (cst);
        tree member;

        /* Find the member.  */
        member = PTRMEM_CST_MEMBER (cst);

        if (TREE_CODE (member) == FIELD_DECL)
          {
            /* Find the offset for the field.  */
            cst = byte_position (member);
            while (!same_type_p (DECL_CONTEXT (member),
                                 TYPE_PTRMEM_CLASS_TYPE (type)))
              {
                /* The MEMBER must have been nestled within an
                   anonymous aggregate contained in TYPE.  Find the
                   anonymous aggregate.  */
                member = lookup_anon_field (TYPE_PTRMEM_CLASS_TYPE (type),
                                            DECL_CONTEXT (member));
                cst = size_binop (PLUS_EXPR, cst, byte_position (member));
              }
            cst = fold (build_nop (type, cst));
          }
        else
          {
            tree delta;
            tree pfn;

            expand_ptrmemfunc_cst (cst, &delta, &pfn);
            cst = build_ptrmemfunc1 (type, delta, pfn);
          }
      }
      break;

    default:
      /* There's nothing to do.  */
      break;
    }

  return cst;
}
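/* The anonymous-aggregate walk above is easiest to see from the C++ side.
   A minimal, hypothetical input sketch whose pointer-to-data-member
   constant exercises the lookup_anon_field loop: &S::i is a PTRMEM_CST
   whose member lives inside an anonymous union, so lowering it must add
   the byte position of the unnamed union within S to the position of i
   within that union (here both happen to be 0).  */
struct S
{
  union { int i; double d; };   /* anonymous aggregate member */
};
int S::*pm = &S::i;             /* lowered to a byte offset by
                                   cplus_expand_constant */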
tree
build_ivar_list_initializer (tree type, tree field_decl)
{
  vec<constructor_elt, va_gc> *inits = NULL;

  do
    {
      vec<constructor_elt, va_gc> *ivar = NULL;
      tree id;

      /* Set name.  */
      if (DECL_NAME (field_decl))
        CONSTRUCTOR_APPEND_ELT (ivar, NULL_TREE,
                                add_objc_string (DECL_NAME (field_decl),
                                                 meth_var_names));
      else
        /* Unnamed bit-field ivar (yuck).  */
        CONSTRUCTOR_APPEND_ELT (ivar, NULL_TREE,
                                build_int_cst (NULL_TREE, 0));

      /* Set type.  */
      id = add_objc_string (encode_field_decl (field_decl),
                            meth_var_types);
      CONSTRUCTOR_APPEND_ELT (ivar, NULL_TREE, id);

      /* Set offset.  */
      CONSTRUCTOR_APPEND_ELT (ivar, NULL_TREE, byte_position (field_decl));

      CONSTRUCTOR_APPEND_ELT (inits, NULL_TREE,
                              objc_build_constructor (type, ivar));

      do
        field_decl = DECL_CHAIN (field_decl);
      while (field_decl && TREE_CODE (field_decl) != FIELD_DECL);
    }
  while (field_decl);

  return objc_build_constructor (build_array_type (type, 0), inits);
}
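/* For orientation: each element built above corresponds to one entry in
   the Objective-C runtime's ivar metadata.  A rough, illustrative C
   equivalent of the per-ivar record (the struct name, field names,
   encodings and offsets below are assumptions of this sketch, not taken
   from the function above):  */
struct objc_ivar_sketch
{
  const char *ivar_name;    /* add_objc_string (DECL_NAME (...))         */
  const char *ivar_type;    /* add_objc_string (encode_field_decl (...)) */
  int         ivar_offset;  /* byte_position (field_decl)                */
};
/* A class with ivars "isa" (a Class) and "count" (an int) would then be
   described by an array along the lines of
     { "isa", "#", 0 }, { "count", "i", 8 }
   where the offset 8 assumes a 64-bit target and "#"/"i" are the usual
   Objective-C type encodings for Class and int.  */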
static void
mf_xform_derefs_1 (gimple_stmt_iterator *iter, tree *tp,
                   location_t location, tree dirflag)
{
  tree type, base, limit, addr, size, t;

  /* Don't instrument read operations.  */
  if (dirflag == integer_zero_node && flag_mudflap_ignore_reads)
    return;

  /* Don't instrument marked nodes.  */
  if (mf_marked_p (*tp))
    return;

  t = *tp;
  type = TREE_TYPE (t);

  if (type == error_mark_node)
    return;

  size = TYPE_SIZE_UNIT (type);

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case COMPONENT_REF:
      {
        /* This is trickier than it may first appear.  The reason is
           that we are looking at expressions from the "inside out" at
           this point.  We may have a complex nested aggregate/array
           expression (e.g. "a.b[i].c"), maybe with an indirection as
           the leftmost operator ("p->a.b.d"), where instrumentation
           is necessary.  Or we may have an innocent "a.b.c"
           expression that must not be instrumented.  We need to
           recurse all the way down the nesting structure to figure it
           out: looking just at the outer node is not enough.  */
        tree var;
        int component_ref_only = (TREE_CODE (t) == COMPONENT_REF);
        /* If we have a bitfield component reference, we must note the
           innermost addressable object in ELT, from which we will
           construct the byte-addressable bounds of the bitfield.  */
        tree elt = NULL_TREE;
        int bitfield_ref_p = (TREE_CODE (t) == COMPONENT_REF
                              && DECL_BIT_FIELD_TYPE (TREE_OPERAND (t, 1)));

        /* Iterate to the top of the ARRAY_REF/COMPONENT_REF
           containment hierarchy to find the outermost VAR_DECL.  */
        var = TREE_OPERAND (t, 0);
        while (1)
          {
            if (bitfield_ref_p && elt == NULL_TREE
                && (TREE_CODE (var) == ARRAY_REF
                    || TREE_CODE (var) == COMPONENT_REF))
              elt = var;

            if (TREE_CODE (var) == ARRAY_REF)
              {
                component_ref_only = 0;
                var = TREE_OPERAND (var, 0);
              }
            else if (TREE_CODE (var) == COMPONENT_REF)
              var = TREE_OPERAND (var, 0);
            else if (INDIRECT_REF_P (var)
                     || TREE_CODE (var) == MEM_REF)
              {
                base = TREE_OPERAND (var, 0);
                break;
              }
            else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
              {
                var = TREE_OPERAND (var, 0);
                if (CONSTANT_CLASS_P (var)
                    && TREE_CODE (var) != STRING_CST)
                  return;
              }
            else
              {
                gcc_assert (TREE_CODE (var) == VAR_DECL
                            || TREE_CODE (var) == PARM_DECL
                            || TREE_CODE (var) == RESULT_DECL
                            || TREE_CODE (var) == STRING_CST);
                /* Don't instrument this access if the underlying
                   variable is not "eligible".  This test matches
                   those arrays that have only known-valid indexes,
                   and thus are not labeled TREE_ADDRESSABLE.  */
                if (! mf_decl_eligible_p (var) || component_ref_only)
                  return;
                else
                  {
                    base = build1 (ADDR_EXPR,
                                   build_pointer_type (TREE_TYPE (var)), var);
                    break;
                  }
              }
          }

        /* Handle the case of ordinary non-indirection structure
           accesses.  These have only nested COMPONENT_REF nodes (no
           INDIRECT_REF), but pass through the above filter loop.
           Note that it's possible for such a struct variable to match
           the eligible_p test because someone else might take its
           address sometime.  */

        /* We need special processing for bitfield components, because
           their addresses cannot be taken.  */
        if (bitfield_ref_p)
          {
            tree field = TREE_OPERAND (t, 1);

            if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
              size = DECL_SIZE_UNIT (field);

            if (elt)
              elt = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (elt)),
                            elt);
            addr = fold_convert_loc (location, ptr_type_node,
                                     elt ? elt : base);
            addr = fold_build_pointer_plus_loc (location,
                                                addr, byte_position (field));
          }
        else
          addr = build1 (ADDR_EXPR, build_pointer_type (type), t);

        limit = fold_build2_loc (location, MINUS_EXPR, mf_uintptr_type,
                                 fold_build2_loc (location, PLUS_EXPR,
                                                  mf_uintptr_type,
                                                  fold_convert (mf_uintptr_type,
                                                                addr),
                                                  size),
                                 integer_one_node);
      }
      break;

    case INDIRECT_REF:
      addr = TREE_OPERAND (t, 0);
      base = addr;
      limit = fold_build_pointer_plus_hwi_loc
        (location, fold_build_pointer_plus_loc (location, base, size), -1);
      break;

    case MEM_REF:
      if (addr_expr_of_non_mem_decl_p (TREE_OPERAND (t, 0)))
        return;

      addr = fold_build_pointer_plus_loc (location, TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1));
      base = addr;
      limit = fold_build_pointer_plus_hwi_loc
        (location, fold_build_pointer_plus_loc (location, base, size), -1);
      break;

    case TARGET_MEM_REF:
      if (addr_expr_of_non_mem_decl_p (TMR_BASE (t)))
        return;

      addr = tree_mem_ref_addr (ptr_type_node, t);
      base = addr;
      limit = fold_build_pointer_plus_hwi_loc
        (location, fold_build_pointer_plus_loc (location, base, size), -1);
      break;

    case ARRAY_RANGE_REF:
      warning (OPT_Wmudflap,
               "mudflap checking not yet implemented for ARRAY_RANGE_REF");
      return;

    case BIT_FIELD_REF:
      /* ??? merge with COMPONENT_REF code above?  */
      {
        tree ofs, rem, bpu;

        /* If we're not dereferencing something, then the access
           must be ok.  */
        if (TREE_CODE (TREE_OPERAND (t, 0)) != INDIRECT_REF)
          return;

        bpu = bitsize_int (BITS_PER_UNIT);
        ofs = fold_convert (bitsizetype, TREE_OPERAND (t, 2));
        rem = size_binop_loc (location, TRUNC_MOD_EXPR, ofs, bpu);
        ofs = size_binop_loc (location, TRUNC_DIV_EXPR, ofs, bpu);

        size = fold_convert (bitsizetype, TREE_OPERAND (t, 1));
        size = size_binop_loc (location, PLUS_EXPR, size, rem);
        size = size_binop_loc (location, CEIL_DIV_EXPR, size, bpu);
        size = fold_convert (sizetype, size);

        addr = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
        addr = fold_convert (ptr_type_node, addr);
        addr = fold_build_pointer_plus_loc (location, addr, ofs);

        base = addr;
        limit = fold_build_pointer_plus_hwi_loc
          (location, fold_build_pointer_plus_loc (location, base, size), -1);
      }
      break;

    default:
      return;
    }

  mf_build_check_statement_for (base, limit, iter, location, dirflag);
}
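/* For reference, every check built above covers the closed byte range
   [base, limit] of the access.  A rough source-level sketch of what the
   instrumentation amounts to for a write through "*p" (a self-contained
   illustration only; the pass itself emits GIMPLE through
   mf_build_check_statement_for, and the runtime entry point and
   check-type constant named below are assumptions of this sketch):  */
#include <stddef.h>

extern void __mf_check (void *ptr, size_t sz, int type, const char *location);
#define SKETCH_MF_CHECK_WRITE 1   /* assumed write-check code */

static void
sketch_checked_store (int *p)
{
  void *base  = (void *) p;
  void *limit = (char *) p + sizeof (int) - 1;   /* last byte touched */

  /* Conceptually: verify every byte in [base, limit] lies within a
     tracked object before performing the store.  */
  __mf_check (base, (size_t) ((char *) limit - (char *) base) + 1,
              SKETCH_MF_CHECK_WRITE, "sketch");
  *p = 0;
}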
/* Walk the reference hierarchy of *TP up to its outermost node.  The
   logic is borrowed directly from mudflap's mf_xform_derefs_1.  */
static tree
mf_walk_comp_ref (tree *tp, tree type, location_t location,
                  tree *addr_store, tree *base_store)
{
  tree var, t, addr, base, size;

  t = *tp;

  int component_ref_only = (TREE_CODE (t) == COMPONENT_REF);
  /* If we have a bitfield component reference, we must note the
     innermost addressable object in ELT, from which we will construct
     the byte-addressable bounds of the bitfield.  */
  tree elt = NULL_TREE;
  int bitfield_ref_p = (TREE_CODE (t) == COMPONENT_REF
                        && DECL_BIT_FIELD_TYPE (TREE_OPERAND (t, 1)));

  /* Iterate to the top of the ARRAY_REF/COMPONENT_REF containment
     hierarchy to find the outermost VAR_DECL.  */
  var = TREE_OPERAND (t, 0);
  while (1)
    {
      if (bitfield_ref_p && elt == NULL_TREE
          && (TREE_CODE (var) == ARRAY_REF
              || TREE_CODE (var) == COMPONENT_REF))
        elt = var;

      if (TREE_CODE (var) == ARRAY_REF)
        {
          component_ref_only = 0;
          var = TREE_OPERAND (var, 0);
        }
      else if (TREE_CODE (var) == COMPONENT_REF)
        var = TREE_OPERAND (var, 0);
      else if (INDIRECT_REF_P (var)
               || TREE_CODE (var) == MEM_REF)
        {
          base = TREE_OPERAND (var, 0);
          break;
        }
      else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
        {
          var = TREE_OPERAND (var, 0);
          if (CONSTANT_CLASS_P (var)
              && TREE_CODE (var) != STRING_CST)
            return NULL_TREE;
        }
      else
        {
          DEBUGLOG ("TREE_CODE (var): %s, component_ref_only = %d, "
                    "eligible = %d\n",
                    tree_code_name[(int) TREE_CODE (var)],
                    component_ref_only, mf_decl_eligible_p (var));
          gcc_assert (TREE_CODE (var) == VAR_DECL
                      || TREE_CODE (var) == SSA_NAME /* TODO: check this.  */
                      || TREE_CODE (var) == PARM_DECL
                      || TREE_CODE (var) == RESULT_DECL
                      || TREE_CODE (var) == STRING_CST);
          /* Don't instrument this access if the underlying variable is
             not "eligible".  This test matches those arrays that have
             only known-valid indexes, and thus are not labeled
             TREE_ADDRESSABLE.  */
          if (! mf_decl_eligible_p (var)
              /* TODO: is this needed?  || component_ref_only */)
            return NULL_TREE;
          else
            {
              base = build1 (ADDR_EXPR,
                             build_pointer_type (TREE_TYPE (var)), var);
              break;
            }
        }
    }

  /* Handle the case of ordinary non-indirection structure accesses.
     These have only nested COMPONENT_REF nodes (no INDIRECT_REF), but
     pass through the above filter loop.  Note that it's possible for
     such a struct variable to match the eligible_p test because someone
     else might take its address sometime.  */

  /* We need special processing for bitfield components, because their
     addresses cannot be taken.  */
  if (bitfield_ref_p)
    {
      tree field = TREE_OPERAND (t, 1);

      if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
        size = DECL_SIZE_UNIT (field);

      if (elt)
        elt = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (elt)),
                      elt);
      addr = fold_convert_loc (location, ptr_type_node,
                               elt ? elt : base);
      addr = fold_build2_loc (location, POINTER_PLUS_EXPR, ptr_type_node,
                              addr,
                              fold_convert_loc (location, sizetype,
                                                byte_position (field)));
    }
  else
    addr = build1 (ADDR_EXPR, build_pointer_type (type), t);

  if (addr_store)
    *addr_store = addr;
  if (base_store)
    *base_store = addr;  /* ??? BASE is computed above but never returned;
                            ADDR is stored in both slots.  */

  return var;
}
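/* A hypothetical caller of mf_walk_comp_ref, to show the intended
   contract (sketch_instrument_ref and instrument_range below are
   assumptions for this sketch, not part of the pass): on success the
   function hands back the outermost decl and stores the address of the
   accessed object through ADDR_STORE/BASE_STORE; a NULL_TREE return
   means the access needs no instrumentation.  */
static void
sketch_instrument_ref (tree *expr_p, location_t loc)
{
  tree addr = NULL_TREE, base = NULL_TREE;
  tree var = mf_walk_comp_ref (expr_p, TREE_TYPE (*expr_p), loc,
                               &addr, &base);
  if (var == NULL_TREE)
    return;                       /* ineligible or constant access */

  instrument_range (base, addr);  /* hypothetical consumer */
}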