static void
generate_memset_zero (gimple stmt, tree op0, tree nb_iter,
                      gimple_stmt_iterator bsi)
{
  tree addr_base, nb_bytes;
  bool res = false;
  gimple_seq stmt_list = NULL, stmts;
  gimple fn_call;
  tree mem, fn;
  struct data_reference *dr = XCNEW (struct data_reference);
  location_t loc = gimple_location (stmt);

  DR_STMT (dr) = stmt;
  DR_REF (dr) = op0;
  res = dr_analyze_innermost (dr, loop_containing_stmt (stmt));
  gcc_assert (res && stride_of_unit_type_p (DR_STEP (dr), TREE_TYPE (op0)));

  nb_bytes = build_size_arg_loc (loc, nb_iter, op0, &stmt_list);
  addr_base = size_binop_loc (loc, PLUS_EXPR, DR_OFFSET (dr), DR_INIT (dr));
  addr_base = fold_convert_loc (loc, sizetype, addr_base);

  /* Test for a negative stride, iterating over every element.  */
  if (tree_int_cst_sgn (DR_STEP (dr)) == -1)
    {
      addr_base = size_binop_loc (loc, MINUS_EXPR, addr_base,
                                  fold_convert_loc (loc, sizetype, nb_bytes));
      addr_base = size_binop_loc (loc, PLUS_EXPR, addr_base,
                                  TYPE_SIZE_UNIT (TREE_TYPE (op0)));
    }

  addr_base = fold_build_pointer_plus_loc (loc, DR_BASE_ADDRESS (dr),
                                           addr_base);
  mem = force_gimple_operand (addr_base, &stmts, true, NULL);
  gimple_seq_add_seq (&stmt_list, stmts);

  fn = build_fold_addr_expr (builtin_decl_implicit (BUILT_IN_MEMSET));
  fn_call = gimple_build_call (fn, 3, mem, integer_zero_node, nb_bytes);
  gimple_seq_add_stmt (&stmt_list, fn_call);
  gsi_insert_seq_after (&bsi, stmt_list, GSI_CONTINUE_LINKING);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "generated memset zero\n");

  free_data_ref (dr);
}
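
/* For reference, a source-level sketch of the transformation above
   (illustrative only, not taken from the pass): a unit-stride loop
   that stores zeros is replaced by a single memset call whose length
   is the NB_BYTES value computed by build_size_arg_loc.  The function
   names below are made up for illustration.  */

#include <string.h>

/* Before distribution: an explicit zero-store loop with unit stride.  */
void
zero_loop_sketch (double *a, unsigned long n)
{
  for (unsigned long i = 0; i < n; i++)
    a[i] = 0.0;
}

/* After: the equivalent call generate_memset_zero emits, with
   nb_bytes = n * sizeof (double) and mem pointing at &a[0].  */
void
zero_memset_sketch (double *a, unsigned long n)
{
  memset (a, 0, n * sizeof (double));
}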
static void
mf_xform_derefs_1 (gimple_stmt_iterator *iter, tree *tp,
                   location_t location, tree dirflag)
{
  tree type, base, limit, addr, size, t;

  /* Don't instrument read operations.  */
  if (dirflag == integer_zero_node && flag_mudflap_ignore_reads)
    return;

  /* Don't instrument marked nodes.  */
  if (mf_marked_p (*tp))
    return;

  t = *tp;
  type = TREE_TYPE (t);

  if (type == error_mark_node)
    return;

  size = TYPE_SIZE_UNIT (type);

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case COMPONENT_REF:
      {
        /* This is trickier than it may first appear.  The reason is
           that we are looking at expressions from the "inside out" at
           this point.  We may have a complex nested aggregate/array
           expression (e.g. "a.b[i].c"), maybe with an indirection as
           the leftmost operator ("p->a.b.d"), where instrumentation
           is necessary.  Or we may have an innocent "a.b.c"
           expression that must not be instrumented.  We need to
           recurse all the way down the nesting structure to figure it
           out: looking just at the outer node is not enough.  */
        tree var;
        int component_ref_only = (TREE_CODE (t) == COMPONENT_REF);
        /* If we have a bitfield component reference, we must note the
           innermost addressable object in ELT, from which we will
           construct the byte-addressable bounds of the bitfield.  */
        tree elt = NULL_TREE;
        int bitfield_ref_p = (TREE_CODE (t) == COMPONENT_REF
                              && DECL_BIT_FIELD_TYPE (TREE_OPERAND (t, 1)));

        /* Iterate to the top of the ARRAY_REF/COMPONENT_REF
           containment hierarchy to find the outermost VAR_DECL.  */
        var = TREE_OPERAND (t, 0);
        while (1)
          {
            if (bitfield_ref_p && elt == NULL_TREE
                && (TREE_CODE (var) == ARRAY_REF
                    || TREE_CODE (var) == COMPONENT_REF))
              elt = var;

            if (TREE_CODE (var) == ARRAY_REF)
              {
                component_ref_only = 0;
                var = TREE_OPERAND (var, 0);
              }
            else if (TREE_CODE (var) == COMPONENT_REF)
              var = TREE_OPERAND (var, 0);
            else if (INDIRECT_REF_P (var)
                     || TREE_CODE (var) == MEM_REF)
              {
                base = TREE_OPERAND (var, 0);
                break;
              }
            else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
              {
                var = TREE_OPERAND (var, 0);
                if (CONSTANT_CLASS_P (var)
                    && TREE_CODE (var) != STRING_CST)
                  return;
              }
            else
              {
                gcc_assert (TREE_CODE (var) == VAR_DECL
                            || TREE_CODE (var) == PARM_DECL
                            || TREE_CODE (var) == RESULT_DECL
                            || TREE_CODE (var) == STRING_CST);
                /* Don't instrument this access if the underlying
                   variable is not "eligible".  This test matches
                   those arrays that have only known-valid indexes,
                   and thus are not labeled TREE_ADDRESSABLE.  */
                if (! mf_decl_eligible_p (var) || component_ref_only)
                  return;
                else
                  {
                    base = build1 (ADDR_EXPR,
                                   build_pointer_type (TREE_TYPE (var)),
                                   var);
                    break;
                  }
              }
          }

        /* Handle the case of ordinary non-indirection structure
           accesses.  These have only nested COMPONENT_REF nodes (no
           INDIRECT_REF), but pass through the above filter loop.
           Note that it's possible for such a struct variable to match
           the eligible_p test because someone else might take its
           address sometime.  */

        /* We need special processing for bitfield components, because
           their addresses cannot be taken.  */
        if (bitfield_ref_p)
          {
            tree field = TREE_OPERAND (t, 1);

            if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
              size = DECL_SIZE_UNIT (field);

            if (elt)
              elt = build1 (ADDR_EXPR,
                            build_pointer_type (TREE_TYPE (elt)), elt);
            addr = fold_convert_loc (location, ptr_type_node,
                                     elt ? elt : base);
            addr = fold_build_pointer_plus_loc (location, addr,
                                                byte_position (field));
          }
        else
          addr = build1 (ADDR_EXPR, build_pointer_type (type), t);

        limit = fold_build2_loc (location, MINUS_EXPR, mf_uintptr_type,
                                 fold_build2_loc (location, PLUS_EXPR,
                                                  mf_uintptr_type,
                                                  fold_convert
                                                    (mf_uintptr_type, addr),
                                                  size),
                                 integer_one_node);
      }
      break;

    case INDIRECT_REF:
      addr = TREE_OPERAND (t, 0);
      base = addr;
      limit = fold_build_pointer_plus_hwi_loc
        (location, fold_build_pointer_plus_loc (location, base, size), -1);
      break;

    case MEM_REF:
      if (addr_expr_of_non_mem_decl_p (TREE_OPERAND (t, 0)))
        return;

      addr = fold_build_pointer_plus_loc (location, TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1));
      base = addr;
      limit = fold_build_pointer_plus_hwi_loc
        (location, fold_build_pointer_plus_loc (location, base, size), -1);
      break;

    case TARGET_MEM_REF:
      if (addr_expr_of_non_mem_decl_p (TMR_BASE (t)))
        return;

      addr = tree_mem_ref_addr (ptr_type_node, t);
      base = addr;
      limit = fold_build_pointer_plus_hwi_loc
        (location, fold_build_pointer_plus_loc (location, base, size), -1);
      break;

    case ARRAY_RANGE_REF:
      warning (OPT_Wmudflap,
               "mudflap checking not yet implemented for ARRAY_RANGE_REF");
      return;

    case BIT_FIELD_REF:
      /* ??? merge with COMPONENT_REF code above?  */
      {
        tree ofs, rem, bpu;

        /* If we're not dereferencing something, then the access
           must be ok.  */
        if (TREE_CODE (TREE_OPERAND (t, 0)) != INDIRECT_REF)
          return;

        bpu = bitsize_int (BITS_PER_UNIT);
        ofs = fold_convert (bitsizetype, TREE_OPERAND (t, 2));
        rem = size_binop_loc (location, TRUNC_MOD_EXPR, ofs, bpu);
        ofs = size_binop_loc (location, TRUNC_DIV_EXPR, ofs, bpu);

        size = fold_convert (bitsizetype, TREE_OPERAND (t, 1));
        size = size_binop_loc (location, PLUS_EXPR, size, rem);
        size = size_binop_loc (location, CEIL_DIV_EXPR, size, bpu);
        size = fold_convert (sizetype, size);

        addr = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
        addr = fold_convert (ptr_type_node, addr);
        addr = fold_build_pointer_plus_loc (location, addr, ofs);

        base = addr;
        limit = fold_build_pointer_plus_hwi_loc
          (location, fold_build_pointer_plus_loc (location, base, size), -1);
      }
      break;

    default:
      return;
    }

  mf_build_check_statement_for (base, limit, iter, location, dirflag);
}
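
/* The base/limit pair computed above reduces, for the simple
   INDIRECT_REF/MEM_REF cases, to plain byte-range arithmetic.  A
   minimal sketch in ordinary C (illustrative only; the real code
   builds GIMPLE trees and the check itself is emitted by
   mf_build_check_statement_for):  */

#include <stddef.h>
#include <stdint.h>

/* For an access of SIZE bytes at ADDR, mudflap checks the inclusive
   byte range [base, limit] with limit = base + size - 1, matching the
   fold_build_pointer_plus_hwi_loc (..., -1) calls above.  For
   BIT_FIELD_REF the same range is derived from bit positions:
     ofs  = bit_offset / BITS_PER_UNIT
     size = (bit_size + bit_offset % BITS_PER_UNIT
             + BITS_PER_UNIT - 1) / BITS_PER_UNIT  */
static void
mf_range_sketch (const void *addr, size_t size,
                 uintptr_t *base, uintptr_t *limit)
{
  *base = (uintptr_t) addr;
  *limit = *base + size - 1;
}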
tree
gfc_build_array_ref (tree base, tree offset, tree decl)
{
  tree type = TREE_TYPE (base);
  tree tmp;
  tree span;

  if (GFC_ARRAY_TYPE_P (type) && GFC_TYPE_ARRAY_RANK (type) == 0)
    {
      gcc_assert (GFC_TYPE_ARRAY_CORANK (type) > 0);

      return fold_convert (TYPE_MAIN_VARIANT (type), base);
    }

  /* Scalar coarray, there is nothing to do.  */
  if (TREE_CODE (type) != ARRAY_TYPE)
    {
      gcc_assert (decl == NULL_TREE);
      gcc_assert (integer_zerop (offset));
      return base;
    }

  type = TREE_TYPE (type);

  if (DECL_P (base))
    TREE_ADDRESSABLE (base) = 1;

  /* Strip NON_LVALUE_EXPR nodes.  */
  STRIP_TYPE_NOPS (offset);

  /* If the array reference is to a pointer, whose target contains a
     subreference, use the span that is stored with the backend decl
     and reference the element with pointer arithmetic.  */
  if (decl
      && (TREE_CODE (decl) == FIELD_DECL
          || TREE_CODE (decl) == VAR_DECL
          || TREE_CODE (decl) == PARM_DECL)
      && ((GFC_DECL_SUBREF_ARRAY_P (decl)
           && !integer_zerop (GFC_DECL_SPAN (decl)))
          || GFC_DECL_CLASS (decl)))
    {
      if (GFC_DECL_CLASS (decl))
        {
          /* Allow for dummy arguments and other good things.  */
          if (POINTER_TYPE_P (TREE_TYPE (decl)))
            decl = build_fold_indirect_ref_loc (input_location, decl);

          /* Check if '_data' is an array descriptor.  If it is not,
             the array must be one of the components of the class
             object, so return a normal array reference.  */
          if (!GFC_DESCRIPTOR_TYPE_P (TREE_TYPE
                                        (gfc_class_data_get (decl))))
            return build4_loc (input_location, ARRAY_REF, type, base,
                               offset, NULL_TREE, NULL_TREE);

          span = gfc_vtable_size_get (decl);
        }
      else if (GFC_DECL_SUBREF_ARRAY_P (decl))
        span = GFC_DECL_SPAN (decl);
      else
        gcc_unreachable ();

      offset = fold_build2_loc (input_location, MULT_EXPR,
                                gfc_array_index_type,
                                offset, span);
      tmp = gfc_build_addr_expr (pvoid_type_node, base);
      tmp = fold_build_pointer_plus_loc (input_location, tmp, offset);
      tmp = fold_convert (build_pointer_type (type), tmp);
      if (!TYPE_STRING_FLAG (type))
        tmp = build_fold_indirect_ref_loc (input_location, tmp);
      return tmp;
    }
  else
    /* Otherwise use a straightforward array reference.  */
    return build4_loc (input_location, ARRAY_REF, type, base, offset,
                       NULL_TREE, NULL_TREE);
}
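
/* The subreference ("span") path above addresses elements that are
   not contiguous: a Fortran pointer assignment such as
       my_int_ptr => my_derived_array(:)%int_component
   makes consecutive targets sit one whole derived-type element
   apart, so the reference is built as pointer arithmetic,
   *(T *)((char *) base + offset * span), rather than an ARRAY_REF.
   A plain-C sketch of that addressing (names and types here are
   illustrative, not the generated GIMPLE):  */

#include <stddef.h>

struct elem_sketch { double pad; int c; };

/* The i-th component lives at base + i * span, where span is the
   distance between consecutive components: sizeof (struct
   elem_sketch), not sizeof (int).  */
static int *
span_ref_sketch (char *base, ptrdiff_t i, size_t span)
{
  return (int *) (base + i * span);
}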