/* Begin a catch handler whose parameter is DECL (NULL_TREE for a
   "catch (...)" handler).  Emits the runtime bookkeeping for entering
   the handler and initializes the handler parameter.  Returns the
   (EH-adjusted) handler type, or NULL_TREE when EH is disabled.  */
tree expand_start_catch_block (tree decl)
{
  tree exp;
  tree type, init;

  if (! doing_eh ())
    return NULL_TREE;

  if (decl)
    {
      /* An inadmissible catch parameter (e.g. incomplete type) is
         replaced by error_mark_node so the rest of the handler can
         still be processed.  */
      if (!is_admissible_throw_operand_or_catch_parameter (decl, false))
        decl = error_mark_node;

      type = prepare_eh_type (TREE_TYPE (decl));
      /* Make sure the RTTI node for the handler type is emitted.  */
      mark_used (eh_type_info (type));
    }
  else
    type = NULL_TREE;

  if (decl && decl_is_java_type (type, 1))
    {
      /* Java only passes object via pointer and doesn't require
         adjusting.  The java object is immediately before the
         generic exception header.  */
      exp = build_exc_ptr ();
      exp = build1 (NOP_EXPR, build_pointer_type (type), exp);
      /* Step back by one object size to reach the Java object that
         precedes the exception header.  */
      exp = fold_build_pointer_plus (exp,
                fold_build1_loc (input_location,
                                 NEGATE_EXPR, sizetype,
                                 TYPE_SIZE_UNIT (TREE_TYPE (exp))));
      exp = cp_build_indirect_ref (exp, RO_NULL, tf_warning_or_error);
      initialize_handler_parm (decl, exp);
      return type;
    }

  /* Call __cxa_end_catch at the end of processing the exception.  */
  push_eh_cleanup (type);

  init = do_begin_catch ();

  /* If there's no decl at all, then all we need to do is make sure
     to tell the runtime that we've begun handling the exception.  */
  if (decl == NULL || decl == error_mark_node || init == error_mark_node)
    finish_expr_stmt (init);

  /* If the C++ object needs constructing, we need to do that before
     calling __cxa_begin_catch, so that std::uncaught_exception gets
     the right value during the copy constructor.  */
  else if (flag_use_cxa_get_exception_ptr
           && TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
    {
      exp = do_get_exception_ptr ();
      initialize_handler_parm (decl, exp);
      finish_expr_stmt (init);
    }

  /* Otherwise the type uses a bitwise copy, and we don't have to worry
     about the value of std::uncaught_exception and therefore can do the
     copy with the return value of __cxa_end_catch instead.  */
  else
    {
      tree init_type = type;

      /* Pointers are passed by values, everything else by reference.  */
      if (!TYPE_PTR_P (type))
        init_type = build_pointer_type (type);
      if (init_type != TREE_TYPE (init))
        init = build1 (NOP_EXPR, init_type, init);
      /* Hold the begin-catch result in a register temporary, then copy
         it into the handler parameter.  */
      exp = create_temporary_var (init_type);
      DECL_REGISTER (exp) = 1;
      cp_finish_decl (exp, init, /*init_const_expr=*/false,
                      NULL_TREE, LOOKUP_ONLYCONVERTING);
      initialize_handler_parm (decl, exp);
    }

  return type;
}
/* Synthesize a CALL_EXPR and a TRY_FINALLY_EXPR, for this chain of
   _DECLs if appropriate.  Arrange to call the __mf_register function
   now, and the __mf_unregister function later for each.  Return the
   gimple sequence after synthesis.  */

gimple_seq
mx_register_decls (tree decl, gimple_seq seq, location_t location)
{
  gimple_seq finally_stmts = NULL;
  gimple_stmt_iterator initially_stmts = gsi_start (seq);

  /* Walk the whole DECL chain; only eligible, unprocessed automatic
     variables get register/unregister calls.  */
  while (decl != NULL_TREE)
    {
      if (mf_decl_eligible_p (decl)
          /* Not already processed.  */
          && ! mf_marked_p (decl)
          /* Automatic variable.  */
          && ! DECL_EXTERNAL (decl) && ! TREE_STATIC (decl))
        {
          tree size = NULL_TREE, variable_name;
          gimple unregister_fncall, register_fncall;
          tree unregister_fncall_param, register_fncall_param;

          /* Variable-sized objects should have sizes already been
             gimplified when we got here.  */
          size = fold_convert (size_type_node,
                               TYPE_SIZE_UNIT (TREE_TYPE (decl)));
          gcc_assert (is_gimple_val (size));

          unregister_fncall_param =
            mf_mark (build1 (ADDR_EXPR,
                             build_pointer_type (TREE_TYPE (decl)),
                             decl));
          /* __mf_unregister (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK) */
          unregister_fncall = gimple_build_call (mf_unregister_fndecl, 3,
                                                 unregister_fncall_param,
                                                 size,
                                                 integer_three_node);

          variable_name = mf_varname_tree (decl);
          register_fncall_param =
            mf_mark (build1 (ADDR_EXPR,
                             build_pointer_type (TREE_TYPE (decl)),
                             decl));
          /* __mf_register (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK,
             "name") */
          register_fncall = gimple_build_call (mf_register_fndecl, 4,
                                               register_fncall_param,
                                               size,
                                               integer_three_node,
                                               variable_name);

          /* Accumulate the two calls.  */
          gimple_set_location (register_fncall, location);
          gimple_set_location (unregister_fncall, location);

          /* Add the __mf_register call at the current appending point.  */
          if (gsi_end_p (initially_stmts))
            {
              /* Empty body: nowhere to insert the call, so the variable
                 cannot be tracked.  Only warn for user variables.  */
              if (!mf_artificial (decl))
                warning (OPT_Wmudflap,
                         "mudflap cannot track %qE in stub function",
                         DECL_NAME (decl));
            }
          else
            {
              gsi_insert_before (&initially_stmts, register_fncall,
                                 GSI_SAME_STMT);

              /* Accumulate the FINALLY piece.  */
              gimple_seq_add_stmt (&finally_stmts, unregister_fncall);
            }
          mf_mark (decl);
        }

      decl = DECL_CHAIN (decl);
    }

  /* Actually, (initially_stmts!=NULL) <=> (finally_stmts!=NULL) */
  if (finally_stmts != NULL)
    {
      /* Wrap SEQ in a TRY_FINALLY so the unregister calls run on every
         exit path, including exceptional ones.  */
      gimple stmt = gimple_build_try (seq, finally_stmts,
                                      GIMPLE_TRY_FINALLY);
      gimple_seq new_seq = NULL;

      gimple_seq_add_stmt (&new_seq, stmt);
      return new_seq;
    }
  else
    return seq;
}
/* Synthesize a CALL_EXPR and a TRY_FINALLY_EXPR, for this chain of
   _DECLs if appropriate.  Arrange to call the __mf_register function
   now, and the __mf_unregister function later for each.  The rewritten
   statement list is stored back through STMT_LIST.  */
static void
mx_register_decls (tree decl, tree *stmt_list)
{
  tree finally_stmts = NULL_TREE;
  tree_stmt_iterator initially_stmts = tsi_start (*stmt_list);

  /* Walk the DECL chain; only eligible, unprocessed automatic
     variables are instrumented.  */
  while (decl != NULL_TREE)
    {
      if (mf_decl_eligible_p (decl)
          /* Not already processed.  */
          && ! mf_marked_p (decl)
          /* Automatic variable.  */
          && ! DECL_EXTERNAL (decl) && ! TREE_STATIC (decl))
        {
          tree size = NULL_TREE, variable_name;
          tree unregister_fncall, unregister_fncall_params;
          tree register_fncall, register_fncall_params;

          size = convert (size_type_node,
                          TYPE_SIZE_UNIT (TREE_TYPE (decl)));

          /* (& VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK) */
          unregister_fncall_params =
            tree_cons (NULL_TREE,
                       convert (ptr_type_node,
                                mf_mark (build1 (ADDR_EXPR,
                                                 build_pointer_type
                                                   (TREE_TYPE (decl)),
                                                 decl))),
                       tree_cons (NULL_TREE,
                                  size,
                                  tree_cons (NULL_TREE,
                                             /* __MF_TYPE_STACK */
                                             build_int_cst (NULL_TREE, 3),
                                             NULL_TREE)));
          /* __mf_unregister (...) */
          unregister_fncall =
            build_function_call_expr (mf_unregister_fndecl,
                                      unregister_fncall_params);

          /* (& VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK, "name") */
          variable_name = mf_varname_tree (decl);
          register_fncall_params =
            tree_cons (NULL_TREE,
                       convert (ptr_type_node,
                                mf_mark (build1 (ADDR_EXPR,
                                                 build_pointer_type
                                                   (TREE_TYPE (decl)),
                                                 decl))),
                       tree_cons (NULL_TREE,
                                  size,
                                  tree_cons (NULL_TREE,
                                             /* __MF_TYPE_STACK */
                                             build_int_cst (NULL_TREE, 3),
                                             tree_cons (NULL_TREE,
                                                        variable_name,
                                                        NULL_TREE))));
          /* __mf_register (...) */
          register_fncall =
            build_function_call_expr (mf_register_fndecl,
                                      register_fncall_params);

          /* Accumulate the two calls.  */
          /* ??? Set EXPR_LOCATION.  */
          /* BUGFIX: the source had "gimplify_stmt (®ister_fncall)" — the
             '&reg' of '&register_fncall' had been corrupted into the
             (R) sign by an HTML-entity decode.  Restored the
             address-of operator.  */
          gimplify_stmt (&register_fncall);
          gimplify_stmt (&unregister_fncall);

          /* Add the __mf_register call at the current appending point.  */
          if (tsi_end_p (initially_stmts))
            /* Empty body: nowhere to insert, so the variable cannot be
               tracked.  */
            warning (0, "mudflap cannot track %qs in stub function",
                     IDENTIFIER_POINTER (DECL_NAME (decl)));
          else
            {
              tsi_link_before (&initially_stmts, register_fncall,
                               TSI_SAME_STMT);

              /* Accumulate the FINALLY piece.  */
              append_to_statement_list (unregister_fncall, &finally_stmts);
            }
          mf_mark (decl);
        }

      decl = TREE_CHAIN (decl);
    }

  /* Actually, (initially_stmts!=NULL) <=> (finally_stmts!=NULL) */
  if (finally_stmts != NULL_TREE)
    {
      /* Wrap the body in a TRY_FINALLY so the unregister calls run on
         every exit path.  */
      tree t = build2 (TRY_FINALLY_EXPR, void_type_node,
                       *stmt_list, finally_stmts);
      *stmt_list = NULL;
      append_to_statement_list (t, stmt_list);
    }
}
/* Define the preprocessor macro NAME to the size in bytes
   (TYPE_SIZE_UNIT) of TYPE.  */
static void
builtin_define_type_sizeof (const char *name, tree type)
{
  tree size_unit = TYPE_SIZE_UNIT (type);

  builtin_define_with_int_value (name, tree_low_cst (size_unit, 1));
}
/* Build (or re-use) the backend VAR_DECL for the COMMON block COM with
   layout UNION_TYPE.  IS_INIT says whether an initializer will be
   supplied.  The decl is cached in gfc_map_of_all_commons keyed by its
   mangled identifier.  */
static tree
build_common_decl (gfc_common_head *com, tree union_type, bool is_init)
{
  tree decl, identifier;

  identifier = gfc_sym_mangled_common_id (com);
  decl = gfc_map_of_all_commons.count(identifier)
         ? gfc_map_of_all_commons[identifier]
         : NULL_TREE;

  /* Update the size of this common block as needed.  */
  if (decl != NULL_TREE)
    {
      tree size = TYPE_SIZE_UNIT (union_type);

      /* Named common blocks of the same name shall be of the same size
         in all scoping units of a program in which they appear, but
         blank common blocks may be of different sizes.  */
      if (!tree_int_cst_equal (DECL_SIZE_UNIT (decl), size)
          && strcmp (com->name, BLANK_COMMON_NAME))
        gfc_warning ("Named COMMON block '%s' at %L shall be of the "
                     "same size as elsewhere (%lu vs %lu bytes)", com->name,
                     &com->where,
                     (unsigned long) TREE_INT_CST_LOW (size),
                     (unsigned long) TREE_INT_CST_LOW (DECL_SIZE_UNIT (decl)));

      /* Grow the decl to the largest size seen so far.  */
      if (tree_int_cst_lt (DECL_SIZE_UNIT (decl), size))
        {
          DECL_SIZE (decl) = TYPE_SIZE (union_type);
          DECL_SIZE_UNIT (decl) = size;
          DECL_MODE (decl) = TYPE_MODE (union_type);
          TREE_TYPE (decl) = union_type;
          layout_decl (decl, 0);
        }
    }

  /* If this common block has been declared in a previous program unit,
     and either it is already initialized or there is no new
     initialization for it, just return.  */
  if ((decl != NULL_TREE) && (!is_init || DECL_INITIAL (decl)))
    return decl;

  /* If there is no backend_decl for the common block, build it.  */
  if (decl == NULL_TREE)
    {
      /* A bind(c) common block with a binding label uses the mangled
         identifier directly; otherwise keep the Fortran name and set
         the assembler name separately.  */
      if (com->is_bind_c == 1 && com->binding_label)
        decl = build_decl (input_location, VAR_DECL, identifier, union_type);
      else
        {
          decl = build_decl (input_location, VAR_DECL,
                             get_identifier (com->name), union_type);
          gfc_set_decl_assembler_name (decl, identifier);
        }

      TREE_PUBLIC (decl) = 1;
      TREE_STATIC (decl) = 1;
      DECL_IGNORED_P (decl) = 1;
      if (!com->is_bind_c)
        DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
      else
        {
          /* Do not set the alignment for bind(c) common blocks to
             BIGGEST_ALIGNMENT because that won't match what C does.  Also,
             for common blocks with one element, the alignment must be
             that of the field within the common block in order to match
             what C will do.  */
          tree field = NULL_TREE;
          field = TYPE_FIELDS (TREE_TYPE (decl));
          if (DECL_CHAIN (field) == NULL_TREE)
            DECL_ALIGN (decl) = TYPE_ALIGN (TREE_TYPE (field));
        }
      DECL_USER_ALIGN (decl) = 0;
      GFC_DECL_COMMON_OR_EQUIV (decl) = 1;

      gfc_set_decl_location (decl, &com->where);

      if (com->threadprivate)
        DECL_TLS_MODEL (decl) = decl_default_tls_model (decl);

      /* Place the back end declaration for this common block in
         GLOBAL_BINDING_LEVEL.  */
      gfc_map_of_all_commons[identifier] = pushdecl_top_level (decl);
    }

  /* Has no initial values.  */
  if (!is_init)
    {
      DECL_INITIAL (decl) = NULL_TREE;
      DECL_COMMON (decl) = 1;
      DECL_DEFER_OUTPUT (decl) = 1;
    }
  else
    {
      /* error_mark_node marks "initializer pending"; the real
         constructor is attached later.  */
      DECL_INITIAL (decl) = error_mark_node;
      DECL_COMMON (decl) = 0;
      DECL_DEFER_OUTPUT (decl) = 0;
    }
  return decl;
}
/* Return (building it if needed) the thunk for FUNCTION with the given
   adjustment.  THIS_ADJUSTING distinguishes a `this'-adjusting thunk
   from a covariant-return thunk; FIXED_OFFSET is the constant
   adjustment, and VIRTUAL_OFFSET (if non-NULL) the vtable-based one.
   Existing matching thunks on DECL_THUNKS are re-used.  */
tree
make_thunk (tree function, bool this_adjusting,
            tree fixed_offset, tree virtual_offset)
{
  HOST_WIDE_INT d;
  tree thunk;

  gcc_assert (TREE_CODE (function) == FUNCTION_DECL);
  /* We can have this thunks to covariant thunks, but not vice versa.  */
  gcc_assert (!DECL_THIS_THUNK_P (function));
  gcc_assert (!DECL_RESULT_THUNK_P (function) || this_adjusting);

  /* Scale the VIRTUAL_OFFSET to be in terms of bytes.  */
  if (this_adjusting && virtual_offset)
    virtual_offset
      = size_binop (MULT_EXPR,
                    virtual_offset,
                    convert (ssizetype,
                             TYPE_SIZE_UNIT (vtable_entry_type)));

  d = tree_low_cst (fixed_offset, 0);

  /* See if we already have the thunk in question.  For this_adjusting
     thunks VIRTUAL_OFFSET will be an INTEGER_CST, for covariant thunks it
     will be a BINFO.  */
  for (thunk = DECL_THUNKS (function); thunk; thunk = TREE_CHAIN (thunk))
    if (DECL_THIS_THUNK_P (thunk) == this_adjusting
        && THUNK_FIXED_OFFSET (thunk) == d
        && !virtual_offset == !THUNK_VIRTUAL_OFFSET (thunk)
        && (!virtual_offset
            || (this_adjusting
                ? tree_int_cst_equal (THUNK_VIRTUAL_OFFSET (thunk),
                                      virtual_offset)
                : THUNK_VIRTUAL_OFFSET (thunk) == virtual_offset)))
      return thunk;

  /* All thunks must be created before FUNCTION is actually emitted;
     the ABI requires that all thunks be emitted together with the
     function to which they transfer control.  */
  gcc_assert (!TREE_ASM_WRITTEN (function));
  /* Likewise, we can only be adding thunks to a function declared in
     the class currently being laid out.  */
  gcc_assert (TYPE_SIZE (DECL_CONTEXT (function))
              && TYPE_BEING_DEFINED (DECL_CONTEXT (function)));

  thunk = build_decl (FUNCTION_DECL, NULL_TREE, TREE_TYPE (function));
  DECL_LANG_SPECIFIC (thunk) = DECL_LANG_SPECIFIC (function);
  cxx_dup_lang_specific_decl (thunk);
  DECL_THUNKS (thunk) = NULL_TREE;

  DECL_CONTEXT (thunk) = DECL_CONTEXT (function);
  TREE_READONLY (thunk) = TREE_READONLY (function);
  TREE_THIS_VOLATILE (thunk) = TREE_THIS_VOLATILE (function);
  TREE_PUBLIC (thunk) = TREE_PUBLIC (function);
  SET_DECL_THUNK_P (thunk, this_adjusting);
  THUNK_TARGET (thunk) = function;
  THUNK_FIXED_OFFSET (thunk) = d;
  THUNK_VIRTUAL_OFFSET (thunk) = virtual_offset;
  THUNK_ALIAS (thunk) = NULL_TREE;

  /* The thunk itself is not a constructor or destructor, even if
     the thing it is thunking to is.  */
  DECL_INTERFACE_KNOWN (thunk) = 1;
  DECL_NOT_REALLY_EXTERN (thunk) = 1;
  DECL_SAVED_FUNCTION_DATA (thunk) = NULL;
  DECL_DESTRUCTOR_P (thunk) = 0;
  DECL_CONSTRUCTOR_P (thunk) = 0;
  DECL_EXTERNAL (thunk) = 1;
  DECL_ARTIFICIAL (thunk) = 1;
  /* Even if this thunk is a member of a local class, we don't
     need a static chain.  */
  DECL_NO_STATIC_CHAIN (thunk) = 1;
  /* The THUNK is not a pending inline, even if the FUNCTION is.  */
  DECL_PENDING_INLINE_P (thunk) = 0;
  DECL_INLINE (thunk) = 0;
  DECL_DECLARED_INLINE_P (thunk) = 0;
  /* Nor has it been deferred.  */
  DECL_DEFERRED_FN (thunk) = 0;
  /* Nor is it a template instantiation.  */
  DECL_USE_TEMPLATE (thunk) = 0;
  DECL_TEMPLATE_INFO (thunk) = NULL;

  /* Add it to the list of thunks associated with FUNCTION.  */
  TREE_CHAIN (thunk) = DECL_THUNKS (function);
  DECL_THUNKS (function) = thunk;

  return thunk;
}
/* Replace the zeroing loop whose store is STMT (writing OP0, NB_ITER
   iterations) with a single call to memset (addr, 0, nbytes), inserted
   after BSI.  Returns true on success, false if the access pattern is
   not a unit-stride sweep over every element.  */
static bool
generate_memset_zero (gimple stmt, tree op0, tree nb_iter,
                      gimple_stmt_iterator bsi)
{
  tree t, addr_base;
  tree nb_bytes = NULL;
  bool res = false;
  gimple_seq stmts = NULL, stmt_list = NULL;
  gimple fn_call;
  tree mem, fndecl, fntype, fn;
  gimple_stmt_iterator i;
  ssa_op_iter iter;
  struct data_reference *dr = XCNEW (struct data_reference);

  DR_STMT (dr) = stmt;
  DR_REF (dr) = op0;
  if (!dr_analyze_innermost (dr))
    goto end;

  /* Test for a positive stride, iterating over every element.  */
  if (integer_zerop (fold_build2 (MINUS_EXPR, integer_type_node, DR_STEP (dr),
                                  TYPE_SIZE_UNIT (TREE_TYPE (op0)))))
    {
      tree offset = fold_convert (sizetype,
                                  size_binop (PLUS_EXPR,
                                              DR_OFFSET (dr),
                                              DR_INIT (dr)));
      addr_base = fold_build2 (POINTER_PLUS_EXPR,
                               TREE_TYPE (DR_BASE_ADDRESS (dr)),
                               DR_BASE_ADDRESS (dr), offset);
    }

  /* Test for a negative stride, iterating over every element.  */
  else if (integer_zerop (fold_build2 (PLUS_EXPR, integer_type_node,
                                       TYPE_SIZE_UNIT (TREE_TYPE (op0)),
                                       DR_STEP (dr))))
    {
      /* For a downward sweep the region starts NB_BYTES below the
         initial address.  */
      nb_bytes = build_size_arg (nb_iter, op0, &stmt_list);
      addr_base = size_binop (PLUS_EXPR, DR_OFFSET (dr), DR_INIT (dr));
      addr_base = fold_build2 (MINUS_EXPR, sizetype, addr_base, nb_bytes);
      addr_base = force_gimple_operand (addr_base, &stmts, true, NULL);
      gimple_seq_add_seq (&stmt_list, stmts);

      addr_base = fold_build2 (POINTER_PLUS_EXPR,
                               TREE_TYPE (DR_BASE_ADDRESS (dr)),
                               DR_BASE_ADDRESS (dr), addr_base);
    }
  else
    goto end;

  mem = force_gimple_operand (addr_base, &stmts, true, NULL);
  gimple_seq_add_seq (&stmt_list, stmts);

  fndecl = implicit_built_in_decls [BUILT_IN_MEMSET];
  fntype = TREE_TYPE (fndecl);
  fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  /* The positive-stride path has not computed the byte count yet.  */
  if (!nb_bytes)
    nb_bytes = build_size_arg (nb_iter, op0, &stmt_list);
  fn_call = gimple_build_call (fn, 3, mem, integer_zero_node, nb_bytes);
  gimple_seq_add_stmt (&stmt_list, fn_call);

  /* Queue every symbol the new statements (virtually) define for SSA
     renaming.  */
  for (i = gsi_start (stmt_list); !gsi_end_p (i); gsi_next (&i))
    {
      gimple s = gsi_stmt (i);
      update_stmt_if_modified (s);

      FOR_EACH_SSA_TREE_OPERAND (t, s, iter, SSA_OP_VIRTUAL_DEFS)
        {
          if (TREE_CODE (t) == SSA_NAME)
            t = SSA_NAME_VAR (t);
          mark_sym_for_renaming (t);
        }
    }

  /* Mark also the uses of the VDEFS of STMT to be renamed.  */
  FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, SSA_OP_VIRTUAL_DEFS)
    {
      if (TREE_CODE (t) == SSA_NAME)
        {
          gimple s;
          imm_use_iterator imm_iter;

          FOR_EACH_IMM_USE_STMT (s, imm_iter, t)
            update_stmt (s);

          t = SSA_NAME_VAR (t);
        }
      mark_sym_for_renaming (t);
    }

  gsi_insert_seq_after (&bsi, stmt_list, GSI_CONTINUE_LINKING);
  res = true;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "generated memset zero\n");

  /* NOTE(review): `todo' appears to be a file-scope TODO-flags
     accumulator for the pass — confirm against the enclosing file.  */
  todo |= TODO_rebuild_alias;

 end:
  free_data_ref (dr);
  return res;
}
/* Attribute handler for `latent_entropy'.  Accepts static variables of
   integer type, fixed-length integer arrays, or structures whose fields
   are all integers, and gives them a random initializer; accepts
   function decls with no action.  Rejects everything else, setting
   *NO_ADD_ATTRS and emitting an error.  */
static tree handle_latent_entropy_attribute(tree *node, tree name,
						tree args __unused,
						int flags __unused,
						bool *no_add_attrs)
{
	tree type;
#if BUILDING_GCC_VERSION <= 4007
	VEC(constructor_elt, gc) *vals;
#else
	vec<constructor_elt, va_gc> *vals;
#endif

	switch (TREE_CODE(*node)) {
	default:
		*no_add_attrs = true;
		error("%qE attribute only applies to functions and variables",
			name);
		break;

	case VAR_DECL:
		/* Must not already have an initializer and must have
		   static storage duration.  */
		if (DECL_INITIAL(*node)) {
			*no_add_attrs = true;
			error("variable %qD with %qE attribute must not be initialized",
				*node, name);
			break;
		}

		if (!TREE_STATIC(*node)) {
			*no_add_attrs = true;
			error("variable %qD with %qE attribute must not be local",
				*node, name);
			break;
		}

		type = TREE_TYPE(*node);
		switch (TREE_CODE(type)) {
		default:
			*no_add_attrs = true;
			error("variable %qD with %qE attribute must be an integer or a fixed length integer array type or a fixed sized structure with integer fields",
				*node, name);
			break;

		case RECORD_TYPE: {
			tree fld, lst = TYPE_FIELDS(type);
			unsigned int nelt = 0;

			/* First pass: count fields and reject any
			   non-integer field.  */
			for (fld = lst; fld; nelt++, fld = TREE_CHAIN(fld)) {
				tree fieldtype;

				fieldtype = TREE_TYPE(fld);
				if (TREE_CODE(fieldtype) == INTEGER_TYPE)
					continue;

				*no_add_attrs = true;
				error("structure variable %qD with %qE attribute has a non-integer field %qE",
					*node, name, fld);
				break;
			}

			/* Non-NULL fld means the loop above broke out on
			   a bad field.  */
			if (fld)
				break;

#if BUILDING_GCC_VERSION <= 4007
			vals = VEC_alloc(constructor_elt, gc, nelt);
#else
			vec_alloc(vals, nelt);
#endif

			for (fld = lst; fld; fld = TREE_CHAIN(fld)) {
				tree random_const, fld_t = TREE_TYPE(fld);

				random_const = tree_get_random_const(fld_t);
				CONSTRUCTOR_APPEND_ELT(vals, fld, random_const);
			}

			/* Initialize the fields with random constants */
			DECL_INITIAL(*node) = build_constructor(type, vals);
			break;
		}

		/* Initialize the variable with a random constant */
		case INTEGER_TYPE:
			DECL_INITIAL(*node) = tree_get_random_const(type);
			break;

		case ARRAY_TYPE: {
			tree elt_type, array_size, elt_size;
			unsigned int i, nelt;
			HOST_WIDE_INT array_size_int, elt_size_int;

			elt_type = TREE_TYPE(type);
			elt_size = TYPE_SIZE_UNIT(TREE_TYPE(type));
			array_size = TYPE_SIZE_UNIT(type);

			if (TREE_CODE(elt_type) != INTEGER_TYPE || !array_size
				|| TREE_CODE(array_size) != INTEGER_CST) {
				*no_add_attrs = true;
				error("array variable %qD with %qE attribute must be a fixed length integer array type",
					*node, name);
				break;
			}

			/* Element count = total bytes / element bytes.  */
			array_size_int = TREE_INT_CST_LOW(array_size);
			elt_size_int = TREE_INT_CST_LOW(elt_size);
			nelt = array_size_int / elt_size_int;

#if BUILDING_GCC_VERSION <= 4007
			vals = VEC_alloc(constructor_elt, gc, nelt);
#else
			vec_alloc(vals, nelt);
#endif

			for (i = 0; i < nelt; i++) {
				tree cst = size_int(i);
				tree rand_cst = tree_get_random_const(elt_type);

				CONSTRUCTOR_APPEND_ELT(vals, cst, rand_cst);
			}

			/*
			 * Initialize the elements of the array with random
			 * constants
			 */
			DECL_INITIAL(*node) = build_constructor(type, vals);
			break;
		}
		}
		break;

	case FUNCTION_DECL:
		break;
	}

	return NULL_TREE;
}
/* Build a statement tree that applies FN to ARG1 (and optionally ARG2,
   e.g. a copy constructor taking source and destination).  For array
   arguments, a loop over every element is synthesized; otherwise a
   single call is built.  Returns the statement list / expression, or
   NULL if FN is NULL.  */
static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = (tree *) alloca (nargs * sizeof (tree));

  /* Skip the `this' argument type (and the source argument type when
     ARG2 is supplied) to reach the defaulted parameters.  */
  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      /* Drill down through nested array types to the element type,
         taking element [0] at each level.  */
      do
        {
          inner_type = TREE_TYPE (inner_type);
          start1 = build4 (ARRAY_REF, inner_type, start1,
                           size_zero_node, NULL, NULL);
          if (arg2)
            start2 = build4 (ARRAY_REF, inner_type, start2,
                             size_zero_node, NULL, NULL);
        }
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr (start1);
      if (arg2)
        start2 = build_fold_addr_expr (start2);

      /* END1 = one past the last byte of the array.  */
      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = build2 (POINTER_PLUS_EXPR, TREE_TYPE (start1), start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1), NULL);
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          p2 = create_tmp_var (TREE_TYPE (start2), NULL);
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
          append_to_statement_list (t, &ret);
        }

      /* Loop head label; the loop is closed by the conditional jump
         built below.  */
      lab = create_artificial_label ();
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
        argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm), fn, i);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      /* Advance P1 (and P2) by one element size.  */
      t = TYPE_SIZE_UNIT (inner_type);
      t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (p1), p1, t);
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          t = TYPE_SIZE_UNIT (inner_type);
          t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (p2), p2, t);
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
          append_to_statement_list (t, &ret);
        }

      /* Repeat while P1 has not reached END1.  */
      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      argarray[i++] = build_fold_addr_expr (arg1);
      if (arg2)
        argarray[i++] = build_fold_addr_expr (arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm), fn, i);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
/* Attribute handler for `latent_entropy' (older variant).  Accepts
   static variables of integer type, fixed-length integer arrays, or
   structures with only integer fields, and initializes them with
   random constants masked to the type's bit width; function decls are
   accepted with no action.  Everything else is rejected with an error
   and *NO_ADD_ATTRS set.  */
static tree handle_latent_entropy_attribute(tree *node, tree name, tree args, int flags, bool *no_add_attrs)
{
	tree type;
	unsigned long long mask;
#if BUILDING_GCC_VERSION <= 4007
	VEC(constructor_elt, gc) *vals;
#else
	vec<constructor_elt, va_gc> *vals;
#endif

	switch (TREE_CODE(*node)) {
	default:
		*no_add_attrs = true;
		error("%qE attribute only applies to functions and variables", name);
		break;

	case VAR_DECL:
		/* Must not already have an initializer and must have
		   static storage duration.  */
		if (DECL_INITIAL(*node)) {
			*no_add_attrs = true;
			error("variable %qD with %qE attribute must not be initialized", *node, name);
			break;
		}

		if (!TREE_STATIC(*node)) {
			*no_add_attrs = true;
			error("variable %qD with %qE attribute must not be local", *node, name);
			break;
		}

		type = TREE_TYPE(*node);
		switch (TREE_CODE(type)) {
		default:
			*no_add_attrs = true;
			error("variable %qD with %qE attribute must be an integer or a fixed length integer array type or a fixed sized structure with integer fields", *node, name);
			break;

		case RECORD_TYPE: {
			tree field;
			unsigned int nelt = 0;

			/* First pass: count fields, reject non-integer
			   ones.  */
			for (field = TYPE_FIELDS(type); field; nelt++, field = TREE_CHAIN(field)) {
				tree fieldtype;

				fieldtype = TREE_TYPE(field);
				if (TREE_CODE(fieldtype) != INTEGER_TYPE) {
					*no_add_attrs = true;
					error("structure variable %qD with %qE attribute has a non-integer field %qE", *node, name, field);
					break;
				}
			}

			/* Non-NULL field means the loop broke out on a
			   bad field.  */
			if (field)
				break;

#if BUILDING_GCC_VERSION <= 4007
			vals = VEC_alloc(constructor_elt, gc, nelt);
#else
			vec_alloc(vals, nelt);
#endif

			for (field = TYPE_FIELDS(type); field; field = TREE_CHAIN(field)) {
				tree fieldtype;

				fieldtype = TREE_TYPE(field);
				/* Build an all-ones mask as wide as the
				   field type, avoiding UB from shifting
				   by the full width.  */
				mask = 1ULL << (TREE_INT_CST_LOW(TYPE_SIZE(fieldtype)) - 1);
				mask = 2 * (mask - 1) + 1;

				if (TYPE_UNSIGNED(fieldtype))
					CONSTRUCTOR_APPEND_ELT(vals, field, build_int_cstu(fieldtype, mask & get_random_const()));
				else
					CONSTRUCTOR_APPEND_ELT(vals, field, build_int_cst(fieldtype, mask & get_random_const()));
			}

			DECL_INITIAL(*node) = build_constructor(type, vals);
//debug_tree(DECL_INITIAL(*node));
			break;
		}

		case INTEGER_TYPE:
			/* All-ones mask as wide as TYPE (see above).  */
			mask = 1ULL << (TREE_INT_CST_LOW(TYPE_SIZE(type)) - 1);
			mask = 2 * (mask - 1) + 1;

			if (TYPE_UNSIGNED(type))
				DECL_INITIAL(*node) = build_int_cstu(type, mask & get_random_const());
			else
				DECL_INITIAL(*node) = build_int_cst(type, mask & get_random_const());
			break;

		case ARRAY_TYPE: {
			tree elt_type, array_size, elt_size;
			unsigned int i, nelt;

			elt_type = TREE_TYPE(type);
			elt_size = TYPE_SIZE_UNIT(TREE_TYPE(type));
			array_size = TYPE_SIZE_UNIT(type);

			if (TREE_CODE(elt_type) != INTEGER_TYPE || !array_size || TREE_CODE(array_size) != INTEGER_CST) {
				*no_add_attrs = true;
				error("array variable %qD with %qE attribute must be a fixed length integer array type", *node, name);
				break;
			}

			/* Element count = total bytes / element bytes.  */
			nelt = TREE_INT_CST_LOW(array_size) / TREE_INT_CST_LOW(elt_size);

#if BUILDING_GCC_VERSION <= 4007
			vals = VEC_alloc(constructor_elt, gc, nelt);
#else
			vec_alloc(vals, nelt);
#endif

			/* All-ones mask as wide as the element type.  */
			mask = 1ULL << (TREE_INT_CST_LOW(TYPE_SIZE(elt_type)) - 1);
			mask = 2 * (mask - 1) + 1;

			for (i = 0; i < nelt; i++)
				if (TYPE_UNSIGNED(elt_type))
					CONSTRUCTOR_APPEND_ELT(vals, size_int(i), build_int_cstu(elt_type, mask & get_random_const()));
				else
					CONSTRUCTOR_APPEND_ELT(vals, size_int(i), build_int_cst(elt_type, mask & get_random_const()));

			DECL_INITIAL(*node) = build_constructor(type, vals);
//debug_tree(DECL_INITIAL(*node));
			break;
		}
		}
		break;

	case FUNCTION_DECL:
		break;
	}

	return NULL_TREE;
}
/* Build (or re-use) the backend VAR_DECL for the COMMON block COM with
   layout UNION_TYPE.  IS_INIT says whether an initializer will be
   supplied.  Decls are cached via symbols in the dedicated
   gfc_common_ns namespace.  */
static tree
build_common_decl (gfc_common_head *com, tree union_type, bool is_init)
{
  gfc_symbol *common_sym;
  tree decl;

  /* Create a namespace to store symbols for common blocks.  */
  if (gfc_common_ns == NULL)
    gfc_common_ns = gfc_get_namespace (NULL);

  gfc_get_symbol (com->name, gfc_common_ns, &common_sym);
  decl = common_sym->backend_decl;

  /* Update the size of this common block as needed.  */
  if (decl != NULL_TREE)
    {
      tree size = TYPE_SIZE_UNIT (union_type);
      if (tree_int_cst_lt (DECL_SIZE_UNIT (decl), size))
        {
          /* Named common blocks of the same name shall be of the same
             size in all scoping units of a program in which they
             appear, but blank common blocks may be of different
             sizes.  */
          if (strcmp (com->name, BLANK_COMMON_NAME))
            gfc_warning ("Named COMMON block '%s' at %L shall be of the "
                         "same size", com->name, &com->where);
          DECL_SIZE_UNIT (decl) = size;
        }
    }

  /* If this common block has been declared in a previous program unit,
     and either it is already initialized or there is no new
     initialization for it, just return.  */
  if ((decl != NULL_TREE) && (!is_init || DECL_INITIAL (decl)))
    return decl;

  /* If there is no backend_decl for the common block, build it.  */
  if (decl == NULL_TREE)
    {
      decl = build_decl (VAR_DECL, get_identifier (com->name), union_type);
      SET_DECL_ASSEMBLER_NAME (decl, gfc_sym_mangled_common_id (com->name));
      TREE_PUBLIC (decl) = 1;
      TREE_STATIC (decl) = 1;
      DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (decl) = 0;

      gfc_set_decl_location (decl, &com->where);

      /* Place the back end declaration for this common block in
         GLOBAL_BINDING_LEVEL.  */
      common_sym->backend_decl = pushdecl_top_level (decl);
    }

  /* Has no initial values.  */
  if (!is_init)
    {
      DECL_INITIAL (decl) = NULL_TREE;
      DECL_COMMON (decl) = 1;
      DECL_DEFER_OUTPUT (decl) = 1;
    }
  else
    {
      /* error_mark_node marks "initializer pending"; the real
         constructor is attached later.  */
      DECL_INITIAL (decl) = error_mark_node;
      DECL_COMMON (decl) = 0;
      DECL_DEFER_OUTPUT (decl) = 0;
    }
  return decl;
}
/* Begin a catch handler whose parameter is DECL (NULL_TREE for a
   "catch (...)" handler) -- older variant.  Emits the runtime
   bookkeeping for entering the handler and initializes the handler
   parameter.  Returns the (EH-adjusted) handler type, or NULL_TREE
   when EH is disabled.  */
tree
expand_start_catch_block (tree decl)
{
  tree exp = NULL_TREE;
  tree type;
  bool is_java;

  if (! doing_eh (1))
    return NULL_TREE;

  /* Make sure this declaration is reasonable.  */
  if (decl && !complete_ptr_ref_or_void_ptr_p (TREE_TYPE (decl), NULL_TREE))
    decl = NULL_TREE;

  if (decl)
    type = prepare_eh_type (TREE_TYPE (decl));
  else
    type = NULL_TREE;

  is_java = false;
  if (decl)
    {
      tree init;

      if (decl_is_java_type (type, 1))
        {
          /* Java only passes object via pointer and doesn't require
             adjusting.  The java object is immediately before the
             generic exception header.  */
          init = build_exc_ptr ();
          init = build1 (NOP_EXPR, build_pointer_type (type), init);
          /* Step back by one object size to reach the Java object
             preceding the exception header.  */
          init = build (MINUS_EXPR, TREE_TYPE (init), init,
                        TYPE_SIZE_UNIT (TREE_TYPE (init)));
          init = build_indirect_ref (init, NULL);
          is_java = true;
        }
      else
        {
          /* C++ requires that we call __cxa_begin_catch to get the
             pointer to the actual object.  */
          init = do_begin_catch ();
        }

      exp = create_temporary_var (ptr_type_node);
      DECL_REGISTER (exp) = 1;
      cp_finish_decl (exp, init, NULL_TREE, LOOKUP_ONLYCONVERTING);
      finish_expr_stmt (build_modify_expr (exp, INIT_EXPR, init));
    }
  else
    finish_expr_stmt (do_begin_catch ());

  /* C++ requires that we call __cxa_end_catch at the end of
     processing the exception.  */
  if (! is_java)
    push_eh_cleanup (type);

  if (decl)
    initialize_handler_parm (decl, exp);

  return type;
}
/* Estimate the self-reuse distance of reference DR inside the nest of
   N loops whose iteration counts are in LOOP_SIZES, with LOOP the
   innermost loop.  Returns the size of the innermost loop in which
   the per-iteration stride fits in an L1 cache line (i.e. the line is
   reused), or ~0u when no such loop is found.  */
static unsigned
self_reuse_distance (data_reference_p dr, unsigned *loop_sizes, unsigned n,
		     struct loop *loop)
{
  tree stride, access_fn;
  HOST_WIDE_INT *strides, astride;
  VEC (tree, heap) *access_fns;
  tree ref = DR_REF (dr);
  unsigned i, ret = ~0u;

  /* In the following example:

     for (i = 0; i < N; i++)
       for (j = 0; j < N; j++)
         use (a[j][i]);
     the same cache line is accessed each N steps (except if the
     change from i to i + 1 crosses the boundary of the cache line).
     Thus, for self-reuse, we cannot rely purely on the results of the
     data dependence analysis.

     Instead, we compute the stride of the reference in each loop, and
     consider the innermost loop in that the stride is less than cache
     size.  */

  strides = XCNEWVEC (HOST_WIDE_INT, n);
  access_fns = DR_ACCESS_FNS (dr);

  for (i = 0; VEC_iterate (tree, access_fns, i, access_fn); i++)
    {
      /* Keep track of the reference corresponding to the subscript, so that
	 we know its stride.  */
      while (handled_component_p (ref) && TREE_CODE (ref) != ARRAY_REF)
	ref = TREE_OPERAND (ref, 0);

      if (TREE_CODE (ref) == ARRAY_REF)
	{
	  /* Stride of this subscript is the element size; fall back to
	     a whole cache line when it is not a compile-time
	     constant.  */
	  stride = TYPE_SIZE_UNIT (TREE_TYPE (ref));
	  if (host_integerp (stride, 1))
	    astride = tree_low_cst (stride, 1);
	  else
	    astride = L1_CACHE_LINE_SIZE;

	  ref = TREE_OPERAND (ref, 0);
	}
      else
	astride = 1;

      add_subscript_strides (access_fn, astride, strides, n, loop);
    }

  /* Scan from the innermost loop outwards for the first loop whose
     absolute stride fits in a cache line and whose trip count is large
     enough to defeat the cache.  */
  for (i = n; i-- > 0; )
    {
      unsigned HOST_WIDE_INT s;

      s = strides[i] < 0 ?  -strides[i] : strides[i];

      if (s < (unsigned) L1_CACHE_LINE_SIZE
	  && (loop_sizes[i]
	      > (unsigned) (L1_CACHE_SIZE_BYTES / NONTEMPORAL_FRACTION)))
	{
	  ret = loop_sizes[i];
	  break;
	}
    }

  free (strides);
  return ret;
}
/* Build the OpenMP REDUCTION_INIT and REDUCTION_MERGE statement lists for
   the reduction clause C whose variable SYM is an array.  Fake symbols and
   expressions are constructed on the stack so the regular Fortran
   assignment translator can be reused; the results are stored into the
   clause via OMP_CLAUSE_REDUCTION_INIT/MERGE/PLACEHOLDER.  WHERE is the
   source location used for all synthesized nodes.  */
static void
gfc_trans_omp_array_reduction (tree c, gfc_symbol *sym, locus where)
{
  gfc_symtree *root1 = NULL, *root2 = NULL, *root3 = NULL, *root4 = NULL;
  gfc_symtree *symtree1, *symtree2, *symtree3, *symtree4 = NULL;
  gfc_symbol init_val_sym, outer_sym, intrinsic_sym;
  gfc_expr *e1, *e2, *e3, *e4;
  gfc_ref *ref;
  tree decl, backend_decl, stmt;
  locus old_loc = gfc_current_locus;
  const char *iname;
  gfc_try t;

  decl = OMP_CLAUSE_DECL (c);
  gfc_current_locus = where;

  /* Create a fake symbol for init value.  Its backend decl is the
     reduction identity produced by omp_reduction_init.  */
  memset (&init_val_sym, 0, sizeof (init_val_sym));
  init_val_sym.ns = sym->ns;
  init_val_sym.name = sym->name;
  init_val_sym.ts = sym->ts;
  init_val_sym.attr.referenced = 1;
  init_val_sym.declared_at = where;
  init_val_sym.attr.flavor = FL_VARIABLE;
  backend_decl = omp_reduction_init (c, gfc_sym_type (&init_val_sym));
  init_val_sym.backend_decl = backend_decl;

  /* Create a fake symbol for the outer array reference (the original,
     shared variable seen from inside the parallel region).  */
  outer_sym = *sym;
  outer_sym.as = gfc_copy_array_spec (sym->as);
  outer_sym.attr.dummy = 0;
  outer_sym.attr.result = 0;
  outer_sym.attr.flavor = FL_VARIABLE;
  outer_sym.backend_decl = create_tmp_var_raw (TREE_TYPE (decl), NULL);

  /* Create fake symtrees for it.  Each must be the root of its own tree
     (asserted) so lookup is unambiguous.  */
  symtree1 = gfc_new_symtree (&root1, sym->name);
  symtree1->n.sym = sym;
  gcc_assert (symtree1 == root1);
  symtree2 = gfc_new_symtree (&root2, sym->name);
  symtree2->n.sym = &init_val_sym;
  gcc_assert (symtree2 == root2);
  symtree3 = gfc_new_symtree (&root3, sym->name);
  symtree3->n.sym = &outer_sym;
  gcc_assert (symtree3 == root3);

  /* Create expressions:
     e1 = private copy (full-array ref), e2 = init value,
     e3 = outer variable, e4 = merge expression combining e3 and e1.  */
  e1 = gfc_get_expr ();
  e1->expr_type = EXPR_VARIABLE;
  e1->where = where;
  e1->symtree = symtree1;
  e1->ts = sym->ts;
  e1->ref = ref = gfc_get_ref ();
  ref->type = REF_ARRAY;
  ref->u.ar.where = where;
  ref->u.ar.as = sym->as;
  ref->u.ar.type = AR_FULL;
  ref->u.ar.dimen = 0;
  t = gfc_resolve_expr (e1);
  gcc_assert (t == SUCCESS);

  e2 = gfc_get_expr ();
  e2->expr_type = EXPR_VARIABLE;
  e2->where = where;
  e2->symtree = symtree2;
  e2->ts = sym->ts;
  t = gfc_resolve_expr (e2);
  gcc_assert (t == SUCCESS);

  e3 = gfc_copy_expr (e1);
  e3->symtree = symtree3;
  t = gfc_resolve_expr (e3);
  gcc_assert (t == SUCCESS);

  /* Operators become a direct expression; min/max and the bit intrinsics
     become a call to the corresponding Fortran intrinsic (INAME).  */
  iname = NULL;
  switch (OMP_CLAUSE_REDUCTION_CODE (c))
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
      e4 = gfc_add (e3, e1);
      break;
    case MULT_EXPR:
      e4 = gfc_multiply (e3, e1);
      break;
    case TRUTH_ANDIF_EXPR:
      e4 = gfc_and (e3, e1);
      break;
    case TRUTH_ORIF_EXPR:
      e4 = gfc_or (e3, e1);
      break;
    case EQ_EXPR:
      e4 = gfc_eqv (e3, e1);
      break;
    case NE_EXPR:
      e4 = gfc_neqv (e3, e1);
      break;
    case MIN_EXPR:
      iname = "min";
      break;
    case MAX_EXPR:
      iname = "max";
      break;
    case BIT_AND_EXPR:
      iname = "iand";
      break;
    case BIT_IOR_EXPR:
      iname = "ior";
      break;
    case BIT_XOR_EXPR:
      iname = "ieor";
      break;
    default:
      gcc_unreachable ();
    }
  if (iname != NULL)
    {
      /* Fake intrinsic-function symbol for INAME(e3, e1).  */
      memset (&intrinsic_sym, 0, sizeof (intrinsic_sym));
      intrinsic_sym.ns = sym->ns;
      intrinsic_sym.name = iname;
      intrinsic_sym.ts = sym->ts;
      intrinsic_sym.attr.referenced = 1;
      intrinsic_sym.attr.intrinsic = 1;
      intrinsic_sym.attr.function = 1;
      intrinsic_sym.result = &intrinsic_sym;
      intrinsic_sym.declared_at = where;
      symtree4 = gfc_new_symtree (&root4, iname);
      symtree4->n.sym = &intrinsic_sym;
      gcc_assert (symtree4 == root4);

      e4 = gfc_get_expr ();
      e4->expr_type = EXPR_FUNCTION;
      e4->where = where;
      e4->symtree = symtree4;
      e4->value.function.isym = gfc_find_function (iname);
      e4->value.function.actual = gfc_get_actual_arglist ();
      e4->value.function.actual->expr = e3;
      e4->value.function.actual->next = gfc_get_actual_arglist ();
      e4->value.function.actual->next->expr = e1;
    }
  /* e1 and e3 have been stored as arguments of e4, avoid sharing.  */
  e1 = gfc_copy_expr (e1);
  e3 = gfc_copy_expr (e3);
  t = gfc_resolve_expr (e4);
  gcc_assert (t == SUCCESS);

  /* Create the init statement list.  */
  pushlevel (0);
  if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl))
      && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_ALLOCATABLE)
    {
      /* If decl is an allocatable array, it needs to be allocated with
	 the same bounds as the outer var.  */
      tree type = TREE_TYPE (decl), rank, size, esize, ptr;
      stmtblock_t block;

      gfc_start_block (&block);
      gfc_add_modify (&block, decl, outer_sym.backend_decl);
      rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
      /* Element count of the last dimension: ubound - lbound + 1,
	 scaled by the stride for rank > 1 to get total elements.  */
      size = gfc_conv_descriptor_ubound_get (decl, rank);
      size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
			  gfc_conv_descriptor_lbound_get (decl, rank));
      size = fold_build2 (PLUS_EXPR, gfc_array_index_type, size,
			  gfc_index_one_node);
      if (GFC_TYPE_ARRAY_RANK (type) > 1)
	size = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
			    gfc_conv_descriptor_stride_get (decl, rank));
      esize = fold_convert (gfc_array_index_type,
			    TYPE_SIZE_UNIT (gfc_get_element_type (type)));
      size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize);
      size = gfc_evaluate_now (fold_convert (size_type_node, size), &block);
      ptr = gfc_allocate_array_with_status (&block,
					    build_int_cst (pvoid_type_node, 0),
					    size, NULL, NULL);
      gfc_conv_descriptor_data_set (&block, decl, ptr);
      gfc_add_expr_to_block (&block, gfc_trans_assignment (e1, e2, false));
      stmt = gfc_finish_block (&block);
    }
  else
    stmt = gfc_trans_assignment (e1, e2, false);
  /* Wrap in a BIND_EXPR so the binding level pushed above is closed.  */
  if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
  else
    poplevel (0, 0, 0);
  OMP_CLAUSE_REDUCTION_INIT (c) = stmt;

  /* Create the merge statement list.  */
  pushlevel (0);
  if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl))
      && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_ALLOCATABLE)
    {
      /* If decl is an allocatable array, it needs to be deallocated
	 afterwards.  */
      stmtblock_t block;

      gfc_start_block (&block);
      gfc_add_expr_to_block (&block, gfc_trans_assignment (e3, e4, false));
      gfc_add_expr_to_block (&block, gfc_trans_dealloc_allocated (decl));
      stmt = gfc_finish_block (&block);
    }
  else
    stmt = gfc_trans_assignment (e3, e4, false);
  if (TREE_CODE (stmt) != BIND_EXPR)
    stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
  else
    poplevel (0, 0, 0);
  OMP_CLAUSE_REDUCTION_MERGE (c) = stmt;

  /* And stick the placeholder VAR_DECL into the clause as well.  */
  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = outer_sym.backend_decl;

  /* Tear down everything built above.  */
  gfc_current_locus = old_loc;
  gfc_free_expr (e1);
  gfc_free_expr (e2);
  gfc_free_expr (e3);
  gfc_free_expr (e4);
  gfc_free (symtree1);
  gfc_free (symtree2);
  gfc_free (symtree3);
  if (symtree4)
    gfc_free (symtree4);
  gfc_free_array_spec (outer_sym.as);
}
/* Synthesize a CALL_EXPR and a TRY_FINALLY_EXPR, for this chain of _DECLs
   if appropriate.  Arrange to call the __mf_register function now, and the
   __mf_unregister function later for each.  Return the gimple sequence
   after synthesis.

   NOTE(review): this is a patched variant that wraps each eligible local
   in a struct with front/rear red zones and emits lbc_init_*/lbc_uninit_*
   calls instead of the stock mudflap register/unregister pair — presumably
   part of a lightweight bounds-checking (LBC) scheme; confirm against the
   rest of the patch.  */
gimple_seq
mx_register_decls (tree decl, gimple_seq seq, gimple stmt,
		   location_t location, bool func_args)
{
  gimple_seq finally_stmts = NULL;
  gimple_stmt_iterator initially_stmts = gsi_start (seq);
  bool sframe_inserted = false;
  size_t front_rz_size, rear_rz_size;
  tree fsize, rsize, size;
  gimple uninit_fncall_front, uninit_fncall_rear, init_fncall_front,
	 init_fncall_rear, init_assign_stmt;
  tree fncall_param_front, fncall_param_rear;
  int map_ret;

  while (decl != NULL_TREE)
    {
      /* Instrument eligible (or array-typed) automatic, named, not yet
	 marked locals only.  */
      if ((mf_decl_eligible_p (decl) || TREE_CODE(TREE_TYPE(decl)) == ARRAY_TYPE)
	  /* Not already processed.  */
	  && ! mf_marked_p (decl)
	  /* Automatic variable.  */
	  && ! DECL_EXTERNAL (decl)
	  && ! TREE_STATIC (decl)
	  && get_name(decl))
	{
	  DEBUGLOG("DEBUG Instrumenting %s is_complete_type %d\n", IDENTIFIER_POINTER(DECL_NAME(decl)), COMPLETE_TYPE_P(decl));

	  /* construct a tree corresponding to the type
	     struct {
	       unsigned int rz_front[6U];
	       <original variable>
	       unsigned int rz_rear[6U];
	     };  */

	  /* Make sure the stack-frame bitmap helper runs once per
	     function, before any red-zone initialization.  */
	  if (!sframe_inserted){
	      gimple ensure_fn_call = gimple_build_call (lbc_ensure_sframe_bitmap_fndecl, 0);
	      gimple_set_location (ensure_fn_call, location);
	      gsi_insert_before (&initially_stmts, ensure_fn_call, GSI_SAME_STMT);
	      sframe_inserted = true;
	  }

	  // Calculate the zone sizes
	  size_t element_size = 0, request_size = 0;
	  if (COMPLETE_TYPE_P(decl)){
	      request_size = TREE_INT_CST_LOW(TYPE_SIZE_UNIT(TREE_TYPE(decl)));
	      if (TREE_CODE(TREE_TYPE(decl)) == ARRAY_TYPE)
		  element_size = TREE_INT_CST_LOW(TYPE_SIZE_UNIT(TREE_TYPE(TREE_TYPE(decl))));
	      else
		  element_size = request_size;
	  }
	  calculate_zone_sizes(element_size, request_size, /*global*/ false, COMPLETE_TYPE_P(decl), &front_rz_size, &rear_rz_size);
	  /* NOTE(review): %u with size_t arguments is a format mismatch on
	     LP64 targets — harmless for debugging but worth fixing.  */
	  DEBUGLOG("DEBUG *SIZES* req_size %u, ele_size %u, fsize %u, rsize %u\n", request_size, element_size, front_rz_size, rear_rz_size);

	  /* Build the wrapper struct and its variable, and make the new
	     variable visible to the gimplifier.  */
	  tree struct_type = create_struct_type(decl, front_rz_size, rear_rz_size);
	  tree struct_var = create_struct_var(struct_type, decl, location);
	  declare_vars(struct_var, stmt, 0);

	  /* Inserting into hashtable (Judy array keyed by variable name;
	     maps the original decl to its wrapper).  */
	  PWord_t PV;
	  JSLI(PV, decl_map, mf_varname_tree(decl));
	  gcc_assert(PV);
	  *PV = (PWord_t) struct_var;

	  fsize = convert (unsigned_type_node, size_int(front_rz_size));
	  gcc_assert (is_gimple_val (fsize));

	  /* &struct_var.rz_front — first field of the wrapper.  */
	  tree rz_front = TYPE_FIELDS(struct_type);
	  fncall_param_front = mf_mark (build1 (ADDR_EXPR, ptr_type_node, build3 (COMPONENT_REF, TREE_TYPE(rz_front), struct_var, rz_front, NULL_TREE)));
	  uninit_fncall_front = gimple_build_call (lbc_uninit_front_rz_fndecl, 2, fncall_param_front, fsize);
	  init_fncall_front = gimple_build_call (lbc_init_front_rz_fndecl, 2, fncall_param_front, fsize);
	  gimple_set_location (init_fncall_front, location);
	  gimple_set_location (uninit_fncall_front, location);

	  // Incomplete types have only a front red zone; rear calls are
	  // built (and later inserted) only for complete types.
	  if (COMPLETE_TYPE_P(decl)){
	      rsize = convert (unsigned_type_node, size_int(rear_rz_size));
	      gcc_assert (is_gimple_val (rsize));

	      /* &struct_var.rz_rear — third field of the wrapper.  */
	      tree rz_rear = DECL_CHAIN(DECL_CHAIN(TYPE_FIELDS (struct_type)));
	      fncall_param_rear = mf_mark (build1 (ADDR_EXPR, ptr_type_node, build3 (COMPONENT_REF, TREE_TYPE(rz_rear), struct_var, rz_rear, NULL_TREE)));
	      init_fncall_rear = gimple_build_call (lbc_init_rear_rz_fndecl, 2, fncall_param_rear, rsize);
	      uninit_fncall_rear = gimple_build_call (lbc_uninit_rear_rz_fndecl, 2, fncall_param_rear, rsize);
	      gimple_set_location (init_fncall_rear, location);
	      gimple_set_location (uninit_fncall_rear, location);
	  }

	  // TODO Do I need this?
#if 0
	  if (DECL_INITIAL(decl) != NULL_TREE){
	      // This code never seems to be getting executed for something like int i = 10;
	      // Looking at the tree dump, by the time it gets here such statements have
	      // already been split into "int i;" and "i = 10;".  Left in just in case.
	      tree orig_var_type = DECL_CHAIN(TYPE_FIELDS (struct_type));
	      tree orig_var_lval = mf_mark (build3 (COMPONENT_REF, TREE_TYPE(orig_var_type), struct_var, orig_var_type, NULL_TREE));
	      init_assign_stmt = gimple_build_assign(orig_var_lval, DECL_INITIAL(decl));
	      gimple_set_location (init_assign_stmt, location);
	  }
#endif

	  if (gsi_end_p (initially_stmts))
	    {
	      // TODO handle this
	      if (!DECL_ARTIFICIAL (decl))
		warning (OPT_Wmudflap,
			 "mudflap cannot track %qE in stub function",
			 DECL_NAME (decl));
	    }
	  else
	    {
#if 0
	      // Insert the declaration initializer
	      if (DECL_INITIAL(decl) != NULL_TREE)
		  gsi_insert_before (&initially_stmts, init_assign_stmt, GSI_SAME_STMT);
#endif
	      //gsi_insert_before (&initially_stmts, register_fncall, GSI_SAME_STMT);
	      /* Red-zone initialization at function entry...  */
	      gsi_insert_before (&initially_stmts, init_fncall_front, GSI_SAME_STMT);
	      if (COMPLETE_TYPE_P(decl))
		  gsi_insert_before (&initially_stmts, init_fncall_rear, GSI_SAME_STMT);

	      /* Accumulate the FINALLY piece.  */
	      //gimple_seq_add_stmt (&finally_stmts, unregister_fncall);
	      gimple_seq_add_stmt (&finally_stmts, uninit_fncall_front);
	      if (COMPLETE_TYPE_P(decl))
		  gimple_seq_add_stmt (&finally_stmts, uninit_fncall_rear);
	    }
	  mf_mark (decl);
	}
      decl = DECL_CHAIN (decl);
    }

  /* Actually, (initially_stmts!=NULL) <=> (finally_stmts!=NULL) */
  if (finally_stmts != NULL)
    {
      /* Wrap the whole body so the uninit calls run on every exit path.  */
      gimple stmt = gimple_build_try (seq, finally_stmts, GIMPLE_TRY_FINALLY);
      gimple_seq new_seq = gimple_seq_alloc ();
      gimple_seq_add_stmt (&new_seq, stmt);
      return new_seq;
    }
  else
    return seq;
}
/* Fold the byte offset of the reference EXPR measured from the start of
   the object VAR, walking down the handled component chain.  Returns a
   sizetype tree, or error_mark_node when a component cannot be handled.  */
static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code op = PLUS_EXPR;
  tree inner_off, delta, tmp;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      inner_off = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (inner_off == error_mark_node)
	return inner_off;

      /* Field offset plus the byte part of its bit offset.  */
      tmp = TREE_OPERAND (expr, 1);
      delta = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (tmp),
			  size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (tmp))
				    / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* These do not move the address — recurse through them.  */
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      inner_off = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (inner_off == error_mark_node)
	return inner_off;

      /* The imaginary part sits one element past the real part.  */
      delta = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      inner_off = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (inner_off == error_mark_node)
	return inner_off;

      /* A negative constant index becomes a subtraction of the negated
	 index so the sizetype arithmetic stays non-negative.  */
      tmp = TREE_OPERAND (expr, 1);
      if (TREE_CODE (tmp) == INTEGER_CST && tree_int_cst_sgn (tmp) < 0)
	{
	  op = MINUS_EXPR;
	  tmp = fold_build1 (NEGATE_EXPR, TREE_TYPE (tmp), tmp);
	}
      tmp = fold_convert (sizetype, tmp);
      delta = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), tmp);
      break;

    case MEM_REF:
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return wide_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (op, inner_off, delta);
}
/* Instrument the memory reference *TP occurring in the statement at ITER.
   LOCATION is attached to any emitted calls; DIRFLAG is integer_zero_node
   for a read, nonzero for a write.  Eligible subtrees are first rewritten
   through mf_walk_n_instrument; dereference-like nodes then get a call to
   lbc_is_char_red() inserted before the statement to validate the access
   against the red zones.  */
static void
mf_xform_derefs_1 (gimple_stmt_iterator *iter, tree *tp,
                   location_t location, tree dirflag)
{
  tree type, addr, size, t;
  tree temp;
  bool check_red_flag = 0, instrumented = 0;
  tree fncall_param_val;
  gimple is_char_red_call;

  // TODO fix this to use our flag
  /* Don't instrument read operations.  */
  if (dirflag == integer_zero_node && flag_mudflap_ignore_reads)
    return;

  DEBUGLOG("TREE_CODE(t) = %s, mf_decl_eligible_p : %d\n",
           tree_code_name[(int)TREE_CODE(*tp)], mf_decl_eligible_p(*tp));

  t = *tp;
  type = TREE_TYPE (t);
  if (type == error_mark_node)
    return;
  size = TYPE_SIZE_UNIT (type);

  /* Don't instrument marked nodes (unless they are still eligible for
     the rewriting pass below).  */
  if (mf_marked_p (t) && !mf_decl_eligible_p(t)){
    DEBUGLOG("Returning Here - 1\n");
    return;
  }

  if (TREE_CODE(t) == ADDR_EXPR ||
      TREE_CODE(t) == COMPONENT_REF ||
      TREE_CODE(t) == ARRAY_REF ||
      (TREE_CODE(t) == VAR_DECL && mf_decl_eligible_p(t)))
    {
      DEBUGLOG("------ INSTRUMENTING NODES ---------\n");
      /* BUGFIX: a VAR_DECL is a declaration node and has no operands, so
         reading TREE_OPERAND on it is invalid (aborts under tree
         checking).  Only inspect operand 0 for the expression codes.  */
      temp = TREE_CODE(t) == VAR_DECL ? NULL_TREE : TREE_OPERAND(t, 0);
      if(temp && (TREE_CODE(temp) == STRING_CST ||
                  TREE_CODE(temp) == FUNCTION_DECL))
        return; // TODO Check this out? What do you do in this case?
      if (temp)
        DEBUGLOG("TREE_CODE(temp) : %s\n", tree_code_name[(int)TREE_CODE(temp)]);

      /* Rewrite the variable itself, or the base of the reference.  */
      if (TREE_CODE(t) == VAR_DECL)
        *tp = mf_walk_n_instrument(tp, &instrumented);
      else
        TREE_OPERAND(t,0) = mf_walk_n_instrument(&(TREE_OPERAND(t,0)), &instrumented);

      /* Taking an address is not a dereference — no red-zone check.  */
      if (TREE_CODE(t) == ADDR_EXPR)
        return;
    }

  DEBUGLOG("Pass2 derefs: entering deref section\n");

  t = *tp;
  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case COMPONENT_REF: // TODO check if following works for comp refs
      {
        DEBUGLOG("------ INSIDE CASE COMPONENT_REF ---------\n");
        HOST_WIDE_INT bitsize, bitpos;
        tree inner, offset;
        int volatilep, unsignedp;
        enum machine_mode mode1;
        check_red_flag = 1;
        /* Recover the base object and byte offset of the access, then
           form &inner + offset as the address to validate.  */
        inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode1,
                                     &unsignedp, &volatilep, false);
        if (!offset)
          offset = size_zero_node;
        offset = size_binop (PLUS_EXPR, offset,
                             size_int (bitpos / BITS_PER_UNIT));
        addr = fold_build2_loc (location, POINTER_PLUS_EXPR, ptr_type_node,
                                build1 (ADDR_EXPR,
                                        build_pointer_type(type), inner),
                                offset);
        break; // TODO continue?
      }

    case INDIRECT_REF:
      DEBUGLOG("------ INSIDE CASE INDIRECT_REF ---------\n");
      check_red_flag = 1;
      addr = TREE_OPERAND (t, 0);
      break; // TODO continue?

    case MEM_REF:
      DEBUGLOG("------ INSIDE CASE MEM_REF ---------\n");
      check_red_flag = 1;
      addr = fold_build2_loc (location, POINTER_PLUS_EXPR,
                              TREE_TYPE (TREE_OPERAND (t, 0)),
                              TREE_OPERAND (t, 0),
                              fold_convert (sizetype, TREE_OPERAND (t, 1)));
      break;

    case TARGET_MEM_REF:
      DEBUGLOG("------ INSIDE CASE TARGET_MEM_REF ---------\n");
      check_red_flag = 1;
      addr = tree_mem_ref_addr (ptr_type_node, t);
      break; // TODO do you want to do this case? find out what it does.

    case ARRAY_RANGE_REF:
      DEBUGLOG("------ INSIDE CASE ARRAY_RANGE_REF ---------\n");
      DEBUGLOG("------ TODO not handled yet---------\n");
      return;

    case BIT_FIELD_REF:
      DEBUGLOG("------ INSIDE CASE BIT_FIELD_REF ---------\n");
      DEBUGLOG("------ TODO not handled yet---------\n");
      return;

    default:
      DEBUGLOG("------ INSIDE CASE DEFAULT ---------\n");
      if(mf_decl_eligible_p(t))
        {
          DEBUGLOG("Do you want to be here?\n");
          return;
          /*if((*tp = mx_xform_instrument_pass2(t)) == NULL_TREE){
            DEBUGLOG("Failed to set tree operand\n");
            return;
          }*/
        }
      /* Not eligible: check_red_flag stays 0, so nothing is inserted.  */
    }

  // Add the call to is_char_red
  if (check_red_flag)
    {
      DEBUGLOG("Entering is_char_red\n");
      /* Pass the first byte's value, the access size and the address.  */
      fncall_param_val = fold_build2_loc (location, MEM_REF, ptr_type_node,
                                          addr,
                                          build_int_cst(build_pointer_type(type), 0));
      fncall_param_val = fold_convert_loc (location, unsigned_type_node,
                                           fncall_param_val);
      is_char_red_call = gimple_build_call (lbc_is_char_red_fndecl, 3,
                                            fncall_param_val, size,
                                            fold_convert_loc(location, ptr_type_node, addr));
      gimple_set_location (is_char_red_call, location);
      //debug_gimple_stmt(is_char_red_call);
      gsi_insert_before (iter, is_char_red_call, GSI_SAME_STMT);
      DEBUGLOG("Done with is_char_red\n");
    }
  DEBUGLOG("Exiting derefs \n");
}
/* Compute the object size of the object PTR (an ADDR_EXPR) points to.
   OBJECT_SIZE_TYPE is the usual __builtin_object_size mode: bit 0 set
   selects the minimum (subobject) size, clear selects the maximum.  OSI,
   when non-NULL and in maximum mode, allows reuse of already-collected
   SSA-name sizes for MEM_REF bases.  Returns the size in bytes, or
   unknown[object_size_type] when it cannot be determined.  */
static unsigned HOST_WIDE_INT
addr_object_size (struct object_size_info *osi, const_tree ptr,
		  int object_size_type)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  /* Strip component references down to the base object.  */
  pt_var = TREE_OPERAND (ptr, 0);
  while (handled_component_p (pt_var))
    pt_var = TREE_OPERAND (pt_var, 0);

  if (pt_var
      && TREE_CODE (pt_var) == MEM_REF)
    {
      unsigned HOST_WIDE_INT sz;

      if (!osi || (object_size_type & 1) != 0
	  || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
	{
	  /* No cached info usable: recurse on the MEM_REF base in
	     maximum mode.  */
	  sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
					    object_size_type & ~1);
	}
      else
	{
	  /* Reuse (or trigger collection of) the SSA name's size.  */
	  tree var = TREE_OPERAND (pt_var, 0);
	  if (osi->pass == 0)
	    collect_object_sizes_for (osi, var);
	  if (bitmap_bit_p (computed[object_size_type],
			    SSA_NAME_VERSION (var)))
	    sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
	  else
	    sz = unknown[object_size_type];
	}
      /* Subtract the MEM_REF offset, clamping at zero and rejecting
	 values that do not fit a uhwi.  */
      if (sz != unknown[object_size_type])
	{
	  offset_int dsz = wi::sub (sz, mem_ref_offset (pt_var));
	  if (wi::neg_p (dsz))
	    sz = 0;
	  else if (wi::fits_uhwi_p (dsz))
	    sz = dsz.to_uhwi ();
	  else
	    sz = unknown[object_size_type];
	}

      if (sz != unknown[object_size_type] && sz < offset_limit)
	pt_var_size = size_int (sz);
    }
  else if (pt_var
	   && DECL_P (pt_var)
	   && tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var))
	   && tree_to_uhwi (DECL_SIZE_UNIT (pt_var)) < offset_limit)
    pt_var_size = DECL_SIZE_UNIT (pt_var);
  else if (pt_var
	   && TREE_CODE (pt_var) == STRING_CST
	   && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
	   && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
	   && tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
	      < offset_limit)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return unknown[object_size_type];

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & 1)
	{
	  /* Minimum mode: measure from the innermost enclosing
	     subobject (field / array / complex part) instead of the
	     whole base object.  */
	  var = TREE_OPERAND (ptr, 0);

	  while (var != pt_var
		 && TREE_CODE (var) != BIT_FIELD_REF
		 && TREE_CODE (var) != COMPONENT_REF
		 && TREE_CODE (var) != ARRAY_REF
		 && TREE_CODE (var) != ARRAY_RANGE_REF
		 && TREE_CODE (var) != REALPART_EXPR
		 && TREE_CODE (var) != IMAGPART_EXPR)
	    var = TREE_OPERAND (var, 0);
	  if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
	    var = TREE_OPERAND (var, 0);
	  /* Fall back to the base object if the subobject's size is
	     unusable or larger than the whole object.  */
	  if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
	      || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
	      || (pt_var_size
		  && tree_int_cst_lt (pt_var_size,
				      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
	    var = pt_var;
	  else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
	    {
	      tree v = var;
	      /* For &X->fld, compute object size only if fld isn't the last
		 field, as struct { int i; char c[1]; } is often used instead
		 of flexible array member.  */
	      while (v && v != pt_var)
		switch (TREE_CODE (v))
		  {
		  case ARRAY_REF:
		    /* An in-bounds constant index means the array cannot
		       be a trailing flexible-array surrogate.  */
		    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
			&& TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
		      {
			tree domain
			  = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
			if (domain
			    && TYPE_MAX_VALUE (domain)
			    && TREE_CODE (TYPE_MAX_VALUE (domain))
			       == INTEGER_CST
			    && tree_int_cst_lt (TREE_OPERAND (v, 1),
						TYPE_MAX_VALUE (domain)))
			  {
			    v = NULL_TREE;
			    break;
			  }
		      }
		    v = TREE_OPERAND (v, 0);
		    break;
		  case REALPART_EXPR:
		  case IMAGPART_EXPR:
		    v = NULL_TREE;
		    break;
		  case COMPONENT_REF:
		    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
		      {
			v = NULL_TREE;
			break;
		      }
		    /* Skip through union members — they do not pin a
		       field position.  */
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (TREE_CODE (v) == COMPONENT_REF
			&& TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			   == RECORD_TYPE)
		      {
			/* Is there a real FIELD_DECL after this field?  If
			   so it is not the last field.  */
			tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
			for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
			  if (TREE_CODE (fld_chain) == FIELD_DECL)
			    break;
			if (fld_chain)
			  {
			    v = NULL_TREE;
			    break;
			  }
			v = TREE_OPERAND (v, 0);
		      }
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != UNION_TYPE
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  != QUAL_UNION_TYPE)
			break;
		      else
			v = TREE_OPERAND (v, 0);
		    if (v != pt_var)
		      v = NULL_TREE;
		    else
		      v = pt_var;
		    break;
		  default:
		    v = pt_var;
		    break;
		  }
	      /* v == pt_var means the reference ends in a possibly
		 flexible trailing member: use the whole object.  */
	      if (v == pt_var)
		var = pt_var;
	    }
	}
      else
	var = pt_var;

      if (var != pt_var)
	var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
	return unknown[object_size_type];
      else
	var_size = pt_var_size;
      /* Remaining size = chosen object size minus offset of PTR inside
	 it, clamped at zero.  */
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
	{
	  if (TREE_CODE (bytes) == INTEGER_CST
	      && tree_int_cst_lt (var_size, bytes))
	    bytes = size_zero_node;
	  else
	    bytes = size_binop (MINUS_EXPR, var_size, bytes);
	}
      /* With a MEM_REF base also bound by the base object's remaining
	 size and take the minimum.  */
      if (var != pt_var
	  && pt_var_size
	  && TREE_CODE (pt_var) == MEM_REF
	  && bytes != error_mark_node)
	{
	  tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
	  if (bytes2 != error_mark_node)
	    {
	      if (TREE_CODE (bytes2) == INTEGER_CST
		  && tree_int_cst_lt (pt_var_size, bytes2))
		bytes2 = size_zero_node;
	      else
		bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
	      bytes = size_binop (MIN_EXPR, bytes, bytes2);
	    }
	}
    }
  else if (!pt_var_size)
    return unknown[object_size_type];
  else
    bytes = pt_var_size;

  if (tree_fits_uhwi_p (bytes))
    return tree_to_uhwi (bytes);

  return unknown[object_size_type];
}
/* Compute the size of the object PTR (an ADDR_EXPR) points to.
   OBJECT_SIZE_TYPE bit 0 set selects the minimum (subobject) size.
   Returns the size in bytes, or unknown[object_size_type] when it cannot
   be determined.  NOTE(review): an older/simpler variant than the
   osi-based overload above — no MEM_REF or SSA-name handling; presumably
   both exist for different compiler versions, confirm which is live.  */
static unsigned HOST_WIDE_INT
addr_object_size (tree ptr, int object_size_type)
{
  tree pt_var;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  /* Strip down to the base object of the reference.  */
  pt_var = TREE_OPERAND (ptr, 0);
  if (REFERENCE_CLASS_P (pt_var))
    pt_var = get_base_address (pt_var);

  /* Only variables and string literals with a sane constant size are
     handled.  */
  if (pt_var
      && (SSA_VAR_P (pt_var) || TREE_CODE (pt_var) == STRING_CST)
      && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
      && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
      && (unsigned HOST_WIDE_INT)
	 tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
	 < offset_limit)
    {
      tree bytes;

      if (pt_var != TREE_OPERAND (ptr, 0))
	{
	  tree var;

	  if (object_size_type & 1)
	    {
	      /* Minimum mode: find the innermost enclosing subobject
		 (field / array / complex part).  */
	      var = TREE_OPERAND (ptr, 0);

	      while (var != pt_var
		     && TREE_CODE (var) != BIT_FIELD_REF
		     && TREE_CODE (var) != COMPONENT_REF
		     && TREE_CODE (var) != ARRAY_REF
		     && TREE_CODE (var) != ARRAY_RANGE_REF
		     && TREE_CODE (var) != REALPART_EXPR
		     && TREE_CODE (var) != IMAGPART_EXPR)
		var = TREE_OPERAND (var, 0);
	      if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
		var = TREE_OPERAND (var, 0);
	      /* Fall back to the whole object when the subobject size is
		 unusable or larger than the whole object.  */
	      if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
		  || ! host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (var)), 1)
		  || tree_int_cst_lt (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)),
				      TYPE_SIZE_UNIT (TREE_TYPE (var))))
		var = pt_var;
	    }
	  else
	    var = pt_var;

	  /* Remaining size = object size minus PTR's offset within it,
	     clamped at zero.  */
	  bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
	  if (bytes != error_mark_node)
	    {
	      if (TREE_CODE (bytes) == INTEGER_CST
		  && tree_int_cst_lt (TYPE_SIZE_UNIT (TREE_TYPE (var)),
				      bytes))
		bytes = size_zero_node;
	      else
		bytes = size_binop (MINUS_EXPR,
				    TYPE_SIZE_UNIT (TREE_TYPE (var)), bytes);
	    }
	}
      else
	bytes = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));

      if (host_integerp (bytes, 1))
	return tree_low_cst (bytes, 1);
    }

  return unknown[object_size_type];
}
void print_node (FILE *file, const char *prefix, tree node, int indent) { int hash; struct bucket *b; machine_mode mode; enum tree_code_class tclass; int len; int i; expanded_location xloc; enum tree_code code; if (node == 0) return; code = TREE_CODE (node); tclass = TREE_CODE_CLASS (code); /* Don't get too deep in nesting. If the user wants to see deeper, it is easy to use the address of a lowest-level node as an argument in another call to debug_tree. */ if (indent > 24) { print_node_brief (file, prefix, node, indent); return; } if (indent > 8 && (tclass == tcc_type || tclass == tcc_declaration)) { print_node_brief (file, prefix, node, indent); return; } /* It is unsafe to look at any other fields of an ERROR_MARK node. */ if (code == ERROR_MARK) { print_node_brief (file, prefix, node, indent); return; } /* Allow this function to be called if the table is not there. */ if (table) { hash = ((uintptr_t) node) % HASH_SIZE; /* If node is in the table, just mention its address. */ for (b = table[hash]; b; b = b->next) if (b->node == node) { print_node_brief (file, prefix, node, indent); return; } /* Add this node to the table. */ b = XNEW (struct bucket); b->node = node; b->next = table[hash]; table[hash] = b; } /* Indent to the specified column, since this is the long form. */ indent_to (file, indent); /* Print the slot this node is in, and its code, and address. */ fprintf (file, "%s <%s", prefix, get_tree_code_name (code)); dump_addr (file, " ", node); /* Print the name, if any. */ if (tclass == tcc_declaration) { if (DECL_NAME (node)) fprintf (file, " %s", IDENTIFIER_POINTER (DECL_NAME (node))); else if (code == LABEL_DECL && LABEL_DECL_UID (node) != -1) { if (dump_flags & TDF_NOUID) fprintf (file, " L.xxxx"); else fprintf (file, " L.%d", (int) LABEL_DECL_UID (node)); } else { if (dump_flags & TDF_NOUID) fprintf (file, " %c.xxxx", code == CONST_DECL ? 'C' : 'D'); else fprintf (file, " %c.%u", code == CONST_DECL ? 
'C' : 'D', DECL_UID (node)); } } else if (tclass == tcc_type) { if (TYPE_NAME (node)) { if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE) fprintf (file, " %s", IDENTIFIER_POINTER (TYPE_NAME (node))); else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL && DECL_NAME (TYPE_NAME (node))) fprintf (file, " %s", IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (node)))); } } if (code == IDENTIFIER_NODE) fprintf (file, " %s", IDENTIFIER_POINTER (node)); if (code == INTEGER_CST) { if (indent <= 4) print_node_brief (file, "type", TREE_TYPE (node), indent + 4); } else if (CODE_CONTAINS_STRUCT (code, TS_TYPED)) { print_node (file, "type", TREE_TYPE (node), indent + 4); if (TREE_TYPE (node)) indent_to (file, indent + 3); } if (!TYPE_P (node) && TREE_SIDE_EFFECTS (node)) fputs (" side-effects", file); if (TYPE_P (node) ? TYPE_READONLY (node) : TREE_READONLY (node)) fputs (" readonly", file); if (TYPE_P (node) && TYPE_ATOMIC (node)) fputs (" atomic", file); if (!TYPE_P (node) && TREE_CONSTANT (node)) fputs (" constant", file); else if (TYPE_P (node) && TYPE_SIZES_GIMPLIFIED (node)) fputs (" sizes-gimplified", file); if (TYPE_P (node) && !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (node))) fprintf (file, " address-space-%d", TYPE_ADDR_SPACE (node)); if (TREE_ADDRESSABLE (node)) fputs (" addressable", file); if (TREE_THIS_VOLATILE (node)) fputs (" volatile", file); if (TREE_ASM_WRITTEN (node)) fputs (" asm_written", file); if (TREE_USED (node)) fputs (" used", file); if (TREE_NOTHROW (node)) fputs (TYPE_P (node) ? 
" align-ok" : " nothrow", file); if (TREE_PUBLIC (node)) fputs (" public", file); if (TREE_PRIVATE (node)) fputs (" private", file); if (TREE_PROTECTED (node)) fputs (" protected", file); if (TREE_STATIC (node)) fputs (" static", file); if (TREE_DEPRECATED (node)) fputs (" deprecated", file); if (TREE_VISITED (node)) fputs (" visited", file); if (code != TREE_VEC && code != INTEGER_CST && code != SSA_NAME) { if (TREE_LANG_FLAG_0 (node)) fputs (" tree_0", file); if (TREE_LANG_FLAG_1 (node)) fputs (" tree_1", file); if (TREE_LANG_FLAG_2 (node)) fputs (" tree_2", file); if (TREE_LANG_FLAG_3 (node)) fputs (" tree_3", file); if (TREE_LANG_FLAG_4 (node)) fputs (" tree_4", file); if (TREE_LANG_FLAG_5 (node)) fputs (" tree_5", file); if (TREE_LANG_FLAG_6 (node)) fputs (" tree_6", file); } /* DECL_ nodes have additional attributes. */ switch (TREE_CODE_CLASS (code)) { case tcc_declaration: if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON)) { if (DECL_UNSIGNED (node)) fputs (" unsigned", file); if (DECL_IGNORED_P (node)) fputs (" ignored", file); if (DECL_ABSTRACT_P (node)) fputs (" abstract", file); if (DECL_EXTERNAL (node)) fputs (" external", file); if (DECL_NONLOCAL (node)) fputs (" nonlocal", file); } if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS)) { if (DECL_WEAK (node)) fputs (" weak", file); if (DECL_IN_SYSTEM_HEADER (node)) fputs (" in_system_header", file); } if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL) && code != LABEL_DECL && code != FUNCTION_DECL && DECL_REGISTER (node)) fputs (" regdecl", file); if (code == TYPE_DECL && TYPE_DECL_SUPPRESS_DEBUG (node)) fputs (" suppress-debug", file); if (code == FUNCTION_DECL && DECL_FUNCTION_SPECIFIC_TARGET (node)) fputs (" function-specific-target", file); if (code == FUNCTION_DECL && DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node)) fputs (" function-specific-opt", file); if (code == FUNCTION_DECL && DECL_DECLARED_INLINE_P (node)) fputs (" autoinline", file); if (code == FUNCTION_DECL && DECL_BUILT_IN (node)) fputs (" 
built-in", file); if (code == FUNCTION_DECL && DECL_STATIC_CHAIN (node)) fputs (" static-chain", file); if (TREE_CODE (node) == FUNCTION_DECL && decl_is_tm_clone (node)) fputs (" tm-clone", file); if (code == FIELD_DECL && DECL_PACKED (node)) fputs (" packed", file); if (code == FIELD_DECL && DECL_BIT_FIELD (node)) fputs (" bit-field", file); if (code == FIELD_DECL && DECL_NONADDRESSABLE_P (node)) fputs (" nonaddressable", file); if (code == LABEL_DECL && EH_LANDING_PAD_NR (node)) fprintf (file, " landing-pad:%d", EH_LANDING_PAD_NR (node)); if (code == VAR_DECL && DECL_IN_TEXT_SECTION (node)) fputs (" in-text-section", file); if (code == VAR_DECL && DECL_IN_CONSTANT_POOL (node)) fputs (" in-constant-pool", file); if (code == VAR_DECL && DECL_COMMON (node)) fputs (" common", file); if (code == VAR_DECL && DECL_THREAD_LOCAL_P (node)) { fputs (" ", file); fputs (tls_model_names[DECL_TLS_MODEL (node)], file); } if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON)) { if (DECL_VIRTUAL_P (node)) fputs (" virtual", file); if (DECL_PRESERVE_P (node)) fputs (" preserve", file); if (DECL_LANG_FLAG_0 (node)) fputs (" decl_0", file); if (DECL_LANG_FLAG_1 (node)) fputs (" decl_1", file); if (DECL_LANG_FLAG_2 (node)) fputs (" decl_2", file); if (DECL_LANG_FLAG_3 (node)) fputs (" decl_3", file); if (DECL_LANG_FLAG_4 (node)) fputs (" decl_4", file); if (DECL_LANG_FLAG_5 (node)) fputs (" decl_5", file); if (DECL_LANG_FLAG_6 (node)) fputs (" decl_6", file); if (DECL_LANG_FLAG_7 (node)) fputs (" decl_7", file); mode = DECL_MODE (node); fprintf (file, " %s", GET_MODE_NAME (mode)); } if ((code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL) && DECL_BY_REFERENCE (node)) fputs (" passed-by-reference", file); if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS) && DECL_DEFER_OUTPUT (node)) fputs (" defer-output", file); xloc = expand_location (DECL_SOURCE_LOCATION (node)); fprintf (file, " file %s line %d col %d", xloc.file, xloc.line, xloc.column); if (CODE_CONTAINS_STRUCT (code, 
TS_DECL_COMMON)) { print_node (file, "size", DECL_SIZE (node), indent + 4); print_node (file, "unit size", DECL_SIZE_UNIT (node), indent + 4); if (code != FUNCTION_DECL || DECL_BUILT_IN (node)) indent_to (file, indent + 3); if (DECL_USER_ALIGN (node)) fprintf (file, " user"); fprintf (file, " align %d", DECL_ALIGN (node)); if (code == FIELD_DECL) fprintf (file, " offset_align " HOST_WIDE_INT_PRINT_UNSIGNED, DECL_OFFSET_ALIGN (node)); if (code == FUNCTION_DECL && DECL_BUILT_IN (node)) { if (DECL_BUILT_IN_CLASS (node) == BUILT_IN_MD) fprintf (file, " built-in BUILT_IN_MD %d", DECL_FUNCTION_CODE (node)); else fprintf (file, " built-in %s:%s", built_in_class_names[(int) DECL_BUILT_IN_CLASS (node)], built_in_names[(int) DECL_FUNCTION_CODE (node)]); } } if (code == FIELD_DECL) { print_node (file, "offset", DECL_FIELD_OFFSET (node), indent + 4); print_node (file, "bit offset", DECL_FIELD_BIT_OFFSET (node), indent + 4); if (DECL_BIT_FIELD_TYPE (node)) print_node (file, "bit_field_type", DECL_BIT_FIELD_TYPE (node), indent + 4); } print_node_brief (file, "context", DECL_CONTEXT (node), indent + 4); if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON)) { print_node_brief (file, "attributes", DECL_ATTRIBUTES (node), indent + 4); if (code != PARM_DECL) print_node_brief (file, "initial", DECL_INITIAL (node), indent + 4); } if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL)) { print_node_brief (file, "abstract_origin", DECL_ABSTRACT_ORIGIN (node), indent + 4); } if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON)) { print_node (file, "result", DECL_RESULT_FLD (node), indent + 4); } lang_hooks.print_decl (file, node, indent); if (DECL_RTL_SET_P (node)) { indent_to (file, indent + 4); print_rtl (file, DECL_RTL (node)); } if (code == PARM_DECL) { print_node (file, "arg-type", DECL_ARG_TYPE (node), indent + 4); if (DECL_INCOMING_RTL (node) != 0) { indent_to (file, indent + 4); fprintf (file, "incoming-rtl "); print_rtl (file, DECL_INCOMING_RTL (node)); } } else if (code == FUNCTION_DECL && 
DECL_STRUCT_FUNCTION (node) != 0) { print_node (file, "arguments", DECL_ARGUMENTS (node), indent + 4); indent_to (file, indent + 4); dump_addr (file, "struct-function ", DECL_STRUCT_FUNCTION (node)); } if ((code == VAR_DECL || code == PARM_DECL) && DECL_HAS_VALUE_EXPR_P (node)) print_node (file, "value-expr", DECL_VALUE_EXPR (node), indent + 4); /* Print the decl chain only if decl is at second level. */ if (indent == 4) print_node (file, "chain", TREE_CHAIN (node), indent + 4); else print_node_brief (file, "chain", TREE_CHAIN (node), indent + 4); break; case tcc_type: if (TYPE_UNSIGNED (node)) fputs (" unsigned", file); if (TYPE_NO_FORCE_BLK (node)) fputs (" no-force-blk", file); if (TYPE_STRING_FLAG (node)) fputs (" string-flag", file); if (TYPE_NEEDS_CONSTRUCTING (node)) fputs (" needs-constructing", file); if ((code == RECORD_TYPE || code == UNION_TYPE || code == QUAL_UNION_TYPE || code == ARRAY_TYPE) && TYPE_REVERSE_STORAGE_ORDER (node)) fputs (" reverse-storage-order", file); /* The transparent-union flag is used for different things in different nodes. 
*/ if ((code == UNION_TYPE || code == RECORD_TYPE) && TYPE_TRANSPARENT_AGGR (node)) fputs (" transparent-aggr", file); else if (code == ARRAY_TYPE && TYPE_NONALIASED_COMPONENT (node)) fputs (" nonaliased-component", file); if (TYPE_PACKED (node)) fputs (" packed", file); if (TYPE_RESTRICT (node)) fputs (" restrict", file); if (TYPE_LANG_FLAG_0 (node)) fputs (" type_0", file); if (TYPE_LANG_FLAG_1 (node)) fputs (" type_1", file); if (TYPE_LANG_FLAG_2 (node)) fputs (" type_2", file); if (TYPE_LANG_FLAG_3 (node)) fputs (" type_3", file); if (TYPE_LANG_FLAG_4 (node)) fputs (" type_4", file); if (TYPE_LANG_FLAG_5 (node)) fputs (" type_5", file); if (TYPE_LANG_FLAG_6 (node)) fputs (" type_6", file); mode = TYPE_MODE (node); fprintf (file, " %s", GET_MODE_NAME (mode)); print_node (file, "size", TYPE_SIZE (node), indent + 4); print_node (file, "unit size", TYPE_SIZE_UNIT (node), indent + 4); indent_to (file, indent + 3); if (TYPE_USER_ALIGN (node)) fprintf (file, " user"); fprintf (file, " align %d symtab %d alias set " HOST_WIDE_INT_PRINT_DEC, TYPE_ALIGN (node), TYPE_SYMTAB_ADDRESS (node), (HOST_WIDE_INT) TYPE_ALIAS_SET (node)); if (TYPE_STRUCTURAL_EQUALITY_P (node)) fprintf (file, " structural equality"); else dump_addr (file, " canonical type ", TYPE_CANONICAL (node)); print_node (file, "attributes", TYPE_ATTRIBUTES (node), indent + 4); if (INTEGRAL_TYPE_P (node) || code == REAL_TYPE || code == FIXED_POINT_TYPE) { fprintf (file, " precision %d", TYPE_PRECISION (node)); print_node_brief (file, "min", TYPE_MIN_VALUE (node), indent + 4); print_node_brief (file, "max", TYPE_MAX_VALUE (node), indent + 4); } if (code == ENUMERAL_TYPE) print_node (file, "values", TYPE_VALUES (node), indent + 4); else if (code == ARRAY_TYPE) print_node (file, "domain", TYPE_DOMAIN (node), indent + 4); else if (code == VECTOR_TYPE) fprintf (file, " nunits %d", (int) TYPE_VECTOR_SUBPARTS (node)); else if (code == RECORD_TYPE || code == UNION_TYPE || code == QUAL_UNION_TYPE) print_node (file, 
"fields", TYPE_FIELDS (node), indent + 4); else if (code == FUNCTION_TYPE || code == METHOD_TYPE) { if (TYPE_METHOD_BASETYPE (node)) print_node_brief (file, "method basetype", TYPE_METHOD_BASETYPE (node), indent + 4); print_node (file, "arg-types", TYPE_ARG_TYPES (node), indent + 4); } else if (code == OFFSET_TYPE) print_node_brief (file, "basetype", TYPE_OFFSET_BASETYPE (node), indent + 4); if (TYPE_CONTEXT (node)) print_node_brief (file, "context", TYPE_CONTEXT (node), indent + 4); lang_hooks.print_type (file, node, indent); if (TYPE_POINTER_TO (node) || TREE_CHAIN (node)) indent_to (file, indent + 3); print_node_brief (file, "pointer_to_this", TYPE_POINTER_TO (node), indent + 4); print_node_brief (file, "reference_to_this", TYPE_REFERENCE_TO (node), indent + 4); print_node_brief (file, "chain", TREE_CHAIN (node), indent + 4); break; case tcc_expression: case tcc_comparison: case tcc_unary: case tcc_binary: case tcc_reference: case tcc_statement: case tcc_vl_exp: if (code == BIND_EXPR) { print_node (file, "vars", TREE_OPERAND (node, 0), indent + 4); print_node (file, "body", TREE_OPERAND (node, 1), indent + 4); print_node (file, "block", TREE_OPERAND (node, 2), indent + 4); break; } if (code == CALL_EXPR) { call_expr_arg_iterator iter; tree arg; print_node (file, "fn", CALL_EXPR_FN (node), indent + 4); print_node (file, "static_chain", CALL_EXPR_STATIC_CHAIN (node), indent + 4); i = 0; FOR_EACH_CALL_EXPR_ARG (arg, iter, node) { char temp[10]; sprintf (temp, "arg %d", i); print_node (file, temp, arg, indent + 4); i++; } }
/* Attempt to rewrite a pointer addition (ADDR of an array + variable
   OFFSET) into direct array indexing, i.e. turn  &a + i*elemsize  into
   a[i].  DEF_RHS is the ADDR_EXPR of the array, OFFSET the SSA name
   holding the byte offset, and USE_STMT_GSI points at the statement
   using the sum.  Returns true iff the statement was rewritten.
   NOTE(review): the rewrite mutates the use statement in place; no
   undo information is recorded (see comment near the end).  */
static bool
forward_propagate_addr_into_variable_array_index (tree offset,
						  tree def_rhs,
						  gimple_stmt_iterator *use_stmt_gsi)
{
  tree index, tunit;
  gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi);
  tree tmp;

  /* Byte size of one array element; bail out unless it is a
     host-representable unsigned constant.  */
  tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)));
  if (!host_integerp (tunit, 1))
    return false;

  /* Get the offset's defining statement.  */
  offset_def = SSA_NAME_DEF_STMT (offset);

  /* Try to find an expression for a proper index.  This is either a
     multiplication expression by the element size or just the ssa name
     we came along in case the element size is one.  In that case,
     however, we do not allow multiplications because they can be
     computing index to a higher level dimension (PR 37861).  */
  if (integer_onep (tunit))
    {
      if (is_gimple_assign (offset_def)
	  && gimple_assign_rhs_code (offset_def) == MULT_EXPR)
	return false;

      index = offset;
    }
  else
    {
      /* The statement which defines OFFSET before type conversion
	 must be a simple GIMPLE_ASSIGN.  */
      if (!is_gimple_assign (offset_def))
	return false;

      /* The RHS of the statement which defines OFFSET must be a
	 multiplication of an object by the size of the array elements.
	 This implicitly verifies that the size of the array elements
	 is constant.  */
      if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
	  && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
	  && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), tunit))
	{
	  /* The first operand to the MULT_EXPR is the desired index.  */
	  index = gimple_assign_rhs1 (offset_def);
	}
      /* If we have idx * tunit + CST * tunit re-associate that.  */
      else if ((gimple_assign_rhs_code (offset_def) == PLUS_EXPR
		|| gimple_assign_rhs_code (offset_def) == MINUS_EXPR)
	       && TREE_CODE (gimple_assign_rhs1 (offset_def)) == SSA_NAME
	       && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
	       && (tmp = div_if_zero_remainder (EXACT_DIV_EXPR,
						gimple_assign_rhs2 (offset_def),
						tunit)) != NULL_TREE)
	{
	  /* The constant part divided evenly by the element size;
	     check that the variable part is itself idx * tunit.  */
	  gimple offset_def2
	    = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (offset_def));
	  if (is_gimple_assign (offset_def2)
	      && gimple_assign_rhs_code (offset_def2) == MULT_EXPR
	      && TREE_CODE (gimple_assign_rhs2 (offset_def2)) == INTEGER_CST
	      && tree_int_cst_equal (gimple_assign_rhs2 (offset_def2), tunit))
	    {
	      /* Re-associate as (idx +- CST/tunit), preserving the
		 original PLUS/MINUS code.  */
	      index = fold_build2 (gimple_assign_rhs_code (offset_def),
				   TREE_TYPE (offset),
				   gimple_assign_rhs1 (offset_def2), tmp);
	    }
	  else
	    return false;
	}
      else
	return false;
    }

  /* Replace the pointer addition with array indexing.  */
  index = force_gimple_operand_gsi (use_stmt_gsi, index, true, NULL_TREE,
				    true, GSI_SAME_STMT);
  gimple_assign_set_rhs_from_tree (use_stmt_gsi, unshare_expr (def_rhs));
  use_stmt = gsi_stmt (*use_stmt_gsi);
  /* Patch the (freshly unshared) ARRAY_REF's index operand.  */
  TREE_OPERAND (TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0), 1)
    = index;

  /* That should have created gimple, so there is no need to
     record information to undo the propagation.  */
  fold_stmt_inplace (use_stmt);
  tidy_after_forward_propagate_addr (use_stmt);
  return true;
}
/* Build a call to the finalization wrapper FINAL_WRAPPER for variable VAR
   of typespec TS.  The wrapper is called with three arguments: an array
   descriptor for VAR (scalars are wrapped in a temporary descriptor),
   the element size, and a logical saying whether coarray components
   should be finalized (FINI_COARRAY).  For BT_DERIVED the element size
   comes from the type itself and CLASS_SIZE must be NULL; otherwise
   CLASS_SIZE supplies the (dynamic) size expression.  Returns the
   statement block containing the call.  */
static tree
gfc_build_final_call (gfc_typespec ts, gfc_expr *final_wrapper, gfc_expr *var,
		      bool fini_coarray, gfc_expr *class_size)
{
  stmtblock_t block;
  gfc_se se;
  tree final_fndecl, array, size, tmp;
  symbol_attribute attr;

  gcc_assert (final_wrapper->expr_type == EXPR_VARIABLE);
  gcc_assert (var);

  gfc_start_block (&block);
  gfc_init_se (&se, NULL);
  gfc_conv_expr (&se, final_wrapper);
  final_fndecl = se.expr;
  /* The wrapper may be stored as a procedure pointer; dereference it.  */
  if (POINTER_TYPE_P (TREE_TYPE (final_fndecl)))
    final_fndecl = build_fold_indirect_ref_loc (input_location,
						final_fndecl);

  if (ts.type == BT_DERIVED)
    {
      tree elem_size;

      gcc_assert (!class_size);
      /* Element size is the static size of the derived type.  */
      elem_size = gfc_typenode_for_spec (&ts);
      elem_size = TYPE_SIZE_UNIT (elem_size);
      size = fold_convert (gfc_array_index_type, elem_size);

      gfc_init_se (&se, NULL);
      se.want_pointer = 1;
      if (var->rank)
	{
	  se.descriptor_only = 1;
	  gfc_conv_expr_descriptor (&se, var);
	  array = se.expr;
	}
      else
	{
	  gfc_conv_expr (&se, var);
	  gcc_assert (se.pre.head == NULL_TREE && se.post.head == NULL_TREE);
	  array = se.expr;

	  /* No copy back needed, hence set attr's allocatable/pointer
	     to zero.  */
	  gfc_clear_attr (&attr);
	  gfc_init_se (&se, NULL);
	  array = gfc_conv_scalar_to_descriptor (&se, array, attr);
	  gcc_assert (se.post.head == NULL_TREE);
	}
    }
  else
    {
      /* Polymorphic (CLASS) case: size comes from CLASS_SIZE and the
	 data/descriptor is reached through the class container.  */
      gfc_expr *array_expr;
      gcc_assert (class_size);
      gfc_init_se (&se, NULL);
      gfc_conv_expr (&se, class_size);
      gfc_add_block_to_block (&block, &se.pre);
      gcc_assert (se.post.head == NULL_TREE);
      size = se.expr;

      array_expr = gfc_copy_expr (var);
      gfc_init_se (&se, NULL);
      se.want_pointer = 1;
      if (array_expr->rank)
	{
	  gfc_add_class_array_ref (array_expr);
	  se.descriptor_only = 1;
	  gfc_conv_expr_descriptor (&se, array_expr);
	  array = se.expr;
	}
      else
	{
	  gfc_add_data_component (array_expr);
	  gfc_conv_expr (&se, array_expr);
	  gfc_add_block_to_block (&block, &se.pre);
	  gcc_assert (se.post.head == NULL_TREE);
	  array = se.expr;

	  /* NOTE(review): TMP assigned here is never read before being
	     overwritten by the build_call_expr_loc below — this looks
	     like a dead store; confirm against upstream history before
	     removing.  */
	  if (TREE_CODE (array) == ADDR_EXPR
	      && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (array, 0))))
	    tmp = TREE_OPERAND (array, 0);

	  if (!gfc_is_coarray (array_expr))
	    {
	      /* No copy back needed, hence set attr's allocatable/pointer
		 to zero.  */
	      gfc_clear_attr (&attr);
	      gfc_init_se (&se, NULL);
	      array = gfc_conv_scalar_to_descriptor (&se, array, attr);
	    }
	  gcc_assert (se.post.head == NULL_TREE);
	}
      gfc_free_expr (array_expr);
    }

  /* The wrapper takes the descriptor by reference.  */
  if (!POINTER_TYPE_P (TREE_TYPE (array)))
    array = gfc_build_addr_expr (NULL, array);

  gfc_add_block_to_block (&block, &se.pre);
  tmp = build_call_expr_loc (input_location,
			     final_fndecl, 3, array,
			     size, fini_coarray ? boolean_true_node
						: boolean_false_node);
  gfc_add_block_to_block (&block, &se.post);
  gfc_add_expr_to_block (&block, tmp);
  return gfc_finish_block (&block);
}
/* Interactive tree browser.  Reads commands from TB_IN_FILE in a loop
   and navigates the tree structure starting at BEGIN, printing nodes to
   TB_OUT_FILE.  Every node visited is pushed on TB_history_stack via
   TB_SET_HEAD; parent/previous links are cached in the TB_up_ht hash
   table.  Returns when the user quits or on EOF.

   Fix: the line buffer INPUT is allocated (getline-style: TB_getline
   grows it and the caller owns it) but was never freed on exit; free it
   at the single exit point.  */

void
browse_tree (tree begin)
{
  tree head;
  TB_CODE tbc = TB_UNUSED_COMMAND;
  ssize_t rd;
  char *input = NULL;
  long input_size = 0;

  fprintf (TB_OUT_FILE, "\nTree Browser\n");

/* Push N on the history stack, make it the current HEAD, and echo it
   when verbose mode is on.  */
#define TB_SET_HEAD(N) do { \
  TB_history_stack = tree_cons (NULL_TREE, (N), TB_history_stack); \
  head = N; \
  if (TB_verbose) \
    if (head) \
      { \
	print_generic_expr (TB_OUT_FILE, head, 0); \
	fprintf (TB_OUT_FILE, "\n"); \
      } \
} while (0)

  TB_SET_HEAD (begin);

  /* Store in a hashtable information about previous and upper statements.  */
  {
    TB_up_ht = htab_create (1023, htab_hash_pointer, &TB_parent_eq, NULL);
    TB_update_up (head);
  }

  while (24)
    {
      fprintf (TB_OUT_FILE, "TB> ");
      rd = TB_getline (&input, &input_size, TB_IN_FILE);

      if (rd == -1)
	/* EOF.  */
	goto ret;

      if (rd != 1)
	/* Get a new command.  Otherwise the user just pressed enter, and
	   thus she expects the last command to be reexecuted.  */
	tbc = TB_get_command (input);

      switch (tbc)
	{
	case TB_UPDATE_UP:
	  TB_update_up (head);
	  break;

	case TB_MAX:
	  if (head && (INTEGRAL_TYPE_P (head)
		       || TREE_CODE (head) == REAL_TYPE))
	    TB_SET_HEAD (TYPE_MAX_VALUE (head));
	  else
	    TB_WF;
	  break;

	case TB_MIN:
	  if (head && (INTEGRAL_TYPE_P (head)
		       || TREE_CODE (head) == REAL_TYPE))
	    TB_SET_HEAD (TYPE_MIN_VALUE (head));
	  else
	    TB_WF;
	  break;

	case TB_ELT:
	  if (head && TREE_CODE (head) == TREE_VEC)
	    {
	      /* This command takes another argument: the element number:
		 for example "elt 1".  */
	      TB_NIY;
	    }
	  else if (head && TREE_CODE (head) == VECTOR_CST)
	    {
	      /* This command takes another argument: the element number:
		 for example "elt 1".  */
	      TB_NIY;
	    }
	  else
	    TB_WF;
	  break;

	case TB_VALUE:
	  if (head && TREE_CODE (head) == TREE_LIST)
	    TB_SET_HEAD (TREE_VALUE (head));
	  else
	    TB_WF;
	  break;

	case TB_PURPOSE:
	  if (head && TREE_CODE (head) == TREE_LIST)
	    TB_SET_HEAD (TREE_PURPOSE (head));
	  else
	    TB_WF;
	  break;

	case TB_IMAG:
	  if (head && TREE_CODE (head) == COMPLEX_CST)
	    TB_SET_HEAD (TREE_IMAGPART (head));
	  else
	    TB_WF;
	  break;

	case TB_REAL:
	  if (head && TREE_CODE (head) == COMPLEX_CST)
	    TB_SET_HEAD (TREE_REALPART (head));
	  else
	    TB_WF;
	  break;

	case TB_BLOCK:
	  if (head && TREE_CODE (head) == BIND_EXPR)
	    TB_SET_HEAD (TREE_OPERAND (head, 2));
	  else
	    TB_WF;
	  break;

	case TB_SUBBLOCKS:
	  if (head && TREE_CODE (head) == BLOCK)
	    TB_SET_HEAD (BLOCK_SUBBLOCKS (head));
	  else
	    TB_WF;
	  break;

	case TB_SUPERCONTEXT:
	  if (head && TREE_CODE (head) == BLOCK)
	    TB_SET_HEAD (BLOCK_SUPERCONTEXT (head));
	  else
	    TB_WF;
	  break;

	case TB_VARS:
	  if (head && TREE_CODE (head) == BLOCK)
	    TB_SET_HEAD (BLOCK_VARS (head));
	  else if (head && TREE_CODE (head) == BIND_EXPR)
	    TB_SET_HEAD (TREE_OPERAND (head, 0));
	  else
	    TB_WF;
	  break;

	case TB_REFERENCE_TO_THIS:
	  if (head && TYPE_P (head))
	    TB_SET_HEAD (TYPE_REFERENCE_TO (head));
	  else
	    TB_WF;
	  break;

	case TB_POINTER_TO_THIS:
	  if (head && TYPE_P (head))
	    TB_SET_HEAD (TYPE_POINTER_TO (head));
	  else
	    TB_WF;
	  break;

	case TB_BASETYPE:
	  if (head && TREE_CODE (head) == OFFSET_TYPE)
	    TB_SET_HEAD (TYPE_OFFSET_BASETYPE (head));
	  else
	    TB_WF;
	  break;

	case TB_ARG_TYPES:
	  if (head && (TREE_CODE (head) == FUNCTION_TYPE
		       || TREE_CODE (head) == METHOD_TYPE))
	    TB_SET_HEAD (TYPE_ARG_TYPES (head));
	  else
	    TB_WF;
	  break;

	case TB_METHOD_BASE_TYPE:
	  if (head && (TREE_CODE (head) == FUNCTION_TYPE
		       || TREE_CODE (head) == METHOD_TYPE)
	      && TYPE_METHOD_BASETYPE (head))
	    TB_SET_HEAD (TYPE_METHOD_BASETYPE (head));
	  else
	    TB_WF;
	  break;

	case TB_FIELDS:
	  if (head && (TREE_CODE (head) == RECORD_TYPE
		       || TREE_CODE (head) == UNION_TYPE
		       || TREE_CODE (head) == QUAL_UNION_TYPE))
	    TB_SET_HEAD (TYPE_FIELDS (head));
	  else
	    TB_WF;
	  break;

	case TB_DOMAIN:
	  if (head && TREE_CODE (head) == ARRAY_TYPE)
	    TB_SET_HEAD (TYPE_DOMAIN (head));
	  else
	    TB_WF;
	  break;

	case TB_VALUES:
	  if (head && TREE_CODE (head) == ENUMERAL_TYPE)
	    TB_SET_HEAD (TYPE_VALUES (head));
	  else
	    TB_WF;
	  break;

	case TB_ARG_TYPE_AS_WRITTEN:
	  if (head && TREE_CODE (head) == PARM_DECL)
	    TB_SET_HEAD (DECL_ARG_TYPE_AS_WRITTEN (head));
	  else
	    TB_WF;
	  break;

	case TB_ARG_TYPE:
	  if (head && TREE_CODE (head) == PARM_DECL)
	    TB_SET_HEAD (DECL_ARG_TYPE (head));
	  else
	    TB_WF;
	  break;

	case TB_INITIAL:
	  if (head && DECL_P (head))
	    TB_SET_HEAD (DECL_INITIAL (head));
	  else
	    TB_WF;
	  break;

	case TB_RESULT:
	  if (head && DECL_P (head))
	    TB_SET_HEAD (DECL_RESULT_FLD (head));
	  else
	    TB_WF;
	  break;

	case TB_ARGUMENTS:
	  if (head && DECL_P (head))
	    TB_SET_HEAD (DECL_ARGUMENTS (head));
	  else
	    TB_WF;
	  break;

	case TB_ABSTRACT_ORIGIN:
	  if (head && DECL_P (head))
	    TB_SET_HEAD (DECL_ABSTRACT_ORIGIN (head));
	  else if (head && TREE_CODE (head) == BLOCK)
	    TB_SET_HEAD (BLOCK_ABSTRACT_ORIGIN (head));
	  else
	    TB_WF;
	  break;

	case TB_ATTRIBUTES:
	  if (head && DECL_P (head))
	    TB_SET_HEAD (DECL_ATTRIBUTES (head));
	  else if (head && TYPE_P (head))
	    TB_SET_HEAD (TYPE_ATTRIBUTES (head));
	  else
	    TB_WF;
	  break;

	case TB_CONTEXT:
	  if (head && DECL_P (head))
	    TB_SET_HEAD (DECL_CONTEXT (head));
	  else if (head && TYPE_P (head)
		   && TYPE_CONTEXT (head))
	    TB_SET_HEAD (TYPE_CONTEXT (head));
	  else
	    TB_WF;
	  break;

	case TB_OFFSET:
	  if (head && TREE_CODE (head) == FIELD_DECL)
	    TB_SET_HEAD (DECL_FIELD_OFFSET (head));
	  else
	    TB_WF;
	  break;

	case TB_BIT_OFFSET:
	  if (head && TREE_CODE (head) == FIELD_DECL)
	    TB_SET_HEAD (DECL_FIELD_BIT_OFFSET (head));
	  else
	    TB_WF;
	  break;

	case TB_UNIT_SIZE:
	  if (head && DECL_P (head))
	    TB_SET_HEAD (DECL_SIZE_UNIT (head));
	  else if (head && TYPE_P (head))
	    TB_SET_HEAD (TYPE_SIZE_UNIT (head));
	  else
	    TB_WF;
	  break;

	case TB_SIZE:
	  if (head && DECL_P (head))
	    TB_SET_HEAD (DECL_SIZE (head));
	  else if (head && TYPE_P (head))
	    TB_SET_HEAD (TYPE_SIZE (head));
	  else
	    TB_WF;
	  break;

	case TB_TYPE:
	  if (head && TREE_TYPE (head))
	    TB_SET_HEAD (TREE_TYPE (head));
	  else
	    TB_WF;
	  break;

	case TB_DECL_SAVED_TREE:
	  if (head && TREE_CODE (head) == FUNCTION_DECL
	      && DECL_SAVED_TREE (head))
	    TB_SET_HEAD (DECL_SAVED_TREE (head));
	  else
	    TB_WF;
	  break;

	case TB_BODY:
	  if (head && TREE_CODE (head) == BIND_EXPR)
	    TB_SET_HEAD (TREE_OPERAND (head, 1));
	  else
	    TB_WF;
	  break;

	case TB_CHILD_0:
	  if (head && EXPR_P (head) && TREE_OPERAND (head, 0))
	    TB_SET_HEAD (TREE_OPERAND (head, 0));
	  else
	    TB_WF;
	  break;

	case TB_CHILD_1:
	  if (head && EXPR_P (head) && TREE_OPERAND (head, 1))
	    TB_SET_HEAD (TREE_OPERAND (head, 1));
	  else
	    TB_WF;
	  break;

	case TB_CHILD_2:
	  if (head && EXPR_P (head) && TREE_OPERAND (head, 2))
	    TB_SET_HEAD (TREE_OPERAND (head, 2));
	  else
	    TB_WF;
	  break;

	case TB_CHILD_3:
	  if (head && EXPR_P (head) && TREE_OPERAND (head, 3))
	    TB_SET_HEAD (TREE_OPERAND (head, 3));
	  else
	    TB_WF;
	  break;

	case TB_PRINT:
	  if (head)
	    debug_tree (head);
	  else
	    TB_WF;
	  break;

	case TB_PRETTY_PRINT:
	  if (head)
	    {
	      print_generic_stmt (TB_OUT_FILE, head, 0);
	      fprintf (TB_OUT_FILE, "\n");
	    }
	  else
	    TB_WF;
	  break;

	case TB_SEARCH_NAME:
	  break;

	case TB_SEARCH_CODE:
	  {
	    enum tree_code code;
	    char *arg_text;

	    arg_text = strchr (input, ' ');
	    if (arg_text == NULL)
	      {
		fprintf (TB_OUT_FILE,
			 "First argument is missing. This isn't a valid search command. \n");
		break;
	      }
	    code = TB_get_tree_code (arg_text + 1);

	    /* Search in the subtree a node with the given code.  */
	    {
	      tree res;

	      res = walk_tree (&head, find_node_with_code, &code, NULL);
	      if (res == NULL_TREE)
		{
		  fprintf (TB_OUT_FILE,
			   "There's no node with this code (reachable via the walk_tree function from this node).\n");
		}
	      else
		{
		  fprintf (TB_OUT_FILE, "Achoo! I got this node in the tree.\n");
		  TB_SET_HEAD (res);
		}
	    }
	    break;
	  }

/* Apply FCT to the current HEAD and move to the result when non-null;
   complain otherwise.  */
#define TB_MOVE_HEAD(FCT) do {       \
  if (head)                          \
    {                                \
      tree t;                        \
      t = FCT (head);                \
      if (t)                         \
	TB_SET_HEAD (t);             \
      else                           \
	TB_WF;                       \
    }                                \
  else                               \
    TB_WF;                           \
} while (0)

	case TB_FIRST:
	  TB_MOVE_HEAD (TB_first_in_bind);
	  break;

	case TB_LAST:
	  TB_MOVE_HEAD (TB_last_in_bind);
	  break;

	case TB_UP:
	  TB_MOVE_HEAD (TB_up_expr);
	  break;

	case TB_PREV:
	  TB_MOVE_HEAD (TB_prev_expr);
	  break;

	case TB_NEXT:
	  TB_MOVE_HEAD (TB_next_expr);
	  break;

	case TB_HPREV:
	  /* This command is a little bit special, since it deals with
	     history stack.  For this reason it should keep the
	     "head = ..." statement and not use TB_MOVE_HEAD.  */
	  if (head)
	    {
	      tree t;
	      t = TB_history_prev ();
	      if (t)
		{
		  head = t;
		  if (TB_verbose)
		    {
		      print_generic_expr (TB_OUT_FILE, head, 0);
		      fprintf (TB_OUT_FILE, "\n");
		    }
		}
	      else
		TB_WF;
	    }
	  else
	    TB_WF;
	  break;

	case TB_CHAIN:
	  /* Don't go further if it's the last node in this chain.  */
	  if (head && TREE_CODE (head) == BLOCK)
	    TB_SET_HEAD (BLOCK_CHAIN (head));
	  else if (head && TREE_CHAIN (head))
	    TB_SET_HEAD (TREE_CHAIN (head));
	  else
	    TB_WF;
	  break;

	case TB_FUN:
	  /* Go up to the current function declaration.  */
	  TB_SET_HEAD (current_function_decl);
	  fprintf (TB_OUT_FILE, "Current function declaration.\n");
	  break;

	case TB_HELP:
	  /* Display a help message.  */
	  {
	    int i;
	    fprintf (TB_OUT_FILE, "Possible commands are:\n\n");
	    for (i = 0; i < TB_UNUSED_COMMAND; i++)
	      {
		fprintf (TB_OUT_FILE, "%20s - %s\n",
			 TB_COMMAND_TEXT (i), TB_COMMAND_HELP (i));
	      }
	  }
	  break;

	case TB_VERBOSE:
	  if (TB_verbose == 0)
	    {
	      TB_verbose = 1;
	      fprintf (TB_OUT_FILE, "Verbose on.\n");
	    }
	  else
	    {
	      TB_verbose = 0;
	      fprintf (TB_OUT_FILE, "Verbose off.\n");
	    }
	  break;

	case TB_EXIT:
	case TB_QUIT:
	  /* Just exit from this function.  */
	  goto ret;

	default:
	  TB_NIY;
	}
    }

 ret:;
  htab_delete (TB_up_ht);
  /* TB_getline allocates/grows INPUT on our behalf; release it
     (free (NULL) is a harmless no-op if no line was ever read).  */
  free (input);
  return;
}
/* Instrument a single memory reference *TP (a tree expression) for
   mudflap bounds checking.  ITER is the statement iterator at the point
   of use, LOCUS the source location, and DIRFLAG distinguishes reads
   (integer_zero_node) from writes.  Computes a [base, limit] byte range
   for the access and emits a check via mf_build_check_statement_for;
   returns without instrumenting when the access is provably safe or
   unsupported.  */
static void
mf_xform_derefs_1 (block_stmt_iterator *iter, tree *tp,
		   location_t *locus, tree dirflag)
{
  tree type, base, limit, addr, size, t;

  /* Don't instrument read operations.  */
  if (dirflag == integer_zero_node && flag_mudflap_ignore_reads)
    return;

  /* Don't instrument marked nodes.  */
  if (mf_marked_p (*tp))
    return;

  t = *tp;
  type = TREE_TYPE (t);

  if (type == error_mark_node)
    return;

  /* Default access size; refined below for bitfields.  */
  size = TYPE_SIZE_UNIT (type);

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case COMPONENT_REF:
      {
	/* This is trickier than it may first appear.  The reason is
	   that we are looking at expressions from the "inside out" at
	   this point.  We may have a complex nested aggregate/array
	   expression (e.g. "a.b[i].c"), maybe with an indirection as
	   the leftmost operator ("p->a.b.d"), where instrumentation
	   is necessary.  Or we may have an innocent "a.b.c"
	   expression that must not be instrumented.  We need to
	   recurse all the way down the nesting structure to figure it
	   out: looking just at the outer node is not enough.  */
	tree var;
	int component_ref_only = (TREE_CODE (t) == COMPONENT_REF);
	/* If we have a bitfield component reference, we must note the
	   innermost addressable object in ELT, from which we will
	   construct the byte-addressable bounds of the bitfield.  */
	tree elt = NULL_TREE;
	int bitfield_ref_p = (TREE_CODE (t) == COMPONENT_REF
			      && DECL_BIT_FIELD_TYPE (TREE_OPERAND (t, 1)));

	/* Iterate to the top of the ARRAY_REF/COMPONENT_REF
	   containment hierarchy to find the outermost VAR_DECL.  */
	var = TREE_OPERAND (t, 0);
	while (1)
	  {
	    if (bitfield_ref_p && elt == NULL_TREE
		&& (TREE_CODE (var) == ARRAY_REF
		    || TREE_CODE (var) == COMPONENT_REF))
	      elt = var;

	    if (TREE_CODE (var) == ARRAY_REF)
	      {
		component_ref_only = 0;
		var = TREE_OPERAND (var, 0);
	      }
	    else if (TREE_CODE (var) == COMPONENT_REF)
	      var = TREE_OPERAND (var, 0);
	    else if (INDIRECT_REF_P (var))
	      {
		base = TREE_OPERAND (var, 0);
		break;
	      }
	    else
	      {
		gcc_assert (TREE_CODE (var) == VAR_DECL
			    || TREE_CODE (var) == PARM_DECL
			    || TREE_CODE (var) == RESULT_DECL
			    || TREE_CODE (var) == STRING_CST);
		/* Don't instrument this access if the underlying
		   variable is not "eligible".  This test matches
		   those arrays that have only known-valid indexes,
		   and thus are not labeled TREE_ADDRESSABLE.  */
		if (! mf_decl_eligible_p (var) || component_ref_only)
		  return;
		else
		  {
		    base = build1 (ADDR_EXPR,
				   build_pointer_type (TREE_TYPE (var)), var);
		    break;
		  }
	      }
	  }

	/* Handle the case of ordinary non-indirection structure
	   accesses.  These have only nested COMPONENT_REF nodes (no
	   INDIRECT_REF), but pass through the above filter loop.
	   Note that it's possible for such a struct variable to match
	   the eligible_p test because someone else might take its
	   address sometime.  */

	/* We need special processing for bitfield components, because
	   their addresses cannot be taken.  */
	if (bitfield_ref_p)
	  {
	    tree field = TREE_OPERAND (t, 1);

	    if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
	      size = DECL_SIZE_UNIT (field);

	    if (elt)
	      elt = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (elt)),
			    elt);
	    addr = fold_convert (ptr_type_node, elt ? elt : base);
	    addr = fold_build2 (PLUS_EXPR, ptr_type_node,
				addr, fold_convert (ptr_type_node,
						    byte_position (field)));
	  }
	else
	  addr = build1 (ADDR_EXPR, build_pointer_type (type), t);

	/* limit = addr + size - 1 (inclusive upper bound).  */
	limit = fold_build2 (MINUS_EXPR, mf_uintptr_type,
			     fold_build2 (PLUS_EXPR, mf_uintptr_type,
					  convert (mf_uintptr_type, addr),
					  size),
			     integer_one_node);
      }
      break;

    case INDIRECT_REF:
      addr = TREE_OPERAND (t, 0);
      base = addr;
      limit = fold_build2 (MINUS_EXPR, ptr_type_node,
			   fold_build2 (PLUS_EXPR, ptr_type_node, base,
					size),
			   integer_one_node);
      break;

    case TARGET_MEM_REF:
      addr = tree_mem_ref_addr (ptr_type_node, t);
      base = addr;
      limit = fold_build2 (MINUS_EXPR, ptr_type_node,
			   fold_build2 (PLUS_EXPR, ptr_type_node, base,
					size),
			   build_int_cst (ptr_type_node, 1));
      break;

    case ARRAY_RANGE_REF:
      warning (0, "mudflap checking not yet implemented for ARRAY_RANGE_REF");
      return;

    case BIT_FIELD_REF:
      /* ??? merge with COMPONENT_REF code above?  */
      {
	tree ofs, rem, bpu;

	/* If we're not dereferencing something, then the access
	   must be ok.  */
	if (TREE_CODE (TREE_OPERAND (t, 0)) != INDIRECT_REF)
	  return;

	/* Round the bit offset down to bytes; carry the remainder
	   into the size so the whole bitfield is covered.  */
	bpu = bitsize_int (BITS_PER_UNIT);
	ofs = convert (bitsizetype, TREE_OPERAND (t, 2));
	rem = size_binop (TRUNC_MOD_EXPR, ofs, bpu);
	ofs = size_binop (TRUNC_DIV_EXPR, ofs, bpu);

	size = convert (bitsizetype, TREE_OPERAND (t, 1));
	size = size_binop (PLUS_EXPR, size, rem);
	size = size_binop (CEIL_DIV_EXPR, size, bpu);
	size = convert (sizetype, size);

	addr = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
	addr = convert (ptr_type_node, addr);
	addr = fold_build2 (PLUS_EXPR, ptr_type_node, addr, ofs);

	base = addr;
	limit = fold_build2 (MINUS_EXPR, ptr_type_node,
			     fold_build2 (PLUS_EXPR, ptr_type_node,
					  base, size),
			     integer_one_node);
      }
      break;

    default:
      return;
    }

  mf_build_check_statement_for (base, limit, iter, locus, dirflag);
}
/* Begin a catch block.  DECL is the catch parameter (or NULL for
   "catch (...)").  Emits the calls that register the handler with the
   C++ runtime (__cxa_begin_catch and friends), arranges for
   __cxa_end_catch via a cleanup, and initializes DECL from the caught
   object.  Returns the handler's type (NULL_TREE for catch-all), which
   the caller records on the handler.
   This is an Apple-local variant of the upstream function (see the
   APPLE LOCAL markers); a newer upstream revision restructures the
   begin-catch sequence.  */
tree
expand_start_catch_block (tree decl)
{
  tree exp;
  tree type;

  if (! doing_eh (1))
    return NULL_TREE;

  /* Make sure this declaration is reasonable.  */
  if (decl && !complete_ptr_ref_or_void_ptr_p (TREE_TYPE (decl), NULL_TREE))
    decl = NULL_TREE;

  if (decl)
    type = prepare_eh_type (TREE_TYPE (decl));
  else
    type = NULL_TREE;

  if (decl && decl_is_java_type (type, 1))
    {
      /* Java only passes object via pointer and doesn't require
	 adjusting.  The java object is immediately before the
	 generic exception header.  */
      exp = build_exc_ptr ();
      exp = build1 (NOP_EXPR, build_pointer_type (type), exp);
      exp = build2 (MINUS_EXPR, TREE_TYPE (exp), exp,
		    TYPE_SIZE_UNIT (TREE_TYPE (exp)));
      exp = build_indirect_ref (exp, NULL);
      initialize_handler_parm (decl, exp);
      return type;
    }

  /* Call __cxa_end_catch at the end of processing the exception.  */
  push_eh_cleanup (type);

  /* If there's no decl at all, then all we need to do is make sure
     to tell the runtime that we've begun handling the exception.  */
  if (decl == NULL)
    /* APPLE LOCAL radar 2848255 */
    finish_expr_stmt (do_begin_catch (NULL_TREE));

  /* If the C++ object needs constructing, we need to do that before
     calling __cxa_begin_catch, so that std::uncaught_exception gets
     the right value during the copy constructor.  */
  /* APPLE LOCAL begin mainline 2006-02-24 4086777 */
  else if (flag_use_cxa_get_exception_ptr
	   && TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
  /* APPLE LOCAL end mainline 2006-02-24 4086777 */
    {
      exp = do_get_exception_ptr ();
      initialize_handler_parm (decl, exp);
      /* APPLE LOCAL radar 2848255 */
      finish_expr_stmt (do_begin_catch (type));
    }

  /* Otherwise the type uses a bitwise copy, and we don't have to worry
     about the value of std::uncaught_exception and therefore can do the
     copy with the return value of __cxa_end_catch instead.  */
  else
    {
      /* APPLE LOCAL radar 2848255 */
      tree init = do_begin_catch (type);
      exp = create_temporary_var (ptr_type_node);
      DECL_REGISTER (exp) = 1;
      /* NOTE(review): INIT is handed both to cp_finish_decl (as the
	 initializer) and to the build_modify_expr below — this appears
	 to expand the do_begin_catch call expression twice; the
	 upstream version of this function has no second statement.
	 Confirm against the Apple-local patch intent before changing.  */
      cp_finish_decl (exp, init, NULL_TREE, LOOKUP_ONLYCONVERTING);
      finish_expr_stmt (build_modify_expr (exp, INIT_EXPR, init));
      initialize_handler_parm (decl, exp);
    }

  return type;
}
/* Build a GENERIC expression that applies FN (a constructor, copy
   function, or destructor used by an OpenMP clause) to ARG1, and to
   ARG2 as the source when non-NULL.  Trailing parameters of FN beyond
   the one or two object pointers are filled in with their default
   arguments.  When ARG1 is an array, a hand-built pointer-walking loop
   (label + conditional back-jump) applies FN to every innermost
   element.  Returns the resulting expression, or NULL when FN is
   NULL.  */
static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm;
  int i;

  if (fn == NULL)
    return NULL;

  /* Skip the "this" parameter; skip the source parameter too when a
     second argument is supplied.  */
  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab, t;

      start1 = arg1;
      start2 = arg2;
      /* Descend to the innermost element of (possibly multi-
	 dimensional) arrays; START1/START2 become &array[0]...[0].  */
      do
	{
	  inner_type = TREE_TYPE (inner_type);
	  start1 = build4 (ARRAY_REF, inner_type, start1,
			   size_zero_node, NULL, NULL);
	  if (arg2)
	    start2 = build4 (ARRAY_REF, inner_type, start2,
			     size_zero_node, NULL, NULL);
	}
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr (start1);
      if (arg2)
	start2 = build_fold_addr_expr (start2);

      /* END1 = one past the last element of ARG1.  */
      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_convert (TREE_TYPE (start1), end1);
      end1 = build2 (PLUS_EXPR, TREE_TYPE (start1), start1, end1);

      /* P1 (and P2) are the loop induction pointers.  */
      p1 = create_tmp_var (TREE_TYPE (start1), NULL);
      t = build2 (MODIFY_EXPR, void_type_node, p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  p2 = create_tmp_var (TREE_TYPE (start2), NULL);
	  t = build2 (MODIFY_EXPR, void_type_node, p2, start2);
	  append_to_statement_list (t, &ret);
	}

      /* Loop head label.  */
      lab = create_artificial_label ();
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      t = tree_cons (NULL, p1, NULL);
      if (arg2)
	t = tree_cons (NULL, p2, t);
      /* Handle default arguments.  */
      i = 1 + (arg2 != NULL);
      for (parm = defparm; parm != void_list_node; parm = TREE_CHAIN (parm))
	t = tree_cons (NULL, convert_default_arg (TREE_VALUE (parm),
						  TREE_PURPOSE (parm),
						  fn, i++), t);
      t = build_call (fn, nreverse (t));
      append_to_statement_list (t, &ret);

      /* Advance P1 (and P2) by one element.  */
      t = fold_convert (TREE_TYPE (p1), TYPE_SIZE_UNIT (inner_type));
      t = build2 (PLUS_EXPR, TREE_TYPE (p1), p1, t);
      t = build2 (MODIFY_EXPR, void_type_node, p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  t = fold_convert (TREE_TYPE (p2), TYPE_SIZE_UNIT (inner_type));
	  t = build2 (PLUS_EXPR, TREE_TYPE (p2), p2, t);
	  t = build2 (MODIFY_EXPR, void_type_node, p2, t);
	  append_to_statement_list (t, &ret);
	}

      /* Back-jump while P1 != END1.  */
      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      /* Scalar case: a single call on &ARG1 (and &ARG2).  */
      tree t = tree_cons (NULL, build_fold_addr_expr (arg1), NULL);
      if (arg2)
	t = tree_cons (NULL, build_fold_addr_expr (arg2), t);
      /* Handle default arguments.  */
      i = 1 + (arg2 != NULL);
      for (parm = defparm; parm != void_list_node; parm = TREE_CHAIN (parm))
	t = tree_cons (NULL, convert_default_arg (TREE_VALUE (parm),
						  TREE_PURPOSE (parm),
						  fn, i++), t);
      return build_call (fn, nreverse (t));
    }
}