/* Add code: __thread gcov* __gcov_indirect_call_counters; // pointer to actual counter __thread void* __gcov_indirect_call_callee; // actual callee address */ static void init_ic_make_global_vars (void) { tree gcov_type_ptr; ptr_void = build_pointer_type (void_type_node); /* Workaround for binutils bug 14342. Once it is fixed, remove lto path. */ if (flag_lto) { ic_void_ptr_var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier ("__gcov_indirect_call_callee_ltopriv"), ptr_void); TREE_PUBLIC (ic_void_ptr_var) = 1; DECL_COMMON (ic_void_ptr_var) = 1; DECL_VISIBILITY (ic_void_ptr_var) = VISIBILITY_HIDDEN; DECL_VISIBILITY_SPECIFIED (ic_void_ptr_var) = true; } else { ic_void_ptr_var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier ("__gcov_indirect_call_callee"), ptr_void); TREE_PUBLIC (ic_void_ptr_var) = 1; DECL_EXTERNAL (ic_void_ptr_var) = 1; } TREE_STATIC (ic_void_ptr_var) = 1; DECL_ARTIFICIAL (ic_void_ptr_var) = 1; DECL_INITIAL (ic_void_ptr_var) = NULL; if (targetm.have_tls) DECL_TLS_MODEL (ic_void_ptr_var) = decl_default_tls_model (ic_void_ptr_var); varpool_finalize_decl (ic_void_ptr_var); gcov_type_ptr = build_pointer_type (get_gcov_type ()); /* Workaround for binutils bug 14342. Once it is fixed, remove lto path. 
*/ if (flag_lto) { ic_gcov_type_ptr_var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier ("__gcov_indirect_call_counters_ltopriv"), gcov_type_ptr); TREE_PUBLIC (ic_gcov_type_ptr_var) = 1; DECL_COMMON (ic_gcov_type_ptr_var) = 1; DECL_VISIBILITY (ic_gcov_type_ptr_var) = VISIBILITY_HIDDEN; DECL_VISIBILITY_SPECIFIED (ic_gcov_type_ptr_var) = true; } else { ic_gcov_type_ptr_var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier ("__gcov_indirect_call_counters"), gcov_type_ptr); TREE_PUBLIC (ic_gcov_type_ptr_var) = 1; DECL_EXTERNAL (ic_gcov_type_ptr_var) = 1; } TREE_STATIC (ic_gcov_type_ptr_var) = 1; DECL_ARTIFICIAL (ic_gcov_type_ptr_var) = 1; DECL_INITIAL (ic_gcov_type_ptr_var) = NULL; if (targetm.have_tls) DECL_TLS_MODEL (ic_gcov_type_ptr_var) = decl_default_tls_model (ic_gcov_type_ptr_var); varpool_finalize_decl (ic_gcov_type_ptr_var); }
static tree trans_runtime_error_vararg (bool error, locus* where, const char* msgid, va_list ap) { stmtblock_t block; tree tmp; tree arg, arg2; tree *argarray; tree fntype; char *message; const char *p; int line, nargs, i; location_t loc; /* Compute the number of extra arguments from the format string. */ for (p = msgid, nargs = 0; *p; p++) if (*p == '%') { p++; if (*p != '%') nargs++; } /* The code to generate the error. */ gfc_start_block (&block); if (where) { line = LOCATION_LINE (where->lb->location); asprintf (&message, "At line %d of file %s", line, where->lb->file->filename); } else asprintf (&message, "In file '%s', around line %d", gfc_source_file, input_line + 1); arg = gfc_build_addr_expr (pchar_type_node, gfc_build_localized_cstring_const (message)); free (message); asprintf (&message, "%s", _(msgid)); arg2 = gfc_build_addr_expr (pchar_type_node, gfc_build_localized_cstring_const (message)); free (message); /* Build the argument array. */ argarray = XALLOCAVEC (tree, nargs + 2); argarray[0] = arg; argarray[1] = arg2; for (i = 0; i < nargs; i++) argarray[2 + i] = va_arg (ap, tree); /* Build the function call to runtime_(warning,error)_at; because of the variable number of arguments, we can't use build_call_expr_loc dinput_location, irectly. */ if (error) fntype = TREE_TYPE (gfor_fndecl_runtime_error_at); else fntype = TREE_TYPE (gfor_fndecl_runtime_warning_at); loc = where ? where->lb->location : input_location; tmp = fold_builtin_call_array (loc, TREE_TYPE (fntype), fold_build1_loc (loc, ADDR_EXPR, build_pointer_type (fntype), error ? gfor_fndecl_runtime_error_at : gfor_fndecl_runtime_warning_at), nargs + 2, argarray); gfc_add_expr_to_block (&block, tmp); return gfc_finish_block (&block); }
/* Instrument the memory reference *TP (appearing in the statement at
   ITER, with source location *LOCUS) with a mudflap bounds check.
   DIRFLAG is integer_zero_node for reads, nonzero for writes.  The
   function computes the BASE and LIMIT byte addresses of the accessed
   object and emits a check via mf_build_check_statement_for; it
   returns without instrumenting when the access is known-safe, marked,
   or of an unsupported form.  */
static void
mf_xform_derefs_1 (block_stmt_iterator *iter, tree *tp,
		   location_t *locus, tree dirflag)
{
  tree type, base, limit, addr, size, t;

  /* Don't instrument read operations.  */
  if (dirflag == integer_zero_node && flag_mudflap_ignore_reads)
    return;

  /* Don't instrument marked nodes.  */
  if (mf_marked_p (*tp))
    return;

  t = *tp;
  type = TREE_TYPE (t);

  if (type == error_mark_node)
    return;

  size = TYPE_SIZE_UNIT (type);

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case COMPONENT_REF:
      {
	/* This is trickier than it may first appear.  The reason is
	   that we are looking at expressions from the "inside out" at
	   this point.  We may have a complex nested aggregate/array
	   expression (e.g. "a.b[i].c"), maybe with an indirection as
	   the leftmost operator ("p->a.b.d"), where instrumentation
	   is necessary.  Or we may have an innocent "a.b.c"
	   expression that must not be instrumented.  We need to
	   recurse all the way down the nesting structure to figure it
	   out: looking just at the outer node is not enough.  */
	tree var;
	int component_ref_only = (TREE_CODE (t) == COMPONENT_REF);
	/* If we have a bitfield component reference, we must note the
	   innermost addressable object in ELT, from which we will
	   construct the byte-addressable bounds of the bitfield.  */
	tree elt = NULL_TREE;
	int bitfield_ref_p = (TREE_CODE (t) == COMPONENT_REF
			      && DECL_BIT_FIELD_TYPE (TREE_OPERAND (t, 1)));

	/* Iterate to the top of the ARRAY_REF/COMPONENT_REF
	   containment hierarchy to find the outermost VAR_DECL.  */
	var = TREE_OPERAND (t, 0);
	while (1)
	  {
	    /* Remember the innermost aggregate ref enclosing a
	       bitfield; its address is takeable, the field's isn't.  */
	    if (bitfield_ref_p && elt == NULL_TREE
		&& (TREE_CODE (var) == ARRAY_REF
		    || TREE_CODE (var) == COMPONENT_REF))
	      elt = var;

	    if (TREE_CODE (var) == ARRAY_REF)
	      {
		/* An array index may be out of bounds even without
		   indirection, so this is no longer a pure
		   component-ref chain.  */
		component_ref_only = 0;
		var = TREE_OPERAND (var, 0);
	      }
	    else if (TREE_CODE (var) == COMPONENT_REF)
	      var = TREE_OPERAND (var, 0);
	    else if (INDIRECT_REF_P (var))
	      {
		/* Leftmost operator is an indirection: the pointer
		   itself is the base of the checked region.  */
		base = TREE_OPERAND (var, 0);
		break;
	      }
	    else
	      {
		gcc_assert (TREE_CODE (var) == VAR_DECL
			    || TREE_CODE (var) == PARM_DECL
			    || TREE_CODE (var) == RESULT_DECL
			    || TREE_CODE (var) == STRING_CST);
		/* Don't instrument this access if the underlying
		   variable is not "eligible".  This test matches
		   those arrays that have only known-valid indexes,
		   and thus are not labeled TREE_ADDRESSABLE.  */
		if (! mf_decl_eligible_p (var) || component_ref_only)
		  return;
		else
		  {
		    base = build1 (ADDR_EXPR,
				   build_pointer_type (TREE_TYPE (var)),
				   var);
		    break;
		  }
	      }
	  }

	/* Handle the case of ordinary non-indirection structure
	   accesses.  These have only nested COMPONENT_REF nodes (no
	   INDIRECT_REF), but pass through the above filter loop.
	   Note that it's possible for such a struct variable to match
	   the eligible_p test because someone else might take its
	   address sometime.  */

	/* We need special processing for bitfield components, because
	   their addresses cannot be taken.  */
	if (bitfield_ref_p)
	  {
	    tree field = TREE_OPERAND (t, 1);

	    if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
	      size = DECL_SIZE_UNIT (field);

	    /* Address the enclosing aggregate (ELT) if one was found,
	       otherwise fall back to the outer BASE.  */
	    if (elt)
	      elt = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (elt)),
			    elt);
	    addr = fold_convert (ptr_type_node, elt ? elt : base);
	    addr = fold_build2 (PLUS_EXPR, ptr_type_node,
				addr, fold_convert (ptr_type_node,
						    byte_position (field)));
	  }
	else
	  addr = build1 (ADDR_EXPR, build_pointer_type (type), t);

	/* LIMIT = ADDR + SIZE - 1, i.e. the last byte accessed.  */
	limit = fold_build2 (MINUS_EXPR, mf_uintptr_type,
			     fold_build2 (PLUS_EXPR, mf_uintptr_type,
					  convert (mf_uintptr_type, addr),
					  size),
			     integer_one_node);
      }
      break;

    case INDIRECT_REF:
      addr = TREE_OPERAND (t, 0);
      base = addr;
      limit = fold_build2 (MINUS_EXPR, ptr_type_node,
			   fold_build2 (PLUS_EXPR, ptr_type_node, base,
					size),
			   integer_one_node);
      break;

    case TARGET_MEM_REF:
      addr = tree_mem_ref_addr (ptr_type_node, t);
      base = addr;
      limit = fold_build2 (MINUS_EXPR, ptr_type_node,
			   fold_build2 (PLUS_EXPR, ptr_type_node, base,
					size),
			   build_int_cst (ptr_type_node, 1));
      break;

    case ARRAY_RANGE_REF:
      warning (0, "mudflap checking not yet implemented for ARRAY_RANGE_REF");
      return;

    case BIT_FIELD_REF:
      /* ??? merge with COMPONENT_REF code above?  */
      {
	tree ofs, rem, bpu;

	/* If we're not dereferencing something, then the access
	   must be ok.  */
	if (TREE_CODE (TREE_OPERAND (t, 0)) != INDIRECT_REF)
	  return;

	/* Convert the bit offset/size of the field into a byte
	   offset (OFS) and a rounded-up byte size (SIZE).  */
	bpu = bitsize_int (BITS_PER_UNIT);
	ofs = convert (bitsizetype, TREE_OPERAND (t, 2));
	rem = size_binop (TRUNC_MOD_EXPR, ofs, bpu);
	ofs = size_binop (TRUNC_DIV_EXPR, ofs, bpu);

	size = convert (bitsizetype, TREE_OPERAND (t, 1));
	size = size_binop (PLUS_EXPR, size, rem);
	size = size_binop (CEIL_DIV_EXPR, size, bpu);
	size = convert (sizetype, size);

	addr = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
	addr = convert (ptr_type_node, addr);
	addr = fold_build2 (PLUS_EXPR, ptr_type_node, addr, ofs);

	base = addr;
	limit = fold_build2 (MINUS_EXPR, ptr_type_node,
			     fold_build2 (PLUS_EXPR, ptr_type_node,
					  base, size),
			     integer_one_node);
      }
      break;

    default:
      return;
    }

  mf_build_check_statement_for (base, limit, iter, locus, dirflag);
}
/* Create the backend tree types for the Fortran kinds (integer,
   logical, real, complex), the character/byte/void types and the
   array index and range types, pushing a named TYPE_DECL for each.

   NOTE(review): this definition appears TRUNCATED in this chunk — the
   final "else" below has no body here; the remainder of the function
   presumably follows in the original file.  Only comments were added;
   the code is unchanged.  */
void
gfc_init_types (void)
{
  char name_buf[16];
  int index;
  tree type;
  unsigned n;
  unsigned HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT lo;

  /* Create and name the types.  */
#define PUSH_TYPE(name, node) \
  pushdecl (build_decl (TYPE_DECL, get_identifier (name), node))

  for (index = 0; gfc_integer_kinds[index].kind != 0; ++index)
    {
      type = gfc_build_int_type (&gfc_integer_kinds[index]);
      gfc_integer_types[index] = type;
      snprintf (name_buf, sizeof(name_buf), "int%d",
		gfc_integer_kinds[index].kind);
      PUSH_TYPE (name_buf, type);
    }

  for (index = 0; gfc_logical_kinds[index].kind != 0; ++index)
    {
      type = gfc_build_logical_type (&gfc_logical_kinds[index]);
      gfc_logical_types[index] = type;
      snprintf (name_buf, sizeof(name_buf), "logical%d",
		gfc_logical_kinds[index].kind);
      PUSH_TYPE (name_buf, type);
    }

  /* Each real kind also gets the matching complex type.  */
  for (index = 0; gfc_real_kinds[index].kind != 0; index++)
    {
      type = gfc_build_real_type (&gfc_real_kinds[index]);
      gfc_real_types[index] = type;
      snprintf (name_buf, sizeof(name_buf), "real%d",
		gfc_real_kinds[index].kind);
      PUSH_TYPE (name_buf, type);

      type = gfc_build_complex_type (type);
      gfc_complex_types[index] = type;
      snprintf (name_buf, sizeof(name_buf), "complex%d",
		gfc_real_kinds[index].kind);
      PUSH_TYPE (name_buf, type);
    }

  gfc_character1_type_node = build_type_variant (unsigned_char_type_node,
						 0, 0);
  PUSH_TYPE ("char", gfc_character1_type_node);
  PUSH_TYPE ("byte", unsigned_char_type_node);
  PUSH_TYPE ("void", void_type_node);

  /* DBX debugging output gets upset if these aren't set.  */
  if (!TYPE_NAME (integer_type_node))
    PUSH_TYPE ("c_integer", integer_type_node);
  if (!TYPE_NAME (char_type_node))
    PUSH_TYPE ("c_char", char_type_node);

#undef PUSH_TYPE

  pvoid_type_node = build_pointer_type (void_type_node);
  ppvoid_type_node = build_pointer_type (pvoid_type_node);
  pchar_type_node = build_pointer_type (gfc_character1_type_node);

  gfc_array_index_type = gfc_get_int_type (gfc_index_integer_kind);
  gfc_array_range_type
    = build_range_type (gfc_array_index_type,
			build_int_cst (gfc_array_index_type, 0),
			NULL_TREE);

  /* The maximum array element size that can be handled is determined
     by the number of bits available to store this field in the array
     descriptor.  */
  n = TYPE_PRECISION (gfc_array_index_type) - GFC_DTYPE_SIZE_SHIFT;
  lo = ~ (unsigned HOST_WIDE_INT) 0;
  if (n > HOST_BITS_PER_WIDE_INT)
    hi = lo >> (2*HOST_BITS_PER_WIDE_INT - n);
  else
    /* NOTE(review): source truncated here in this chunk.  */
/* If the closure TYPE has a static op(), also add a conversion to
   function pointer: build both the conversion operator and the static
   "_FUN" thunk it returns, and add them as members of TYPE.  Only
   capture-less, non-template-context lambdas qualify.  */
void
maybe_add_lambda_conv_op (tree type)
{
  bool nested = (cfun != NULL);
  bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
  tree callop = lambda_function (type);

  /* Only capture-less lambdas convert to function pointer.  */
  if (LAMBDA_EXPR_CAPTURE_LIST (CLASSTYPE_LAMBDA_EXPR (type)) != NULL_TREE)
    return;

  if (processing_template_decl)
    return;

  bool const generic_lambda_p = generic_lambda_fn_p (callop);

  if (!generic_lambda_p && DECL_INITIAL (callop) == NULL_TREE)
    {
      /* If the op() wasn't instantiated due to errors, give up.  */
      gcc_assert (errorcount || sorrycount);
      return;
    }

  /* Non-template conversion operators are defined directly with
     build_call_a and using DIRECT_ARGVEC for arguments (including
     'this').  Templates are deferred and the CALL is built in-place.
     In the case of a deduced return call op, the decltype expression,
     DECLTYPE_CALL, used as a substitute for the return type is also
     built in-place.  The arguments of DECLTYPE_CALL in the return
     expression may differ in flags from those in the body CALL.  In
     particular, parameter pack expansions are marked
     PACK_EXPANSION_LOCAL_P in the body CALL, but not in
     DECLTYPE_CALL.  */

  vec<tree, va_gc> *direct_argvec = 0;
  tree decltype_call = 0, call = 0;
  tree optype = TREE_TYPE (callop);
  tree fn_result = TREE_TYPE (optype);

  if (generic_lambda_p)
    {
      /* Prepare the dependent member call for the static member
	 function '_FUN' and, potentially, prepare another call to be
	 used in a decltype return expression for a deduced return
	 call op to allow for simple implementation of the conversion
	 operator.  */

      tree instance = build_nop (type, null_pointer_node);
      tree objfn = build_min (COMPONENT_REF, NULL_TREE,
			      instance, DECL_NAME (callop), NULL_TREE);
      int nargs = list_length (DECL_ARGUMENTS (callop)) - 1;

      call = prepare_op_call (objfn, nargs);
      if (type_uses_auto (fn_result))
	decltype_call = prepare_op_call (objfn, nargs);
    }
  else
    {
      /* Non-generic: the 'this' argument is a null pointer of the
	 op()'s this-type; remaining args are pushed below.  */
      direct_argvec = make_tree_vector ();
      direct_argvec->quick_push (build1 (NOP_EXPR,
					 TREE_TYPE (DECL_ARGUMENTS (callop)),
					 null_pointer_node));
    }

  /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in
     order to declare the static member function "_FUN" below.  For
     each arg append to DIRECT_ARGVEC (for the non-template case) or
     populate the pre-allocated call args (for the template case).  If
     a parameter pack is found, expand it, flagging it as
     PACK_EXPANSION_LOCAL_P for the body call.  */

  tree fn_args = NULL_TREE;
  {
    int ix = 0;
    /* Skip 'this'; copy the remaining parameters one node at a time,
       chaining them onto FN_ARGS via TGT.  */
    tree src = DECL_CHAIN (DECL_ARGUMENTS (callop));
    tree tgt;

    while (src)
      {
	tree new_node = copy_node (src);

	if (!fn_args)
	  fn_args = tgt = new_node;
	else
	  {
	    TREE_CHAIN (tgt) = new_node;
	    tgt = new_node;
	  }

	mark_exp_read (tgt);

	if (generic_lambda_p)
	  {
	    if (DECL_PACK_P (tgt))
	      {
		/* Pack expansion: the decltype copy is made before
		   setting PACK_EXPANSION_LOCAL_P (see comment above).  */
		tree a = make_pack_expansion (tgt);
		if (decltype_call)
		  CALL_EXPR_ARG (decltype_call, ix) = copy_node (a);
		PACK_EXPANSION_LOCAL_P (a) = true;
		CALL_EXPR_ARG (call, ix) = a;
	      }
	    else
	      {
		tree a = convert_from_reference (tgt);
		CALL_EXPR_ARG (call, ix) = a;
		if (decltype_call)
		  CALL_EXPR_ARG (decltype_call, ix) = copy_node (a);
	      }
	    ++ix;
	  }
	else
	  vec_safe_push (direct_argvec, tgt);

	src = TREE_CHAIN (src);
      }
  }

  if (generic_lambda_p)
    {
      if (decltype_call)
	{
	  /* Deduce the return type from the decltype call, inside a
	     template context.  */
	  ++processing_template_decl;
	  fn_result = finish_decltype_type
	    (decltype_call, /*id_expression_or_member_access_p=*/false,
	     tf_warning_or_error);
	  --processing_template_decl;
	}
    }
  else
    call = build_call_a (callop, direct_argvec->length (),
			 direct_argvec->address ());

  CALL_FROM_THUNK_P (call) = 1;

  tree stattype = build_function_type (fn_result,
				       FUNCTION_ARG_CHAIN (callop));
  stattype = (cp_build_type_attribute_variant
	      (stattype, TYPE_ATTRIBUTES (optype)));

  /* First build up the conversion op.  */

  tree rettype = build_pointer_type (stattype);
  tree name = mangle_conv_op_name_for_type (rettype);
  tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
  tree fntype = build_method_type_directly (thistype, rettype,
					    void_list_node);
  tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
  tree fn = convfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  DECL_ALIGN (fn) = MINIMUM_METHOD_BOUNDARY;
  SET_OVERLOADED_OPERATOR_CODE (fn, TYPE_EXPR);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_ARGUMENTS (fn) = build_this_parm (fntype, TYPE_QUAL_CONST);
  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  add_method (type, fn, NULL_TREE);

  /* Generic thunk code fails for varargs; we'll complain in mark_used
     if the conversion op is used.  */
  if (varargs_function_p (callop))
    {
      DECL_DELETED_FN (fn) = 1;
      return;
    }

  /* Now build up the thunk to be returned.  */

  name = get_identifier ("_FUN");
  tree statfn = build_lang_decl (FUNCTION_DECL, name, stattype);
  fn = statfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_STATIC_FUNCTION_P (fn) = 1;
  DECL_ARGUMENTS (fn) = fn_args;
  for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
    {
      /* Avoid duplicate -Wshadow warnings.  */
      DECL_NAME (arg) = NULL_TREE;
      DECL_CONTEXT (arg) = fn;
    }
  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  if (flag_sanitize & SANITIZE_NULL)
    {
      /* Don't UBsan this function; we're deliberately calling op()
	 with a null object argument.  */
      tree attrs = build_tree_list (get_identifier ("no_sanitize_undefined"),
				    NULL_TREE);
      cplus_decl_attributes (&fn, attrs, 0);
    }

  add_method (type, fn, NULL_TREE);

  if (nested)
    push_function_context ();
  else
    /* Still increment function_depth so that we don't GC in the
       middle of an expression.  */
    ++function_depth;

  /* Generate the body of the thunk.  */

  start_preparsed_function (statfn, NULL_TREE,
			    SF_PRE_PARSED | SF_INCLASS_INLINE);
  if (DECL_ONE_ONLY (statfn))
    {
      /* Put the thunk in the same comdat group as the call op.  */
      cgraph_node::get_create (statfn)->add_to_same_comdat_group
	(cgraph_node::get_create (callop));
    }
  tree body = begin_function_body ();
  tree compound_stmt = begin_compound_stmt (0);
  if (!generic_lambda_p)
    {
      set_flags_from_callee (call);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
	call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
    }
  call = convert_from_reference (call);
  finish_return_stmt (call);

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline*/2);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  /* Generate the body of the conversion op.  */

  start_preparsed_function (convfn, NULL_TREE,
			    SF_PRE_PARSED | SF_INCLASS_INLINE);
  body = begin_function_body ();
  compound_stmt = begin_compound_stmt (0);

  /* decl_needed_p needs to see that it's used.  */
  TREE_USED (statfn) = 1;
  finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline*/2);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  if (nested)
    pop_function_context ();
  else
    --function_depth;
}
tree prepare_eh_table_type (tree type) { tree exp; tree *slot; const char *name; char *buf; tree decl; tree utf8_ref; /* The "type" (match_info) in a (Java) exception table is a pointer to: * a) NULL - meaning match any type in a try-finally. * b) a pointer to a pointer to a class. * c) a pointer to a pointer to a utf8_ref. The pointer is * rewritten to point to the appropriate class. */ if (type == NULL_TREE) return NULL_TREE; if (TYPE_TO_RUNTIME_MAP (output_class) == NULL) TYPE_TO_RUNTIME_MAP (output_class) = java_treetreehash_create (10); slot = java_treetreehash_new (TYPE_TO_RUNTIME_MAP (output_class), type); if (*slot != NULL) return TREE_VALUE (*slot); if (is_compiled_class (type) && !flag_indirect_dispatch) { name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))); buf = (char *) alloca (strlen (name) + 5); sprintf (buf, "%s_ref", name); decl = build_decl (input_location, VAR_DECL, get_identifier (buf), ptr_type_node); TREE_STATIC (decl) = 1; DECL_ARTIFICIAL (decl) = 1; DECL_IGNORED_P (decl) = 1; TREE_READONLY (decl) = 1; TREE_THIS_VOLATILE (decl) = 0; DECL_INITIAL (decl) = build_class_ref (type); layout_decl (decl, 0); pushdecl (decl); exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (decl)), decl); } else { utf8_ref = build_utf8_ref (DECL_NAME (TYPE_NAME (type))); name = IDENTIFIER_POINTER (DECL_NAME (TREE_OPERAND (utf8_ref, 0))); buf = (char *) alloca (strlen (name) + 5); sprintf (buf, "%s_ref", name); decl = build_decl (input_location, VAR_DECL, get_identifier (buf), utf8const_ptr_type); TREE_STATIC (decl) = 1; DECL_ARTIFICIAL (decl) = 1; DECL_IGNORED_P (decl) = 1; TREE_READONLY (decl) = 1; TREE_THIS_VOLATILE (decl) = 0; layout_decl (decl, 0); pushdecl (decl); exp = build1 (ADDR_EXPR, build_pointer_type (utf8const_ptr_type), decl); CONSTRUCTOR_APPEND_ELT (TYPE_CATCH_CLASSES (output_class), NULL_TREE, make_catch_class_record (exp, utf8_ref)); } exp = convert (ptr_type_node, exp); *slot = tree_cons (type, exp, NULL_TREE); return exp; }
static tree build_addr_sum (tree type, tree addr, tree offset) { tree ptr_type = build_pointer_type (type); return fold_build_pointer_plus (fold_convert (ptr_type, addr), offset); }
/* Process the ATTRIBUTES list for the decl or type *NODE, applying
   each recognized attribute via its registered handler and attaching
   it to the appropriate decl/type.  FLAGS is a mask of ATTR_FLAG_*
   values describing what *NODE is and what may follow it.  Returns a
   list of attributes that could not be applied here and should be
   retried on a later construct.  */
tree
decl_attributes (tree *node, tree attributes, int flags)
{
  tree a;
  tree returned_attrs = NULL_TREE;

  if (TREE_TYPE (*node) == error_mark_node || attributes == error_mark_node)
    return NULL_TREE;

  if (!attributes_initialized)
    init_attributes ();

  /* If this is a function and the user used #pragma GCC optimize, add the
     options to the attribute((optimize(...))) list.  */
  if (TREE_CODE (*node) == FUNCTION_DECL && current_optimize_pragma)
    {
      tree cur_attr = lookup_attribute ("optimize", attributes);
      tree opts = copy_list (current_optimize_pragma);

      if (! cur_attr)
	attributes
	  = tree_cons (get_identifier ("optimize"), opts, attributes);
      else
	TREE_VALUE (cur_attr) = chainon (opts, TREE_VALUE (cur_attr));
    }

  /* Record the prevailing optimization settings on the function if
     they differ from the defaults and nothing more specific is set.  */
  if (TREE_CODE (*node) == FUNCTION_DECL
      && optimization_current_node != optimization_default_node
      && !DECL_FUNCTION_SPECIFIC_OPTIMIZATION (*node))
    DECL_FUNCTION_SPECIFIC_OPTIMIZATION (*node) = optimization_current_node;

  /* If this is a function and the user used #pragma GCC target, add the
     options to the attribute((target(...))) list.  */
  if (TREE_CODE (*node) == FUNCTION_DECL
      && current_target_pragma
      && targetm.target_option.valid_attribute_p (*node, NULL_TREE,
						  current_target_pragma, 0))
    {
      tree cur_attr = lookup_attribute ("target", attributes);
      tree opts = copy_list (current_target_pragma);

      if (! cur_attr)
	attributes = tree_cons (get_identifier ("target"), opts, attributes);
      else
	TREE_VALUE (cur_attr) = chainon (opts, TREE_VALUE (cur_attr));
    }

  /* A "naked" function attribute implies "noinline" and "noclone" for
     those targets that support it.  */
  if (TREE_CODE (*node) == FUNCTION_DECL
      && attributes
      && lookup_attribute_spec (get_identifier ("naked"))
      && lookup_attribute ("naked", attributes) != NULL)
    {
      if (lookup_attribute ("noinline", attributes) == NULL)
	attributes = tree_cons (get_identifier ("noinline"), NULL, attributes);

      if (lookup_attribute ("noclone", attributes) == NULL)
	attributes = tree_cons (get_identifier ("noclone"), NULL, attributes);
    }

  /* Give the target a chance to add its own attributes first.  */
  targetm.insert_attributes (*node, &attributes);

  for (a = attributes; a; a = TREE_CHAIN (a))
    {
      tree ns = get_attribute_namespace (a);
      tree name = get_attribute_name (a);
      tree args = TREE_VALUE (a);
      /* ANODE may be redirected below from the decl to its type, or
	 to the target of a function-pointer type.  */
      tree *anode = node;
      const struct attribute_spec *spec
	= lookup_scoped_attribute_spec (ns, name);
      bool no_add_attrs = 0;
      int fn_ptr_quals = 0;
      tree fn_ptr_tmp = NULL_TREE;

      if (spec == NULL)
	{
	  if (!(flags & (int) ATTR_FLAG_BUILT_IN))
	    {
	      if (ns == NULL_TREE || !cxx11_attribute_p (a))
		warning (OPT_Wattributes, "%qE attribute directive ignored",
			 name);
	      else
		warning (OPT_Wattributes,
			 "%<%E::%E%> scoped attribute directive ignored",
			 ns, name);
	    }
	  continue;
	}
      else if (list_length (args) < spec->min_length
	       || (spec->max_length >= 0
		   && list_length (args) > spec->max_length))
	{
	  error ("wrong number of arguments specified for %qE attribute",
		 name);
	  continue;
	}
      gcc_assert (is_attribute_p (spec->name, name));

      if (TYPE_P (*node)
	  && cxx11_attribute_p (a)
	  && !(flags & ATTR_FLAG_TYPE_IN_PLACE))
	{
	  /* This is a c++11 attribute that appertains to a
	     type-specifier, outside of the definition of, a class
	     type.  Ignore it.  */
	  if (warning (OPT_Wattributes, "attribute ignored"))
	    inform (input_location,
		    "an attribute that appertains to a type-specifier "
		    "is ignored");
	  continue;
	}

      if (spec->decl_required && !DECL_P (*anode))
	{
	  if (flags & ((int) ATTR_FLAG_DECL_NEXT
		       | (int) ATTR_FLAG_FUNCTION_NEXT
		       | (int) ATTR_FLAG_ARRAY_NEXT))
	    {
	      /* Pass on this attribute to be tried again.  */
	      returned_attrs = tree_cons (name, args, returned_attrs);
	      continue;
	    }
	  else
	    {
	      warning (OPT_Wattributes,
		       "%qE attribute does not apply to types", name);
	      continue;
	    }
	}

      /* If we require a type, but were passed a decl, set up to make a
	 new type and update the one in the decl.  ATTR_FLAG_TYPE_IN_PLACE
	 would have applied if we'd been passed a type, but we cannot modify
	 the decl's type in place here.  */
      if (spec->type_required && DECL_P (*anode))
	{
	  anode = &TREE_TYPE (*anode);
	  flags &= ~(int) ATTR_FLAG_TYPE_IN_PLACE;
	}

      if (spec->function_type_required
	  && TREE_CODE (*anode) != FUNCTION_TYPE
	  && TREE_CODE (*anode) != METHOD_TYPE)
	{
	  if (TREE_CODE (*anode) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (*anode)) == FUNCTION_TYPE
		  || TREE_CODE (TREE_TYPE (*anode)) == METHOD_TYPE))
	    {
	      /* OK, this is a bit convoluted.  We can't just make a copy
		 of the pointer type and modify its TREE_TYPE, because if
		 we change the attributes of the target type the pointer
		 type needs to have a different TYPE_MAIN_VARIANT.  So we
		 pull out the target type now, frob it as appropriate, and
		 rebuild the pointer type later.

		 This would all be simpler if attributes were part of the
		 declarator, grumble grumble.  */
	      fn_ptr_tmp = TREE_TYPE (*anode);
	      fn_ptr_quals = TYPE_QUALS (*anode);
	      anode = &fn_ptr_tmp;
	      flags &= ~(int) ATTR_FLAG_TYPE_IN_PLACE;
	    }
	  else if (flags & (int) ATTR_FLAG_FUNCTION_NEXT)
	    {
	      /* Pass on this attribute to be tried again.  */
	      returned_attrs = tree_cons (name, args, returned_attrs);
	      continue;
	    }

	  if (TREE_CODE (*anode) != FUNCTION_TYPE
	      && TREE_CODE (*anode) != METHOD_TYPE)
	    {
	      warning (OPT_Wattributes,
		       "%qE attribute only applies to function types",
		       name);
	      continue;
	    }
	}

      if (TYPE_P (*anode)
	  && (flags & (int) ATTR_FLAG_TYPE_IN_PLACE)
	  && TYPE_SIZE (*anode) != NULL_TREE)
	{
	  warning (OPT_Wattributes,
		   "type attributes ignored after type is already defined");
	  continue;
	}

      if (spec->handler != NULL)
	{
	  int cxx11_flag = cxx11_attribute_p (a) ? ATTR_FLAG_CXX11 : 0;

	  /* The handler may veto attachment via NO_ADD_ATTRS and may
	     itself return attributes to retry later.  */
	  returned_attrs = chainon ((*spec->handler) (anode, name, args,
						      flags|cxx11_flag,
						      &no_add_attrs),
				    returned_attrs);
	}

      /* Layout the decl in case anything changed.  */
      if (spec->type_required && DECL_P (*node)
	  && (VAR_P (*node)
	      || TREE_CODE (*node) == PARM_DECL
	      || TREE_CODE (*node) == RESULT_DECL))
	relayout_decl (*node);

      if (!no_add_attrs)
	{
	  tree old_attrs;
	  tree a;

	  if (DECL_P (*anode))
	    old_attrs = DECL_ATTRIBUTES (*anode);
	  else
	    old_attrs = TYPE_ATTRIBUTES (*anode);

	  /* Skip attaching if an identical attribute is already on
	     the list.  */
	  for (a = lookup_attribute (spec->name, old_attrs);
	       a != NULL_TREE;
	       a = lookup_attribute (spec->name, TREE_CHAIN (a)))
	    {
	      if (simple_cst_equal (TREE_VALUE (a), args) == 1)
		break;
	    }

	  if (a == NULL_TREE)
	    {
	      /* This attribute isn't already in the list.  */
	      if (DECL_P (*anode))
		DECL_ATTRIBUTES (*anode) = tree_cons (name, args, old_attrs);
	      else if (flags & (int) ATTR_FLAG_TYPE_IN_PLACE)
		{
		  TYPE_ATTRIBUTES (*anode) = tree_cons (name, args, old_attrs);
		  /* If this is the main variant, also push the attributes
		     out to the other variants.  */
		  if (*anode == TYPE_MAIN_VARIANT (*anode))
		    {
		      tree variant;
		      for (variant = *anode; variant;
			   variant = TYPE_NEXT_VARIANT (variant))
			{
			  if (TYPE_ATTRIBUTES (variant) == old_attrs)
			    TYPE_ATTRIBUTES (variant)
			      = TYPE_ATTRIBUTES (*anode);
			  else if (!lookup_attribute
				   (spec->name, TYPE_ATTRIBUTES (variant)))
			    TYPE_ATTRIBUTES (variant) = tree_cons
			      (name, args, TYPE_ATTRIBUTES (variant));
			}
		    }
		}
	      else
		*anode = build_type_attribute_variant (*anode,
						       tree_cons (name, args,
								  old_attrs));
	    }
	}

      if (fn_ptr_tmp)
	{
	  /* Rebuild the function pointer type and put it in the
	     appropriate place.  */
	  fn_ptr_tmp = build_pointer_type (fn_ptr_tmp);
	  if (fn_ptr_quals)
	    fn_ptr_tmp = build_qualified_type (fn_ptr_tmp, fn_ptr_quals);
	  if (DECL_P (*node))
	    TREE_TYPE (*node) = fn_ptr_tmp;
	  else
	    {
	      gcc_assert (TREE_CODE (*node) == POINTER_TYPE);
	      *node = fn_ptr_tmp;
	    }
	}
    }

  return returned_attrs;
}
/* Instrument the array reference ARRAY[*INDEX] at LOC with a UBSan
   bounds check.  IGNORE_OFF_BY_ONE permits an index one past the end
   (address-of-end idiom).  Returns the IFN_UBSAN_BOUNDS call to emit,
   or NULL_TREE when no instrumentation is needed.  *INDEX is wrapped
   in a save_expr when instrumentation is emitted.  */
tree
ubsan_instrument_bounds (location_t loc, tree array, tree *index,
			 bool ignore_off_by_one)
{
  tree type = TREE_TYPE (array);
  tree domain = TYPE_DOMAIN (type);

  /* No known bound — nothing to check against.  */
  if (domain == NULL_TREE || TYPE_MAX_VALUE (domain) == NULL_TREE)
    return NULL_TREE;

  tree bound = TYPE_MAX_VALUE (domain);
  if (ignore_off_by_one)
    bound = fold_build2 (PLUS_EXPR, TREE_TYPE (bound), bound,
			 build_int_cst (TREE_TYPE (bound), 1));

  /* Detect flexible array members and suchlike, unless
     -fsanitize=bounds-strict.  */
  tree base = get_base_address (array);
  if ((flag_sanitize & SANITIZE_BOUNDS_STRICT) == 0
      && TREE_CODE (array) == COMPONENT_REF
      && base && (INDIRECT_REF_P (base) || TREE_CODE (base) == MEM_REF))
    {
      tree next = NULL_TREE;
      tree cref = array;

      /* Walk all structs/unions.  */
      while (TREE_CODE (cref) == COMPONENT_REF)
	{
	  /* NEXT becomes the FIELD_DECL following this one, if any;
	     unions (non-RECORD_TYPE) leave NEXT as NULL_TREE.  */
	  if (TREE_CODE (TREE_TYPE (TREE_OPERAND (cref, 0))) == RECORD_TYPE)
	    for (next = DECL_CHAIN (TREE_OPERAND (cref, 1));
		 next && TREE_CODE (next) != FIELD_DECL;
		 next = DECL_CHAIN (next))
	      ;
	  if (next)
	    /* Not a last element.  Instrument it.  */
	    break;
	  /* Ok, this is the last field of the structure/union.  But the
	     aggregate containing the field must be the last field too,
	     recursively.  */
	  cref = TREE_OPERAND (cref, 0);
	}
      if (!next)
	/* Don't instrument this flexible array member-like array in
	   non-strict -fsanitize=bounds mode.  */
	return NULL_TREE;
    }

  /* Don't emit instrumentation in the most common cases: a constant
     index (or an index masked to a constant range) already known to
     be within bounds.  */
  tree idx = NULL_TREE;
  if (TREE_CODE (*index) == INTEGER_CST)
    idx = *index;
  else if (TREE_CODE (*index) == BIT_AND_EXPR
	   && TREE_CODE (TREE_OPERAND (*index, 1)) == INTEGER_CST)
    idx = TREE_OPERAND (*index, 1);
  if (idx
      && TREE_CODE (bound) == INTEGER_CST
      && tree_int_cst_sgn (idx) >= 0
      && tree_int_cst_le (idx, bound))
    return NULL_TREE;

  *index = save_expr (*index);
  /* Create a "(T *) 0" tree node to describe the array type.  */
  tree zero_with_type = build_int_cst (build_pointer_type (type), 0);
  return build_call_expr_internal_loc (loc, IFN_UBSAN_BOUNDS,
				       void_type_node, 3, zero_with_type,
				       *index, bound);
}
tree expand_start_catch_block (tree decl) { tree exp = NULL_TREE; tree type; bool is_java; if (! doing_eh (1)) return NULL_TREE; /* Make sure this declaration is reasonable. */ if (decl && !complete_ptr_ref_or_void_ptr_p (TREE_TYPE (decl), NULL_TREE)) decl = NULL_TREE; if (decl) type = prepare_eh_type (TREE_TYPE (decl)); else type = NULL_TREE; is_java = false; if (decl) { tree init; if (decl_is_java_type (type, 1)) { /* Java only passes object via pointer and doesn't require adjusting. The java object is immediately before the generic exception header. */ init = build_exc_ptr (); init = build1 (NOP_EXPR, build_pointer_type (type), init); init = build (MINUS_EXPR, TREE_TYPE (init), init, TYPE_SIZE_UNIT (TREE_TYPE (init))); init = build_indirect_ref (init, NULL); is_java = true; } else { /* C++ requires that we call __cxa_begin_catch to get the pointer to the actual object. */ init = do_begin_catch (); } exp = create_temporary_var (ptr_type_node); DECL_REGISTER (exp) = 1; cp_finish_decl (exp, init, NULL_TREE, LOOKUP_ONLYCONVERTING); finish_expr_stmt (build_modify_expr (exp, INIT_EXPR, init)); } else finish_expr_stmt (do_begin_catch ()); /* C++ requires that we call __cxa_end_catch at the end of processing the exception. */ if (! is_java) push_eh_cleanup (type); if (decl) initialize_handler_parm (decl, exp); return type; }
/* Register all AdvSIMD builtin functions described in
   aarch64_simd_builtin_data.  For each entry, a function type is derived
   from the RTL insn's operand modes plus the per-operand qualifiers, a
   name of the form __builtin_aarch64_<name>[_<signature>] is built, and
   the resulting decl is recorded in aarch64_builtin_decls.  */
static void
aarch64_init_simd_builtins (void)
{
  unsigned int i, fcode = AARCH64_SIMD_BUILTIN_BASE + 1;

  aarch64_init_simd_builtin_types ();

  /* Strong-typing hasn't been implemented for all AdvSIMD builtin intrinsics.
     Therefore we need to preserve the old __builtin scalar types.  It can be
     removed once all the intrinsics become strongly typed using the qualifier
     system.  */
  aarch64_init_simd_builtin_scalar_types ();

  for (i = 0; i < ARRAY_SIZE (aarch64_simd_builtin_data); i++, fcode++)
    {
      bool print_type_signature_p = false;
      char type_signature[SIMD_MAX_BUILTIN_ARGS] = { 0 };
      aarch64_simd_builtin_datum *d = &aarch64_simd_builtin_data[i];
      char namebuf[60];
      tree ftype = NULL;
      tree fndecl = NULL;

      d->fcode = fcode;

      /* We must track two variables here.  op_num is
	 the operand number as in the RTL pattern.  This is
	 required to access the mode (e.g. V4SF mode) of the
	 argument, from which the base type can be derived.
	 arg_num is an index in to the qualifiers data, which
	 gives qualifiers to the type (e.g. const unsigned).
	 The reason these two variables may differ by one is the
	 void return type.  While all return types take the 0th entry
	 in the qualifiers array, there is no operand for them in the
	 RTL pattern.  */
      int op_num = insn_data[d->code].n_operands - 1;
      int arg_num = d->qualifiers[0] & qualifier_void
		      ? op_num + 1
		      : op_num;
      tree return_type = void_type_node, args = void_list_node;
      tree eltype;

      /* Build a function type directly from the insn_data for this
	 builtin.  The build_function_type () function takes care of
	 removing duplicates for us.  */
      for (; op_num >= 0; arg_num--, op_num--)
	{
	  machine_mode op_mode = insn_data[d->code].operand[op_num].mode;
	  enum aarch64_type_qualifiers qualifiers = d->qualifiers[arg_num];

	  /* Record a one-character tag per operand so the mangled
	     signature reflects unsigned/poly variants.  */
	  if (qualifiers & qualifier_unsigned)
	    {
	      type_signature[arg_num] = 'u';
	      print_type_signature_p = true;
	    }
	  else if (qualifiers & qualifier_poly)
	    {
	      type_signature[arg_num] = 'p';
	      print_type_signature_p = true;
	    }
	  else
	    type_signature[arg_num] = 's';

	  /* Skip an internal operand for vget_{low, high}.  */
	  if (qualifiers & qualifier_internal)
	    continue;

	  /* Some builtins have different user-facing types
	     for certain arguments, encoded in d->mode.  */
	  if (qualifiers & qualifier_map_mode)
	    op_mode = d->mode;

	  /* For pointers, we want a pointer to the basic type
	     of the vector.  */
	  if (qualifiers & qualifier_pointer && VECTOR_MODE_P (op_mode))
	    op_mode = GET_MODE_INNER (op_mode);

	  eltype = aarch64_simd_builtin_type
		     (op_mode,
		      (qualifiers & qualifier_unsigned) != 0,
		      (qualifiers & qualifier_poly) != 0);
	  gcc_assert (eltype != NULL);

	  /* Add qualifiers.  */
	  if (qualifiers & qualifier_const)
	    eltype = build_qualified_type (eltype, TYPE_QUAL_CONST);

	  if (qualifiers & qualifier_pointer)
	    eltype = build_pointer_type (eltype);

	  /* If we have reached arg_num == 0, we are at a non-void
	     return type.  Otherwise, we are still processing
	     arguments.  */
	  if (arg_num == 0)
	    return_type = eltype;
	  else
	    args = tree_cons (NULL_TREE, eltype, args);
	}

      ftype = build_function_type (return_type, args);
      gcc_assert (ftype != NULL);

      if (print_type_signature_p)
	snprintf (namebuf, sizeof (namebuf), "__builtin_aarch64_%s_%s",
		  d->name, type_signature);
      else
	snprintf (namebuf, sizeof (namebuf), "__builtin_aarch64_%s",
		  d->name);

      fndecl = add_builtin_function (namebuf, ftype, fcode, BUILT_IN_MD,
				     NULL, NULL_TREE);
      aarch64_builtin_decls[fcode] = fndecl;
    }
}
/* Instrument the current function with call tracing: wrap its body so
   that __start_ctrace__ runs on entry and __end_ctrace__ runs on every
   exit path (via GIMPLE_TRY_FINALLY), passing a per-function record and
   the function's name.  Returns 0 (no pass-specific TODO flags).  */
static unsigned int
execute_trace ()
{
  gimple_seq body, body_bind_body, inner_cleanup, outer_cleanup;
  gimple inner_try, outer_try;
  tree record_type, func_start_decl, func_end_decl, var_decl,
    function_name_decl, constructor_clobber;
  gimple call_func_start;
  gimple_stmt_iterator gsi;

  // build record type
  record_type = build_type ();
  // build start & end function decl
  func_start_decl = build_function_decl ("__start_ctrace__", record_type);
  func_end_decl = build_function_decl ("__end_ctrace__", record_type);

  // init variables of current body
  // (assumes gimple_body returns the outermost GIMPLE_BIND -- the
  //  gimple_bind_* accessors below rely on that; TODO confirm)
  body = gimple_body (current_function_decl);
  var_decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			 get_identifier ("__ctrace_var__"), record_type);
  DECL_CONTEXT (var_decl) = current_function_decl;
  TREE_ADDRESSABLE (var_decl) = 1;
  declare_vars (var_decl, body, false);
  TREE_USED (var_decl) = 1;

  // mimic __FUNCTION__ builtin.
  function_name_decl = make_fname_decl ();
  declare_vars (function_name_decl, body, false);

  // construct inner try
  // init calls: __start_ctrace__ (&__ctrace_var__, &__FUNCTION__)
  call_func_start
    = gimple_build_call (func_start_decl, 2,
			 build1 (ADDR_EXPR, build_pointer_type (record_type),
				 var_decl),
			 build1 (ADDR_EXPR,
				 build_pointer_type
				   (TREE_TYPE (function_name_decl)),
				 function_name_decl));
  // make inner clean up: __end_ctrace__ with the same two arguments
  inner_cleanup
    = gimple_build_call (func_end_decl, 2,
			 build1 (ADDR_EXPR, build_pointer_type (record_type),
				 var_decl),
			 build1 (ADDR_EXPR,
				 build_pointer_type
				   (TREE_TYPE (function_name_decl)),
				 function_name_decl));
  // update inner try: original bind body becomes the protected region,
  // then the start call is inserted ahead of the try statement.
  body_bind_body = gimple_bind_body (body);
  inner_try
    = gimple_build_try (body_bind_body, inner_cleanup, GIMPLE_TRY_FINALLY);
  gsi = gsi_start (inner_try);
  gsi_insert_before (&gsi, call_func_start, GSI_NEW_STMT);

  // construct outer try: clobber __ctrace_var__ on exit so its lifetime
  // ends properly.
  constructor_clobber = make_node (CONSTRUCTOR);
  TREE_THIS_VOLATILE (constructor_clobber) = 1;
  TREE_TYPE (constructor_clobber) = TREE_TYPE (var_decl);
  outer_cleanup = gimple_build_assign (var_decl, constructor_clobber);

  // update outer try
  // (call_func_start now heads the sequence built above, so it is used
  //  as the outer protected body; presumably the whole sequence is
  //  picked up -- verify against the gimple_seq representation in use)
  outer_try
    = gimple_build_try (call_func_start, outer_cleanup, GIMPLE_TRY_FINALLY);

  // update body bind body
  gimple_bind_set_body (body, outer_try);

  if (dump_file)
    {
      dump_function_to_file (current_function_decl, dump_file,
			     TDF_TREE | TDF_BLOCKS | TDF_VERBOSE);
    }
  // exit (0);
  return 0;
}
/* Process the ATTRIBUTES list for the declaration or type *NODE, applying
   each recognized attribute via its registered handler and attaching it to
   the decl's or type's attribute list.  FLAGS is a mask of ATTR_FLAG_*
   values describing what *NODE is and which later declarator positions an
   unsuitable attribute may be passed on to.  Returns the list of
   attributes that could not be applied here and should be tried again
   later (NULL_TREE if none).  */
tree
decl_attributes (tree *node, tree attributes, int flags)
{
  tree a;
  tree returned_attrs = NULL_TREE;

  if (!attributes_initialized)
    init_attributes ();

  /* Give the target a chance to add its own attributes first.  */
  targetm.insert_attributes (*node, &attributes);

  for (a = attributes; a; a = TREE_CHAIN (a))
    {
      tree name = TREE_PURPOSE (a);
      tree args = TREE_VALUE (a);
      tree *anode = node;
      const struct attribute_spec *spec = NULL;
      bool no_add_attrs = 0;
      tree fn_ptr_tmp = NULL_TREE;
      size_t i;

      /* Look the attribute up in every registered attribute table.  */
      for (i = 0; i < ARRAY_SIZE (attribute_tables); i++)
	{
	  int j;

	  for (j = 0; attribute_tables[i][j].name != NULL; j++)
	    {
	      if (is_attribute_p (attribute_tables[i][j].name, name))
		{
		  spec = &attribute_tables[i][j];
		  break;
		}
	    }
	  if (spec != NULL)
	    break;
	}

      if (spec == NULL)
	{
	  warning (OPT_Wattributes, "%qs attribute directive ignored",
		   IDENTIFIER_POINTER (name));
	  continue;
	}
      else if (list_length (args) < spec->min_length
	       || (spec->max_length >= 0
		   && list_length (args) > spec->max_length))
	{
	  error ("wrong number of arguments specified for %qs attribute",
		 IDENTIFIER_POINTER (name));
	  continue;
	}

      if (spec->decl_required && !DECL_P (*anode))
	{
	  if (flags & ((int) ATTR_FLAG_DECL_NEXT
		       | (int) ATTR_FLAG_FUNCTION_NEXT
		       | (int) ATTR_FLAG_ARRAY_NEXT))
	    {
	      /* Pass on this attribute to be tried again.  */
	      returned_attrs = tree_cons (name, args, returned_attrs);
	      continue;
	    }
	  else
	    {
	      warning (OPT_Wattributes, "%qs attribute does not apply to types",
		       IDENTIFIER_POINTER (name));
	      continue;
	    }
	}

      /* If we require a type, but were passed a decl, set up to make a
	 new type and update the one in the decl.  ATTR_FLAG_TYPE_IN_PLACE
	 would have applied if we'd been passed a type, but we cannot
	 modify the decl's type in place here.  */
      if (spec->type_required && DECL_P (*anode))
	{
	  anode = &TREE_TYPE (*anode);
	  flags &= ~(int) ATTR_FLAG_TYPE_IN_PLACE;
	}

      if (spec->function_type_required && TREE_CODE (*anode) != FUNCTION_TYPE
	  && TREE_CODE (*anode) != METHOD_TYPE)
	{
	  if (TREE_CODE (*anode) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (*anode)) == FUNCTION_TYPE
		  || TREE_CODE (TREE_TYPE (*anode)) == METHOD_TYPE))
	    {
	      /* OK, this is a bit convoluted.  We can't just make a copy
		 of the pointer type and modify its TREE_TYPE, because if
		 we change the attributes of the target type the pointer
		 type needs to have a different TYPE_MAIN_VARIANT.  So we
		 pull out the target type now, frob it as appropriate, and
		 rebuild the pointer type later.

		 This would all be simpler if attributes were part of the
		 declarator, grumble grumble.  */
	      fn_ptr_tmp = TREE_TYPE (*anode);
	      anode = &fn_ptr_tmp;
	      flags &= ~(int) ATTR_FLAG_TYPE_IN_PLACE;
	    }
	  else if (flags & (int) ATTR_FLAG_FUNCTION_NEXT)
	    {
	      /* Pass on this attribute to be tried again.  */
	      returned_attrs = tree_cons (name, args, returned_attrs);
	      continue;
	    }

	  if (TREE_CODE (*anode) != FUNCTION_TYPE
	      && TREE_CODE (*anode) != METHOD_TYPE)
	    {
	      warning (OPT_Wattributes,
		       "%qs attribute only applies to function types",
		       IDENTIFIER_POINTER (name));
	      continue;
	    }
	}

      /* Attributes cannot change the layout of an already-defined type.  */
      if (TYPE_P (*anode)
	  && (flags & (int) ATTR_FLAG_TYPE_IN_PLACE)
	  && TYPE_SIZE (*anode) != NULL_TREE)
	{
	  warning (OPT_Wattributes,
		   "type attributes ignored after type is already defined");
	  continue;
	}

      if (spec->handler != NULL)
	returned_attrs = chainon ((*spec->handler) (anode, name, args,
						    flags, &no_add_attrs),
				  returned_attrs);

      /* Layout the decl in case anything changed.  */
      if (spec->type_required && DECL_P (*node)
	  && (TREE_CODE (*node) == VAR_DECL
	      || TREE_CODE (*node) == PARM_DECL
	      || TREE_CODE (*node) == RESULT_DECL))
	relayout_decl (*node);

      if (!no_add_attrs)
	{
	  tree old_attrs;
	  tree a;

	  if (DECL_P (*anode))
	    old_attrs = DECL_ATTRIBUTES (*anode);
	  else
	    old_attrs = TYPE_ATTRIBUTES (*anode);

	  /* Avoid duplicating an attribute already present with the
	     same arguments.  */
	  for (a = lookup_attribute (spec->name, old_attrs);
	       a != NULL_TREE;
	       a = lookup_attribute (spec->name, TREE_CHAIN (a)))
	    {
	      if (simple_cst_equal (TREE_VALUE (a), args) == 1)
		break;
	    }

	  if (a == NULL_TREE)
	    {
	      /* This attribute isn't already in the list.  */
	      if (DECL_P (*anode))
		DECL_ATTRIBUTES (*anode) = tree_cons (name, args, old_attrs);
	      else if (flags & (int) ATTR_FLAG_TYPE_IN_PLACE)
		{
		  TYPE_ATTRIBUTES (*anode) = tree_cons (name, args, old_attrs);
		  /* If this is the main variant, also push the attributes
		     out to the other variants.  */
		  if (*anode == TYPE_MAIN_VARIANT (*anode))
		    {
		      tree variant;
		      for (variant = *anode; variant;
			   variant = TYPE_NEXT_VARIANT (variant))
			{
			  if (TYPE_ATTRIBUTES (variant) == old_attrs)
			    TYPE_ATTRIBUTES (variant)
			      = TYPE_ATTRIBUTES (*anode);
			  else if (!lookup_attribute
				   (spec->name, TYPE_ATTRIBUTES (variant)))
			    TYPE_ATTRIBUTES (variant) = tree_cons
			      (name, args, TYPE_ATTRIBUTES (variant));
			}
		    }
		}
	      else
		*anode = build_type_attribute_variant (*anode,
						       tree_cons (name, args,
								  old_attrs));
	    }
	}

      if (fn_ptr_tmp)
	{
	  /* Rebuild the function pointer type and put it in the
	     appropriate place.  */
	  fn_ptr_tmp = build_pointer_type (fn_ptr_tmp);
	  if (DECL_P (*node))
	    TREE_TYPE (*node) = fn_ptr_tmp;
	  else
	    {
	      gcc_assert (TREE_CODE (*node) == POINTER_TYPE);
	      *node = fn_ptr_tmp;
	    }
	}
    }

  return returned_attrs;
}
/* Synthesize a CALL_EXPR and a TRY_FINALLY_EXPR, for this chain of _DECLs
   if appropriate.  Arrange to call the __mf_register function now, and the
   __mf_unregister function later for each.  DECL heads the chain of local
   declarations, SEQ is the statement sequence they belong to, and LOCATION
   is attributed to the synthesized calls.  Return the gimple sequence
   after synthesis (either SEQ itself, or SEQ wrapped in a
   GIMPLE_TRY_FINALLY that performs the unregistrations).  */
gimple_seq
mx_register_decls (tree decl, gimple_seq seq, location_t location)
{
  gimple_seq finally_stmts = NULL;
  gimple_stmt_iterator initially_stmts = gsi_start (seq);

  while (decl != NULL_TREE)
    {
      if (mf_decl_eligible_p (decl)
	  /* Not already processed.  */
	  && ! mf_marked_p (decl)
	  /* Automatic variable.  */
	  && ! DECL_EXTERNAL (decl)
	  && ! TREE_STATIC (decl))
	{
	  tree size = NULL_TREE, variable_name;
	  gimple unregister_fncall, register_fncall;
	  tree unregister_fncall_param, register_fncall_param;

	  /* Variable-sized objects should have sizes already been
	     gimplified when we got here.  */
	  size = fold_convert (size_type_node,
			       TYPE_SIZE_UNIT (TREE_TYPE (decl)));
	  gcc_assert (is_gimple_val (size));

	  unregister_fncall_param =
	    mf_mark (build1 (ADDR_EXPR,
			     build_pointer_type (TREE_TYPE (decl)),
			     decl));
	  /* __mf_unregister (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK) */
	  unregister_fncall = gimple_build_call (mf_unregister_fndecl, 3,
						 unregister_fncall_param,
						 size,
						 integer_three_node);

	  variable_name = mf_varname_tree (decl);
	  register_fncall_param =
	    mf_mark (build1 (ADDR_EXPR,
			     build_pointer_type (TREE_TYPE (decl)),
			     decl));
	  /* __mf_register (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK,
			    "name") */
	  register_fncall = gimple_build_call (mf_register_fndecl, 4,
					       register_fncall_param,
					       size,
					       integer_three_node,
					       variable_name);

	  /* Accumulate the two calls.  */
	  gimple_set_location (register_fncall, location);
	  gimple_set_location (unregister_fncall, location);

	  /* Add the __mf_register call at the current appending point.  */
	  if (gsi_end_p (initially_stmts))
	    {
	      /* An empty sequence means there is nowhere to hook the
		 registration; warn unless the decl is compiler-generated.  */
	      if (!mf_artificial (decl))
		warning (OPT_Wmudflap,
			 "mudflap cannot track %qE in stub function",
			 DECL_NAME (decl));
	    }
	  else
	    {
	      gsi_insert_before (&initially_stmts, register_fncall,
				 GSI_SAME_STMT);

	      /* Accumulate the FINALLY piece.  */
	      gimple_seq_add_stmt (&finally_stmts, unregister_fncall);
	    }
	  mf_mark (decl);
	}

      decl = DECL_CHAIN (decl);
    }

  /* Actually, (initially_stmts!=NULL) <=> (finally_stmts!=NULL) */
  if (finally_stmts != NULL)
    {
      gimple stmt = gimple_build_try (seq, finally_stmts,
				      GIMPLE_TRY_FINALLY);
      gimple_seq new_seq = NULL;

      gimple_seq_add_stmt (&new_seq, stmt);
      return new_seq;
    }
   else
    return seq;
}
/* Emit statement STMT of output block OB to the LTO bytecode stream:
   an identifying tag, a bitpacked tuple header (operand count, flags,
   subcode, location), the lexical block, then the per-code payload and
   operands.  Non-automatic variables used as operands are temporarily
   wrapped in MEM_REFs while streaming so symbol merging at read-in does
   not hit type mismatches; the wrapping is undone before returning.  */
static void
output_gimple_stmt (struct output_block *ob, gimple *stmt)
{
  unsigned i;
  enum gimple_code code;
  enum LTO_tags tag;
  struct bitpack_d bp;
  histogram_value hist;

  /* Emit identifying tag.  */
  code = gimple_code (stmt);
  tag = lto_gimple_code_to_tag (code);
  streamer_write_record_start (ob, tag);

  /* Emit the tuple header.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_var_len_unsigned (&bp, gimple_num_ops (stmt));
  bp_pack_value (&bp, gimple_no_warning_p (stmt), 1);
  if (is_gimple_assign (stmt))
    bp_pack_value (&bp,
		   gimple_assign_nontemporal_move_p (
		     as_a <gassign *> (stmt)),
		   1);
  bp_pack_value (&bp, gimple_has_volatile_ops (stmt), 1);
  hist = gimple_histogram_value (cfun, stmt);
  bp_pack_value (&bp, hist != NULL, 1);
  bp_pack_var_len_unsigned (&bp, stmt->subcode);

  /* Emit location information for the statement.  */
  stream_output_location (ob, &bp, LOCATION_LOCUS (gimple_location (stmt)));
  streamer_write_bitpack (&bp);

  /* Emit the lexical block holding STMT.  */
  stream_write_tree (ob, gimple_block (stmt), true);

  /* Emit the operands.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_RESX:
      streamer_write_hwi (ob, gimple_resx_region (as_a <gresx *> (stmt)));
      break;

    case GIMPLE_EH_MUST_NOT_THROW:
      stream_write_tree (ob,
			 gimple_eh_must_not_throw_fndecl (
			   as_a <geh_mnt *> (stmt)),
			 true);
      break;

    case GIMPLE_EH_DISPATCH:
      streamer_write_hwi (ob,
			  gimple_eh_dispatch_region (
			    as_a <geh_dispatch *> (stmt)));
      break;

    case GIMPLE_ASM:
      {
	/* ASM statements stream their operand counts and template string
	   before falling through to the generic operand loop.  */
	gasm *asm_stmt = as_a <gasm *> (stmt);
	streamer_write_uhwi (ob, gimple_asm_ninputs (asm_stmt));
	streamer_write_uhwi (ob, gimple_asm_noutputs (asm_stmt));
	streamer_write_uhwi (ob, gimple_asm_nclobbers (asm_stmt));
	streamer_write_uhwi (ob, gimple_asm_nlabels (asm_stmt));
	streamer_write_string (ob, ob->main_stream,
			       gimple_asm_string (asm_stmt), true);
      }
      /* Fallthru  */

    case GIMPLE_ASSIGN:
    case GIMPLE_CALL:
    case GIMPLE_RETURN:
    case GIMPLE_SWITCH:
    case GIMPLE_LABEL:
    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_DEBUG:
      for (i = 0; i < gimple_num_ops (stmt); i++)
	{
	  tree op = gimple_op (stmt, i);
	  tree *basep = NULL;
	  /* Wrap all uses of non-automatic variables inside MEM_REFs
	     so that we do not have to deal with type mismatches on
	     merged symbols during IL read in.  The first operand
	     of GIMPLE_DEBUG must be a decl, not MEM_REF, though.  */
	  if (op && (i || !is_gimple_debug (stmt)))
	    {
	      basep = &op;
	      if (TREE_CODE (*basep) == ADDR_EXPR)
		basep = &TREE_OPERAND (*basep, 0);
	      while (handled_component_p (*basep))
		basep = &TREE_OPERAND (*basep, 0);
	      if (TREE_CODE (*basep) == VAR_DECL
		  && !auto_var_in_fn_p (*basep, current_function_decl)
		  && !DECL_REGISTER (*basep))
		{
		  bool volatilep = TREE_THIS_VOLATILE (*basep);
		  tree ptrtype = build_pointer_type (TREE_TYPE (*basep));
		  *basep = build2 (MEM_REF, TREE_TYPE (*basep),
				   build1 (ADDR_EXPR, ptrtype, *basep),
				   build_int_cst (ptrtype, 0));
		  TREE_THIS_VOLATILE (*basep) = volatilep;
		}
	      else
		basep = NULL;
	    }
	  stream_write_tree (ob, op, true);
	  /* Restore the original base if we wrapped it inside a MEM_REF.  */
	  if (basep)
	    *basep = TREE_OPERAND (TREE_OPERAND (*basep, 0), 0);
	}
      if (is_gimple_call (stmt))
	{
	  if (gimple_call_internal_p (stmt))
	    streamer_write_enum (ob->main_stream, internal_fn,
				 IFN_LAST, gimple_call_internal_fn (stmt));
	  else
	    stream_write_tree (ob, gimple_call_fntype (stmt), true);
	}
      break;

    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
      break;

    case GIMPLE_TRANSACTION:
      {
	gtransaction *txn = as_a <gtransaction *> (stmt);
	/* The body is expected to have been lowered away by this point.  */
	gcc_assert (gimple_transaction_body (txn) == NULL);
	stream_write_tree (ob, gimple_transaction_label_norm (txn), true);
	stream_write_tree (ob, gimple_transaction_label_uninst (txn), true);
	stream_write_tree (ob, gimple_transaction_label_over (txn), true);
      }
      break;

    default:
      gcc_unreachable ();
    }
  if (hist)
    stream_out_histogram_value (ob, hist);
}
/* Instrument a pointer/reference OP of pointer type PTYPE for
   -fsanitize=null and/or -fsanitize=alignment.  LOC is the source
   location and CKIND describes the kind of use (dereference, member
   access, etc.) for diagnostics.  Returns a COMPOUND_EXPR that performs
   the IFN_UBSAN_NULL check and then yields OP, or NULL_TREE when no
   instrumentation is needed.  */
static tree
ubsan_maybe_instrument_reference_or_call (location_t loc, tree op, tree ptype,
					  enum ubsan_null_ckind ckind)
{
  if (!do_ubsan_in_current_function ())
    return NULL_TREE;

  tree type = TREE_TYPE (ptype);
  tree orig_op = op;
  bool instrument = false;
  unsigned int mina = 0;

  /* For alignment sanitization, the minimum required alignment of the
     pointed-to type; 0 or 1 means nothing to check.  */
  if (flag_sanitize & SANITIZE_ALIGNMENT)
    {
      mina = min_align_of_type (type);
      if (mina <= 1)
	mina = 0;
    }
  /* Strip pointer-to-pointer conversions that don't change the value.  */
  while ((TREE_CODE (op) == NOP_EXPR || TREE_CODE (op) == NON_LVALUE_EXPR)
	 && TREE_CODE (TREE_TYPE (op)) == POINTER_TYPE)
    op = TREE_OPERAND (op, 0);
  if (TREE_CODE (op) == NOP_EXPR
      && TREE_CODE (TREE_TYPE (op)) == REFERENCE_TYPE)
    {
      /* References are never null; only check if the conversion can
	 lose alignment.  */
      if (mina && mina > min_align_of_type (TREE_TYPE (TREE_TYPE (op))))
	instrument = true;
    }
  else
    {
      if ((flag_sanitize & SANITIZE_NULL) && TREE_CODE (op) == ADDR_EXPR)
	{
	  bool strict_overflow_p = false;
	  /* tree_single_nonzero_warnv_p will not return true for non-weak
	     non-automatic decls with -fno-delete-null-pointer-checks,
	     which is disabled during -fsanitize=null.  We don't want to
	     instrument those, just weak vars though.  */
	  int save_flag_delete_null_pointer_checks
	    = flag_delete_null_pointer_checks;

	  flag_delete_null_pointer_checks = 1;
	  if (!tree_single_nonzero_warnv_p (op, &strict_overflow_p)
	      || strict_overflow_p)
	    instrument = true;
	  flag_delete_null_pointer_checks
	    = save_flag_delete_null_pointer_checks;
	}
      else if (flag_sanitize & SANITIZE_NULL)
	instrument = true;
      if (mina && mina > 1)
	{
	  /* Instrument unless the pointer's known alignment already
	     satisfies the type's requirement.  */
	  if (!POINTER_TYPE_P (TREE_TYPE (op))
	      || mina > get_pointer_alignment (op) / BITS_PER_UNIT)
	    instrument = true;
	}
    }
  if (!instrument)
    return NULL_TREE;
  /* Evaluate the operand once; the check and the use share it.  */
  op = save_expr (orig_op);
  gcc_assert (POINTER_TYPE_P (ptype));
  if (TREE_CODE (ptype) == REFERENCE_TYPE)
    ptype = build_pointer_type (TREE_TYPE (ptype));
  tree kind = build_int_cst (ptype, ckind);
  tree align = build_int_cst (pointer_sized_int_node, mina);
  tree call
    = build_call_expr_internal_loc (loc, IFN_UBSAN_NULL, void_type_node,
				    3, op, kind, align);
  TREE_SIDE_EFFECTS (call) = 1;
  return fold_build2 (COMPOUND_EXPR, TREE_TYPE (op), call, op);
}
/* Build (or fetch from the signature cache) the RECORD_TYPE representing
   a Java array of ELEMENT_TYPE.  LENGTH is the array length, or negative
   when unknown.  The record contains a "length" pseudo-field followed by
   the "data" array field, and is registered under its JVM signature so
   subsequent requests return the same type.  */
tree
build_java_array_type (tree element_type, HOST_WIDE_INT length)
{
  tree sig, t, fld, atype, arfld;
  char buf[12];
  tree elsig = build_java_signature (element_type);
  tree el_name = element_type;
  buf[0] = '[';
  /* Encode a known length into the signature so differently-sized
     arrays get distinct cached types.  */
  if (length >= 0)
    sprintf (buf+1, HOST_WIDE_INT_PRINT_DEC, length);
  else
    buf[1] = '\0';
  sig = ident_subst (IDENTIFIER_POINTER (elsig), IDENTIFIER_LENGTH (elsig),
		     buf, 0, 0, "");
  t = IDENTIFIER_SIGNATURE_TYPE (sig);
  if (t != NULL_TREE)
    return TREE_TYPE (t);
  t = make_class ();
  IDENTIFIER_SIGNATURE_TYPE (sig) = build_pointer_type (t);
  TYPE_ARRAY_P (t) = 1;

  /* Derive a printable element name for the array class's TYPE_NAME.  */
  if (TREE_CODE (el_name) == POINTER_TYPE)
    el_name = TREE_TYPE (el_name);
  el_name = TYPE_NAME (el_name);
  if (TREE_CODE (el_name) == TYPE_DECL)
    el_name = DECL_NAME (el_name);
  {
    char suffix[12];
    if (length >= 0)
      sprintf (suffix, "[%d]", (int)length);
    else
      strcpy (suffix, "[]");
    TYPE_NAME (t)
      = build_decl (TYPE_DECL,
		    identifier_subst (el_name, "", '.', '.', suffix),
		    t);
  }

  set_java_signature (t, sig);
  set_super_info (0, t, object_type_node, 0);
  /* Object element types are stored as pointers to the promoted class.  */
  if (TREE_CODE (element_type) == RECORD_TYPE)
    element_type = promote_type (element_type);
  TYPE_ARRAY_ELEMENT (t) = element_type;

  /* Add length pseudo-field.  */
  fld = build_decl (FIELD_DECL, get_identifier ("length"), int_type_node);
  TYPE_FIELDS (t) = fld;
  DECL_CONTEXT (fld) = t;
  FIELD_PUBLIC (fld) = 1;
  FIELD_FINAL (fld) = 1;
  TREE_READONLY (fld) = 1;

  /* The actual element storage follows the length field.  */
  atype = build_prim_array_type (element_type, length);
  arfld = build_decl (FIELD_DECL, get_identifier ("data"), atype);
  DECL_CONTEXT (arfld) = t;
  TREE_CHAIN (fld) = arfld;
  DECL_ALIGN (arfld) = TYPE_ALIGN (element_type);

  /* We could layout_class, but that loads java.lang.Object prematurely.
   * This is called by the parser, and it is a bad idea to do load_class
   * in the middle of parsing, because of possible circularity problems. */
  push_super_field (t, object_type_node);
  layout_type (t);

  return t;
}
/* Begin a handler for an exception.  DECL is the catch parameter, or
   NULL_TREE for "catch (...)".  Arranges the __cxa_begin_catch /
   __cxa_get_exception_ptr calls and the __cxa_end_catch cleanup, and
   initializes DECL from the caught object.  Returns the (EH-prepared)
   type being caught, which the caller uses to finish the handler.  */
tree
expand_start_catch_block (tree decl)
{
  tree exp;
  tree type, init;

  if (! doing_eh ())
    return NULL_TREE;

  if (decl)
    {
      if (!is_admissible_throw_operand_or_catch_parameter (decl, false))
	decl = error_mark_node;

      type = prepare_eh_type (TREE_TYPE (decl));
      mark_used (eh_type_info (type));
    }
  else
    type = NULL_TREE;

  /* Call __cxa_end_catch at the end of processing the exception.  */
  push_eh_cleanup (type);

  init = do_begin_catch ();

  /* If there's no decl at all, then all we need to do is make sure
     to tell the runtime that we've begun handling the exception.  */
  if (decl == NULL || decl == error_mark_node || init == error_mark_node)
    finish_expr_stmt (init);

  /* If the C++ object needs constructing, we need to do that before
     calling __cxa_begin_catch, so that std::uncaught_exception gets
     the right value during the copy constructor.  */
  else if (flag_use_cxa_get_exception_ptr
	   && TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
    {
      exp = do_get_exception_ptr ();
      initialize_handler_parm (decl, exp);
      finish_expr_stmt (init);
    }

  /* Otherwise the type uses a bitwise copy, and we don't have to worry
     about the value of std::uncaught_exception and therefore can do the
     copy with the return value of __cxa_end_catch instead.  */
  else
    {
      tree init_type = type;

      /* Pointers are passed by values, everything else by reference.  */
      if (!TYPE_PTR_P (type))
	init_type = build_pointer_type (type);
      if (init_type != TREE_TYPE (init))
	init = build1 (NOP_EXPR, init_type, init);
      exp = create_temporary_var (init_type);
      cp_finish_decl (exp, init, /*init_const_expr=*/false,
		      NULL_TREE, LOOKUP_ONLYCONVERTING);
      DECL_REGISTER (exp) = 1;
      initialize_handler_parm (decl, exp);
    }

  return type;
}
/* Convert EXPR to the reference type REFTYPE.  CONVTYPE is a mask of
   CONV_* bits selecting which conversions are permitted (implicit,
   static, reinterpret, const), FLAGS carries LOOKUP_* options (notably
   LOOKUP_COMPLAIN to emit diagnostics), and DECL, if non-NULL, is the
   declaration being initialized, used for warning messages.  Returns the
   converted expression, NULL_TREE for a failed speculative conversion,
   or error_mark_node on error.  */
tree
convert_to_reference (tree reftype, tree expr, int convtype,
		      int flags, tree decl)
{
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (reftype));
  tree intype;
  tree rval = NULL_TREE;
  tree rval_as_conversion = NULL_TREE;
  bool can_convert_intype_to_type;

  /* An overloaded function name must first be resolved against the
     target function type.  */
  if (TREE_CODE (type) == FUNCTION_TYPE
      && TREE_TYPE (expr) == unknown_type_node)
    expr = instantiate_type (type, expr,
			     (flags & LOOKUP_COMPLAIN)
			     ? tf_error | tf_warning : tf_none);
  else
    expr = convert_from_reference (expr);

  if (expr == error_mark_node)
    return error_mark_node;

  intype = TREE_TYPE (expr);

  my_friendly_assert (TREE_CODE (intype) != REFERENCE_TYPE, 364);

  intype = TYPE_MAIN_VARIANT (intype);

  can_convert_intype_to_type = can_convert (type, intype);
  if (!can_convert_intype_to_type
      && (convtype & CONV_IMPLICIT) && IS_AGGR_TYPE (intype)
      && ! (flags & LOOKUP_NO_CONVERSION))
    {
      /* Look for a user-defined conversion to lvalue that we can use.  */

      rval_as_conversion
	= build_type_conversion (reftype, expr);

      if (rval_as_conversion && rval_as_conversion != error_mark_node
	  && real_lvalue_p (rval_as_conversion))
	{
	  expr = rval_as_conversion;
	  rval_as_conversion = NULL_TREE;
	  intype = type;
	  can_convert_intype_to_type = 1;
	}
    }

  if (((convtype & CONV_STATIC) && can_convert (intype, type))
      || ((convtype & CONV_IMPLICIT) && can_convert_intype_to_type))
    {
      if (flags & LOOKUP_COMPLAIN)
	{
	  tree ttl = TREE_TYPE (reftype);
	  tree ttr = lvalue_type (expr);

	  if (! real_lvalue_p (expr))
	    warn_ref_binding (reftype, intype, decl);

	  if (! (convtype & CONV_CONST)
		   && !at_least_as_qualified_p (ttl, ttr))
	    pedwarn ("conversion from `%T' to `%T' discards qualifiers",
			ttr, reftype);
	}

      return build_up_reference (reftype, expr, flags, decl);
    }
  else if ((convtype & CONV_REINTERPRET) && lvalue_p (expr))
    {
      /* When casting an lvalue to a reference type, just convert into a
	 pointer to the new type and deference it.  This is allowed
	 by San Diego WP section 5.2.9 paragraph 12, though perhaps it
	 should be done directly (jason).  (int &)ri ---> *(int*)&ri */

      /* B* bp; A& ar = (A&)bp; is valid, but it's probably not what they
	 meant.  */
      if (TREE_CODE (intype) == POINTER_TYPE
	  && (comptypes (TREE_TYPE (intype), type,
			 COMPARE_BASE | COMPARE_DERIVED)))
	warning ("casting `%T' to `%T' does not dereference pointer",
		 intype, reftype);

      rval = build_unary_op (ADDR_EXPR, expr, 0);
      if (rval != error_mark_node)
	rval = convert_force (build_pointer_type (TREE_TYPE (reftype)),
			      rval, 0);
      if (rval != error_mark_node)
	rval = build1 (NOP_EXPR, reftype, rval);
    }
  else
    {
      /* Fall back to initialization semantics: copy-initialize a
	 temporary and bind the reference to it.  */
      rval = convert_for_initialization (NULL_TREE, type, expr, flags,
					 "converting", 0, 0);
      if (rval == NULL_TREE || rval == error_mark_node)
	return rval;
      warn_ref_binding (reftype, intype, decl);
      rval = build_up_reference (reftype, rval, flags, decl);
    }

  if (rval)
    {
      /* If we found a way to convert earlier, then use it.  */
      return rval;
    }

  if (flags & LOOKUP_COMPLAIN)
    error ("cannot convert type `%T' to type `%T'", intype, reftype);

  if (flags & LOOKUP_SPECULATIVELY)
    return NULL_TREE;

  return error_mark_node;
}
/* Return the tree for an expresssion, type EXP_TYPE (see treetree.h)
   with tree type TYPE and with operands1 OP1, OP2 (maybe), OP3 (maybe).
   Operands are converted to TYPE as needed; unsupported EXP_TYPE values
   abort.  ("operator" is an ordinary identifier in C.)  */
tree
tree_code_get_expression (unsigned int exp_type,
                          tree type, tree op1, tree op2,
			  tree op3 ATTRIBUTE_UNUSED)
{
  tree ret1;
  int operator;

  switch (exp_type)
    {
    case EXP_ASSIGN:
      if (!op1 || !op2)
	abort ();
      operator = MODIFY_EXPR;
      /* The RHS is converted to the variable's type before the store.  */
      ret1 = build (operator, type,
		    op1,
		    build1 (CONVERT_EXPR, type, op2));

      break;

    case EXP_PLUS:
      operator = PLUS_EXPR;
      goto binary_expression;

    case EXP_MINUS:
      operator = MINUS_EXPR;
      goto binary_expression;

    case EXP_EQUALS:
      operator = EQ_EXPR;
      goto binary_expression;

    /* Expand a binary expression.  Ensure the operands are the right type.  */
    binary_expression:
      if (!op1 || !op2)
	abort ();
      ret1  =  build (operator, type,
		      build1 (CONVERT_EXPR, type, op1),
		      build1 (CONVERT_EXPR, type, op2));
      break;

      /* Reference to a variable.  This is dead easy, just return the
         decl for the variable.  If the TYPE is different than the
         variable type, convert it.  */
    case EXP_REFERENCE:
      if (!op1)
	abort ();
      if (type == TREE_TYPE (op1))
	ret1 = op1;
      else
	ret1 = build1 (CONVERT_EXPR, type, op1);
      break;

    case EXP_FUNCTION_INVOCATION:
      if (!op1 || !op2)
	abort ();
      {
	/* Calls go through a pointer to the function; OP2 is the
	   (reversed) argument list.  */
	tree fun_ptr;
	fun_ptr = build1 (ADDR_EXPR, build_pointer_type (type), op1);
	ret1 = build (CALL_EXPR, type, fun_ptr, nreverse (op2));
      }
      break;

    default:
      abort ();
    }

  return ret1;
}
/* Initialize the builtins.  Interns the class/method identifiers of the
   java_builtins table, then registers the math, expect, sync, and misc
   builtin function decls (mapping several onto java.lang.Math mangled
   names), and finally creates the common builtin nodes.  */
void
initialize_builtins (void)
{
  tree double_ftype_double, double_ftype_double_double;
  tree float_ftype_float_float;
  tree boolean_ftype_boolean_boolean;
  int i;

  /* Intern the names once so later lookups compare identifier nodes.  */
  for (i = 0; java_builtins[i].builtin_code != END_BUILTINS; ++i)
    {
      tree klass_id = get_identifier (java_builtins[i].class_name.s);
      tree m = get_identifier (java_builtins[i].method_name.s);

      java_builtins[i].class_name.t = klass_id;
      java_builtins[i].method_name.t = m;
    }

  void_list_node = end_params_node;

  float_ftype_float_float
    = build_function_type_list (float_type_node,
				float_type_node, float_type_node, NULL_TREE);

  double_ftype_double
    = build_function_type_list (double_type_node, double_type_node, NULL_TREE);
  double_ftype_double_double
    = build_function_type_list (double_type_node,
				double_type_node, double_type_node, NULL_TREE);

  define_builtin (BUILT_IN_FMOD, "__builtin_fmod",
		  double_ftype_double_double, "fmod", ECF_CONST);
  define_builtin (BUILT_IN_FMODF, "__builtin_fmodf",
		  float_ftype_float_float, "fmodf", ECF_CONST);

  /* Math builtins map onto the mangled names of java.lang.Math methods.  */
  define_builtin (BUILT_IN_ACOS, "__builtin_acos",
		  double_ftype_double,
		  "_ZN4java4lang4Math4acosEJdd",
		  ECF_CONST);
  define_builtin (BUILT_IN_ASIN, "__builtin_asin",
		  double_ftype_double,
		  "_ZN4java4lang4Math4asinEJdd",
		  ECF_CONST);
  define_builtin (BUILT_IN_ATAN, "__builtin_atan",
		  double_ftype_double,
		  "_ZN4java4lang4Math4atanEJdd",
		  ECF_CONST);
  define_builtin (BUILT_IN_ATAN2, "__builtin_atan2",
		  double_ftype_double_double,
		  "_ZN4java4lang4Math5atan2EJddd",
		  ECF_CONST);
  define_builtin (BUILT_IN_CEIL, "__builtin_ceil",
		  double_ftype_double,
		  "_ZN4java4lang4Math4ceilEJdd",
		  ECF_CONST);
  define_builtin (BUILT_IN_COS, "__builtin_cos",
		  double_ftype_double,
		  "_ZN4java4lang4Math3cosEJdd",
		  ECF_CONST);
  define_builtin (BUILT_IN_EXP, "__builtin_exp",
		  double_ftype_double,
		  "_ZN4java4lang4Math3expEJdd",
		  ECF_CONST);
  define_builtin (BUILT_IN_FLOOR, "__builtin_floor",
		  double_ftype_double,
		  "_ZN4java4lang4Math5floorEJdd",
		  ECF_CONST);
  define_builtin (BUILT_IN_LOG, "__builtin_log",
		  double_ftype_double,
		  "_ZN4java4lang4Math3logEJdd",
		  ECF_CONST);
  define_builtin (BUILT_IN_POW, "__builtin_pow",
		  double_ftype_double_double,
		  "_ZN4java4lang4Math3powEJddd",
		  ECF_CONST);
  define_builtin (BUILT_IN_SIN, "__builtin_sin",
		  double_ftype_double,
		  "_ZN4java4lang4Math3sinEJdd",
		  ECF_CONST);
  define_builtin (BUILT_IN_SQRT, "__builtin_sqrt",
		  double_ftype_double,
		  "_ZN4java4lang4Math4sqrtEJdd",
		  ECF_CONST);
  define_builtin (BUILT_IN_TAN, "__builtin_tan",
		  double_ftype_double,
		  "_ZN4java4lang4Math3tanEJdd",
		  ECF_CONST);

  boolean_ftype_boolean_boolean
    = build_function_type_list (boolean_type_node,
				boolean_type_node, boolean_type_node,
				NULL_TREE);
  define_builtin (BUILT_IN_EXPECT, "__builtin_expect",
		  boolean_ftype_boolean_boolean,
		  "__builtin_expect",
		  ECF_CONST | ECF_NOTHROW);
  define_builtin (BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4,
		  "__sync_bool_compare_and_swap_4",
		  build_function_type_list (boolean_type_node,
					    int_type_node,
					    build_pointer_type (int_type_node),
					    int_type_node, NULL_TREE),
		  "__sync_bool_compare_and_swap_4", ECF_NOTHROW | ECF_LEAF);
  define_builtin (BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8,
		  "__sync_bool_compare_and_swap_8",
		  build_function_type_list (boolean_type_node,
					    long_type_node,
					    build_pointer_type (long_type_node),
					    int_type_node, NULL_TREE),
		  "__sync_bool_compare_and_swap_8", ECF_NOTHROW | ECF_LEAF);
  define_builtin (BUILT_IN_SYNC_SYNCHRONIZE, "__sync_synchronize",
		  build_function_type_list (void_type_node, NULL_TREE),
		  "__sync_synchronize", ECF_NOTHROW | ECF_LEAF);

  define_builtin (BUILT_IN_RETURN_ADDRESS, "__builtin_return_address",
		  build_function_type_list (ptr_type_node, int_type_node,
					    NULL_TREE),
		  "__builtin_return_address", ECF_NOTHROW | ECF_LEAF);
  define_builtin (BUILT_IN_TRAP, "__builtin_trap",
		  build_function_type_list (void_type_node, NULL_TREE),
		  "__builtin_trap",
		  ECF_NOTHROW | ECF_LEAF | ECF_NORETURN);
  build_common_builtin_nodes ();
}
/* Create the function decls and types for the libgcov value-profiling
   runtime hooks (__gcov_interval_profiler, __gcov_pow2_profiler,
   __gcov_one_value_profiler, __gcov_indirect_call_profiler,
   __gcov_average_profiler and __gcov_ior_profiler).  Idempotent:
   gcov_type_node doubles as the "already initialized" flag.  */
void
gimple_init_edge_profiler (void)
{
  tree interval_profiler_fn_type;
  tree pow2_profiler_fn_type;
  tree one_value_profiler_fn_type;
  tree gcov_type_ptr;
  tree ic_profiler_fn_type;
  tree average_profiler_fn_type;

  if (!gcov_type_node)
    {
      gcov_type_node = get_gcov_type ();
      gcov_type_ptr = build_pointer_type (gcov_type_node);

      /* void (*) (gcov_type *, gcov_type, int, unsigned)  */
      interval_profiler_fn_type
	= build_function_type_list (void_type_node,
				    gcov_type_ptr, gcov_type_node,
				    integer_type_node,
				    unsigned_type_node, NULL_TREE);
      tree_interval_profiler_fn
	= build_fn_decl ("__gcov_interval_profiler",
			 interval_profiler_fn_type);
      TREE_NOTHROW (tree_interval_profiler_fn) = 1;
      /* Mark as "leaf": the profiler hooks never call back into the
	 instrumented translation unit.  */
      DECL_ATTRIBUTES (tree_interval_profiler_fn)
	= tree_cons (get_identifier ("leaf"), NULL,
		     DECL_ATTRIBUTES (tree_interval_profiler_fn));

      /* void (*) (gcov_type *, gcov_type)  */
      pow2_profiler_fn_type
	= build_function_type_list (void_type_node,
				    gcov_type_ptr, gcov_type_node,
				    NULL_TREE);
      tree_pow2_profiler_fn = build_fn_decl ("__gcov_pow2_profiler",
					     pow2_profiler_fn_type);
      TREE_NOTHROW (tree_pow2_profiler_fn) = 1;
      DECL_ATTRIBUTES (tree_pow2_profiler_fn)
	= tree_cons (get_identifier ("leaf"), NULL,
		     DECL_ATTRIBUTES (tree_pow2_profiler_fn));

      /* void (*) (gcov_type *, gcov_type)  */
      one_value_profiler_fn_type
	= build_function_type_list (void_type_node,
				    gcov_type_ptr, gcov_type_node,
				    NULL_TREE);
      tree_one_value_profiler_fn
	= build_fn_decl ("__gcov_one_value_profiler",
			 one_value_profiler_fn_type);
      TREE_NOTHROW (tree_one_value_profiler_fn) = 1;
      DECL_ATTRIBUTES (tree_one_value_profiler_fn)
	= tree_cons (get_identifier ("leaf"), NULL,
		     DECL_ATTRIBUTES (tree_one_value_profiler_fn));

      /* Create the TLS globals (callee address / counter pointer) that
	 the indirect-call profiler communicates through; must exist
	 before the profiler decl that references them (sets ptr_void).  */
      init_ic_make_global_vars ();

      /* void (*) (gcov_type *, gcov_type, void *, void *)  */
      ic_profiler_fn_type
	= build_function_type_list (void_type_node,
				    gcov_type_ptr, gcov_type_node,
				    ptr_void,
				    ptr_void, NULL_TREE);
      tree_indirect_call_profiler_fn
	= build_fn_decl ("__gcov_indirect_call_profiler",
			 ic_profiler_fn_type);
      TREE_NOTHROW (tree_indirect_call_profiler_fn) = 1;
      DECL_ATTRIBUTES (tree_indirect_call_profiler_fn)
	= tree_cons (get_identifier ("leaf"), NULL,
		     DECL_ATTRIBUTES (tree_indirect_call_profiler_fn));

      /* void (*) (gcov_type *, gcov_type)  */
      average_profiler_fn_type
	= build_function_type_list (void_type_node,
				    gcov_type_ptr, gcov_type_node, NULL_TREE);
      tree_average_profiler_fn
	= build_fn_decl ("__gcov_average_profiler",
			 average_profiler_fn_type);
      TREE_NOTHROW (tree_average_profiler_fn) = 1;
      DECL_ATTRIBUTES (tree_average_profiler_fn)
	= tree_cons (get_identifier ("leaf"), NULL,
		     DECL_ATTRIBUTES (tree_average_profiler_fn));
      /* The ior profiler shares the average profiler's signature.  */
      tree_ior_profiler_fn
	= build_fn_decl ("__gcov_ior_profiler", average_profiler_fn_type);
      TREE_NOTHROW (tree_ior_profiler_fn) = 1;
      DECL_ATTRIBUTES (tree_ior_profiler_fn)
	= tree_cons (get_identifier ("leaf"), NULL,
		     DECL_ATTRIBUTES (tree_ior_profiler_fn));

      /* LTO streamer needs assembler names.  Because we create these decls
	 late, we need to initialize them by hand.  */
      DECL_ASSEMBLER_NAME (tree_interval_profiler_fn);
      DECL_ASSEMBLER_NAME (tree_pow2_profiler_fn);
      DECL_ASSEMBLER_NAME (tree_one_value_profiler_fn);
      DECL_ASSEMBLER_NAME (tree_indirect_call_profiler_fn);
      DECL_ASSEMBLER_NAME (tree_average_profiler_fn);
      DECL_ASSEMBLER_NAME (tree_ior_profiler_fn);
    }
}
/* Begin a catch block.  DECL is the exception parameter of the handler
   (possibly NULL_TREE for "catch (...)" or an untyped handler).  Emits
   the calls into the C++ EH runtime (__cxa_begin_catch and friends),
   initializes DECL from the caught object, and returns the handler's
   EH type (NULL_TREE when no EH, error_mark_node on bad decls).  */
tree
expand_start_catch_block (tree decl)
{
  tree exp;
  tree type, init;

  if (! doing_eh ())
    return NULL_TREE;

  /* Make sure this declaration is reasonable.  */
  if (decl && !complete_ptr_ref_or_void_ptr_p (TREE_TYPE (decl), NULL_TREE))
    decl = error_mark_node;

  if (decl)
    type = prepare_eh_type (TREE_TYPE (decl));
  else
    type = NULL_TREE;

  if (decl && decl_is_java_type (type, 1))
    {
      /* Java only passes object via pointer and doesn't require
	 adjusting.  The java object is immediately before the
	 generic exception header.  */
      exp = build_exc_ptr ();
      exp = build1 (NOP_EXPR, build_pointer_type (type), exp);
      /* Step back by the object's size to reach the Java object that
	 precedes the generic exception header.  */
      exp = fold_build_pointer_plus (exp,
		fold_build1_loc (input_location,
				 NEGATE_EXPR, sizetype,
				 TYPE_SIZE_UNIT (TREE_TYPE (exp))));
      exp = cp_build_indirect_ref (exp, RO_NULL, tf_warning_or_error);
      initialize_handler_parm (decl, exp);
      return type;
    }

  /* Call __cxa_end_catch at the end of processing the exception.  */
  push_eh_cleanup (type);

  init = do_begin_catch ();

  /* If there's no decl at all, then all we need to do is make sure
     to tell the runtime that we've begun handling the exception.  */
  if (decl == NULL || decl == error_mark_node || init == error_mark_node)
    finish_expr_stmt (init);

  /* If the C++ object needs constructing, we need to do that before
     calling __cxa_begin_catch, so that std::uncaught_exception gets
     the right value during the copy constructor.  */
  else if (flag_use_cxa_get_exception_ptr
	   && TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
    {
      exp = do_get_exception_ptr ();
      initialize_handler_parm (decl, exp);
      finish_expr_stmt (init);
    }

  /* Otherwise the type uses a bitwise copy, and we don't have to worry
     about the value of std::uncaught_exception and therefore can do the
     copy with the return value of __cxa_end_catch instead.  */
  else
    {
      tree init_type = type;

      /* Pointers are passed by values, everything else by reference.  */
      if (!TYPE_PTR_P (type))
	init_type = build_pointer_type (type);
      if (init_type != TREE_TYPE (init))
	init = build1 (NOP_EXPR, init_type, init);
      exp = create_temporary_var (init_type);
      DECL_REGISTER (exp) = 1;
      cp_finish_decl (exp, init, /*init_const_expr=*/false,
		      NULL_TREE, LOOKUP_ONLYCONVERTING);
      initialize_handler_parm (decl, exp);
    }

  return type;
}
/* Build and register a static variable holding the data a UBSan runtime
   handler expects: an optional source-location record (when LOC is
   known), up to three type-descriptor pointers passed as trailing
   NULL_TREE-terminated varargs, and, when MISMATCH is non-NULL, the
   alignment and check-kind fields for alignment-mismatch reports.
   NAME becomes the RECORD_TYPE's name; the VAR_DECL itself gets an
   internal "Lubsan_data" label.  Returns the finalized VAR_DECL.  */
tree
ubsan_create_data (const char *name, location_t loc,
		   const struct ubsan_mismatch_data *mismatch, ...)
{
  va_list args;
  tree ret, t;
  tree fields[5];
  vec<tree, va_gc> *saved_args = NULL;
  size_t i = 0;

  /* Firstly, create a pointer to type descriptor type.  */
  tree td_type = ubsan_type_descriptor_type ();
  TYPE_READONLY (td_type) = 1;
  td_type = build_pointer_type (td_type);
  loc = LOCATION_LOCUS (loc);

  /* Create the structure type.  */
  ret = make_node (RECORD_TYPE);
  if (loc != UNKNOWN_LOCATION)
    {
      fields[i] = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
			      ubsan_source_location_type ());
      DECL_CONTEXT (fields[i]) = ret;
      i++;
    }

  /* One FIELD_DECL per vararg type-descriptor pointer; at most three
     fit together with the optional location and mismatch fields.  */
  va_start (args, mismatch);
  for (t = va_arg (args, tree); t != NULL_TREE;
       i++, t = va_arg (args, tree))
    {
      gcc_checking_assert (i < 3);
      /* Save the tree arguments for later use.  */
      vec_safe_push (saved_args, t);
      fields[i] = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
			      td_type);
      DECL_CONTEXT (fields[i]) = ret;
      if (i)
	DECL_CHAIN (fields[i - 1]) = fields[i];
    }
  va_end (args);

  if (mismatch != NULL)
    {
      /* We have to add two more decls.  */
      fields[i] = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
			      pointer_sized_int_node);
      DECL_CONTEXT (fields[i]) = ret;
      DECL_CHAIN (fields[i - 1]) = fields[i];
      i++;

      fields[i] = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
			      unsigned_char_type_node);
      DECL_CONTEXT (fields[i]) = ret;
      DECL_CHAIN (fields[i - 1]) = fields[i];
      i++;
    }

  TYPE_FIELDS (ret) = fields[0];
  TYPE_NAME (ret) = get_identifier (name);
  layout_type (ret);

  /* Now, fill in the type.  */
  char tmp_name[32];
  static unsigned int ubsan_var_id_num;
  ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lubsan_data", ubsan_var_id_num++);
  tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (tmp_name),
			 ret);
  TREE_STATIC (var) = 1;
  TREE_PUBLIC (var) = 0;
  DECL_ARTIFICIAL (var) = 1;
  DECL_IGNORED_P (var) = 1;
  DECL_EXTERNAL (var) = 0;

  /* i now counts every field, so this reservation is large enough that
     the appends below never reallocate the vector the CTOR aliases.  */
  vec<constructor_elt, va_gc> *v;
  vec_alloc (v, i);
  tree ctor = build_constructor (ret, v);

  /* If desirable, set the __ubsan_source_location element.  */
  if (loc != UNKNOWN_LOCATION)
    CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, ubsan_source_location (loc));

  size_t nelts = vec_safe_length (saved_args);
  for (i = 0; i < nelts; i++)
    {
      t = (*saved_args)[i];
      CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, t);
    }

  if (mismatch != NULL)
    {
      /* Append the pointer data.  */
      CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, mismatch->align);
      CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, mismatch->ckind);
    }

  TREE_CONSTANT (ctor) = 1;
  TREE_STATIC (ctor) = 1;
  DECL_INITIAL (var) = ctor;
  rest_of_decl_compilation (var, 1, 0);

  return var;
}
/* Build an array reference BASE[OFFSET].  DECL, when non-NULL, is the
   backend decl the reference ultimately refers to; it is used to detect
   subreference arrays and class objects, whose elements must be
   addressed with explicit pointer arithmetic scaled by the stored span
   rather than a plain ARRAY_REF.  */
tree
gfc_build_array_ref (tree base, tree offset, tree decl)
{
  tree type = TREE_TYPE (base);
  tree tmp;
  tree span;

  /* Rank-zero coarray: just reinterpret BASE as its main variant.  */
  if (GFC_ARRAY_TYPE_P (type) && GFC_TYPE_ARRAY_RANK (type) == 0)
    {
      gcc_assert (GFC_TYPE_ARRAY_CORANK (type) > 0);
      return fold_convert (TYPE_MAIN_VARIANT (type), base);
    }

  /* Scalar coarray, there is nothing to do.  */
  if (TREE_CODE (type) != ARRAY_TYPE)
    {
      gcc_assert (decl == NULL_TREE);
      gcc_assert (integer_zerop (offset));
      return base;
    }

  type = TREE_TYPE (type);

  if (DECL_P (base))
    TREE_ADDRESSABLE (base) = 1;

  /* Strip NON_LVALUE_EXPR nodes.  */
  STRIP_TYPE_NOPS (offset);

  /* If the array reference is to a pointer, whose target contains a
     subreference, use the span that is stored with the backend decl
     and reference the element with pointer arithmetic.  */
  if (decl && (TREE_CODE (decl) == FIELD_DECL
	       || TREE_CODE (decl) == VAR_DECL
	       || TREE_CODE (decl) == PARM_DECL)
      && ((GFC_DECL_SUBREF_ARRAY_P (decl)
	   && !integer_zerop (GFC_DECL_SPAN(decl)))
	  || GFC_DECL_CLASS (decl)))
    {
      if (GFC_DECL_CLASS (decl))
	{
	  /* Allow for dummy arguments and other good things.  */
	  if (POINTER_TYPE_P (TREE_TYPE (decl)))
	    decl = build_fold_indirect_ref_loc (input_location, decl);

	  /* Check if '_data' is an array descriptor.  If it is not, the
	     array must be one of the components of the class object,
	     so return a normal array reference.  */
	  if (!GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (gfc_class_data_get (decl))))
	    return build4_loc (input_location, ARRAY_REF, type, base,
			       offset, NULL_TREE, NULL_TREE);

	  /* Polymorphic element size comes from the vtable.  */
	  span = gfc_vtable_size_get (decl);
	}
      else if (GFC_DECL_SUBREF_ARRAY_P (decl))
	span = GFC_DECL_SPAN(decl);
      else
	gcc_unreachable ();

      /* Byte offset = element index * span, applied via void* arithmetic
	 and cast back to a pointer to the element type.  */
      offset = fold_build2_loc (input_location, MULT_EXPR,
				gfc_array_index_type,
				offset, span);
      tmp = gfc_build_addr_expr (pvoid_type_node, base);
      tmp = fold_build_pointer_plus_loc (input_location, tmp, offset);
      tmp = fold_convert (build_pointer_type (type), tmp);
      /* Character (string) elements stay as pointers.  */
      if (!TYPE_STRING_FLAG (type))
	tmp = build_fold_indirect_ref_loc (input_location, tmp);
      return tmp;
    }
  else
    /* Otherwise use a straightforward array reference.  */
    return build4_loc (input_location, ARRAY_REF, type, base, offset,
		       NULL_TREE, NULL_TREE);
}
/* Build a THROW_EXPR for EXP.  EXP == NULL_TREE means a rethrow
   (`throw;'), otherwise the expression is lowered to allocation of the
   exception object, its initialization, and a call to __cxa_throw (or
   _Jv_Throw for Java types).  Returns the THROW_EXPR, or error_mark_node
   on invalid operands.  Inside a template only a THROW_EXPR stub is
   built.  */
tree
build_throw (tree exp)
{
  tree fn;

  if (exp == error_mark_node)
    return exp;

  /* In a template, defer lowering; just record the throw.  */
  if (processing_template_decl)
    {
      if (cfun)
	current_function_returns_abnormally = 1;
      exp = build_min (THROW_EXPR, void_type_node, exp);
      SET_EXPR_LOCATION (exp, input_location);
      return exp;
    }

  if (exp == null_node)
    warning (0, "throwing NULL, which has integral, not pointer type");

  if (exp != NULL_TREE)
    {
      if (!is_admissible_throw_operand_or_catch_parameter (exp, true))
	return error_mark_node;
    }

  if (! doing_eh ())
    return error_mark_node;

  if (exp && decl_is_java_type (TREE_TYPE (exp), 1))
    {
      /* Java exceptions go through the libgcj runtime directly.  */
      tree fn = get_identifier ("_Jv_Throw");
      if (!get_global_value_if_present (fn, &fn))
	{
	  /* Declare void _Jv_Throw (void *).  */
	  tree tmp;
	  tmp = build_function_type_list (ptr_type_node,
					  ptr_type_node, NULL_TREE);
	  fn = push_throw_library_fn (fn, tmp);
	}
      else if (really_overloaded_fn (fn))
	{
	  error ("%qD should never be overloaded", fn);
	  return error_mark_node;
	}
      fn = OVL_CURRENT (fn);
      exp = cp_build_function_call_nary (fn, tf_warning_or_error,
					 exp, NULL_TREE);
    }
  else if (exp)
    {
      tree throw_type;
      tree temp_type;
      tree cleanup;
      tree object, ptr;
      tree tmp;
      tree allocate_expr;

      /* The CLEANUP_TYPE is the internal type of a destructor.  */
      if (!cleanup_type)
	{
	  tmp = build_function_type_list (void_type_node,
					  ptr_type_node, NULL_TREE);
	  cleanup_type = build_pointer_type (tmp);
	}

      fn = get_identifier ("__cxa_throw");
      if (!get_global_value_if_present (fn, &fn))
	{
	  /* Declare void __cxa_throw (void*, void*, void (*)(void*)).  */
	  /* ??? Second argument is supposed to be "std::type_info*".  */
	  tmp = build_function_type_list (void_type_node,
					  ptr_type_node, ptr_type_node,
					  cleanup_type, NULL_TREE);
	  fn = push_throw_library_fn (fn, tmp);

	  /* Transactional memory needs a pure wrapper replacement.  */
	  if (flag_tm)
	    {
	      tree fn2 = get_identifier ("_ITM_cxa_throw");
	      if (!get_global_value_if_present (fn2, &fn2))
		fn2 = push_throw_library_fn (fn2, tmp);
	      apply_tm_attr (fn2, get_identifier ("transaction_pure"));
	      record_tm_replacement (fn, fn2);
	    }
	}

      /* [except.throw]

	 A throw-expression initializes a temporary object, the type
	 of which is determined by removing any top-level cv-qualifiers
	 from the static type of the operand of throw and adjusting the
	 type from "array of T" or "function return T" to "pointer to T"
	 or "pointer to function returning T" respectively.  */
      temp_type = is_bitfield_expr_with_lowered_type (exp);
      if (!temp_type)
	temp_type = cv_unqualified (type_decays_to (TREE_TYPE (exp)));

      /* OK, this is kind of wacky.  The standard says that we call
	 terminate when the exception handling mechanism, after
	 completing evaluation of the expression to be thrown but
	 before the exception is caught (_except.throw_), calls a
	 user function that exits via an uncaught exception.

	 So we have to protect the actual initialization of the
	 exception object with terminate(), but evaluate the
	 expression first.  Since there could be temps in the
	 expression, we need to handle that, too.  We also expand
	 the call to __cxa_allocate_exception first (which doesn't
	 matter, since it can't throw).  */

      /* Allocate the space for the exception.  */
      allocate_expr = do_allocate_exception (temp_type);
      allocate_expr = get_target_expr (allocate_expr);
      ptr = TARGET_EXPR_SLOT (allocate_expr);
      /* Free the slot only if unwinding starts before the throw
	 completes (EH-only cleanup).  */
      TARGET_EXPR_CLEANUP (allocate_expr) = do_free_exception (ptr);
      CLEANUP_EH_ONLY (allocate_expr) = 1;

      object = build_nop (build_pointer_type (temp_type), ptr);
      object = cp_build_indirect_ref (object, RO_NULL, tf_warning_or_error);

      /* And initialize the exception object.  */
      if (CLASS_TYPE_P (temp_type))
	{
	  int flags = LOOKUP_NORMAL | LOOKUP_ONLYCONVERTING;
	  vec<tree, va_gc> *exp_vec;

	  /* Under C++0x [12.8/16 class.copy], a thrown lvalue is
	     sometimes treated as an rvalue for the purposes of overload
	     resolution to favor move constructors over copy
	     constructors.  */
	  if (/* Must be a local, automatic variable.  */
	      TREE_CODE (exp) == VAR_DECL
	      && DECL_CONTEXT (exp) == current_function_decl
	      && ! TREE_STATIC (exp)
	      /* The variable must not have the `volatile' qualifier.  */
	      && !(cp_type_quals (TREE_TYPE (exp)) & TYPE_QUAL_VOLATILE))
	    flags = flags | LOOKUP_PREFER_RVALUE;

	  /* Call the copy constructor.  */
	  exp_vec = make_tree_vector_single (exp);
	  exp = (build_special_member_call
		 (object, complete_ctor_identifier, &exp_vec,
		  TREE_TYPE (object), flags, tf_warning_or_error));
	  release_tree_vector (exp_vec);
	  if (exp == error_mark_node)
	    {
	      error (" in thrown expression");
	      return error_mark_node;
	    }
	}
      else
	{
	  tmp = decay_conversion (exp, tf_warning_or_error);
	  if (tmp == error_mark_node)
	    return error_mark_node;
	  exp = build2 (INIT_EXPR, temp_type, object, tmp);
	}

      /* Mark any cleanups from the initialization as MUST_NOT_THROW, since
	 they are run after the exception object is initialized.  */
      cp_walk_tree_without_duplicates (&exp, wrap_cleanups_r, 0);

      /* Prepend the allocation.  */
      exp = build2 (COMPOUND_EXPR, TREE_TYPE (exp), allocate_expr, exp);

      /* Force all the cleanups to be evaluated here so that we don't have
	 to do them during unwinding.  */
      exp = build1 (CLEANUP_POINT_EXPR, void_type_node, exp);

      throw_type = build_eh_type_type (prepare_eh_type (TREE_TYPE (object)));

      if (TYPE_HAS_NONTRIVIAL_DESTRUCTOR (TREE_TYPE (object)))
	{
	  /* The runtime destroys the object via its complete dtor.  */
	  cleanup = lookup_fnfields (TYPE_BINFO (TREE_TYPE (object)),
				     complete_dtor_identifier, 0);
	  cleanup = BASELINK_FUNCTIONS (cleanup);
	  mark_used (cleanup);
	  cxx_mark_addressable (cleanup);
	  /* Pretend it's a normal function.  */
	  cleanup = build1 (ADDR_EXPR, cleanup_type, cleanup);
	}
      else
	cleanup = build_int_cst (cleanup_type, 0);

      /* ??? Indicate that this function call throws throw_type.  */
      tmp = cp_build_function_call_nary (fn, tf_warning_or_error,
					 ptr, throw_type, cleanup, NULL_TREE);

      /* Tack on the initialization stuff.  */
      exp = build2 (COMPOUND_EXPR, TREE_TYPE (tmp), exp, tmp);
    }
  else
    {
      /* Rethrow current exception.  */

      tree fn = get_identifier ("__cxa_rethrow");
      if (!get_global_value_if_present (fn, &fn))
	{
	  /* Declare void __cxa_rethrow (void).  */
	  fn = push_throw_library_fn
	    (fn, build_function_type_list (void_type_node, NULL_TREE));
	}

      if (flag_tm)
	apply_tm_attr (fn, get_identifier ("transaction_pure"));

      /* ??? Indicate that this function call allows exceptions of the type
	 of the enclosing catch block (if known).  */
      exp = cp_build_function_call_vec (fn, NULL, tf_warning_or_error);
    }

  exp = build1 (THROW_EXPR, void_type_node, exp);
  SET_EXPR_LOCATION (exp, input_location);

  return exp;
}
static void gfc_init_builtin_functions (void) { enum builtin_type { #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME, #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME, #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME, #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME, #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME, #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME, #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME, #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ ARG6) NAME, #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ ARG6, ARG7) NAME, #define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ ARG6, ARG7, ARG8) NAME, #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME, #define DEF_POINTER_TYPE(NAME, TYPE) NAME, #include "types.def" #undef DEF_PRIMITIVE_TYPE #undef DEF_FUNCTION_TYPE_0 #undef DEF_FUNCTION_TYPE_1 #undef DEF_FUNCTION_TYPE_2 #undef DEF_FUNCTION_TYPE_3 #undef DEF_FUNCTION_TYPE_4 #undef DEF_FUNCTION_TYPE_5 #undef DEF_FUNCTION_TYPE_6 #undef DEF_FUNCTION_TYPE_7 #undef DEF_FUNCTION_TYPE_8 #undef DEF_FUNCTION_TYPE_VAR_0 #undef DEF_POINTER_TYPE BT_LAST }; tree mfunc_float[6]; tree mfunc_double[6]; tree mfunc_longdouble[6]; tree mfunc_cfloat[6]; tree mfunc_cdouble[6]; tree mfunc_clongdouble[6]; tree func_cfloat_float, func_float_cfloat; tree func_cdouble_double, func_double_cdouble; tree func_clongdouble_longdouble, func_longdouble_clongdouble; tree func_float_floatp_floatp; tree func_double_doublep_doublep; tree func_longdouble_longdoublep_longdoublep; tree ftype, ptype; tree builtin_types[(int) BT_LAST + 1]; int attr; build_builtin_fntypes (mfunc_float, float_type_node); build_builtin_fntypes (mfunc_double, double_type_node); build_builtin_fntypes (mfunc_longdouble, long_double_type_node); build_builtin_fntypes (mfunc_cfloat, complex_float_type_node); build_builtin_fntypes (mfunc_cdouble, complex_double_type_node); 
build_builtin_fntypes (mfunc_clongdouble, complex_long_double_type_node); func_cfloat_float = build_function_type_list (float_type_node, complex_float_type_node, NULL_TREE); func_float_cfloat = build_function_type_list (complex_float_type_node, float_type_node, NULL_TREE); func_cdouble_double = build_function_type_list (double_type_node, complex_double_type_node, NULL_TREE); func_double_cdouble = build_function_type_list (complex_double_type_node, double_type_node, NULL_TREE); func_clongdouble_longdouble = build_function_type_list (long_double_type_node, complex_long_double_type_node, NULL_TREE); func_longdouble_clongdouble = build_function_type_list (complex_long_double_type_node, long_double_type_node, NULL_TREE); ptype = build_pointer_type (float_type_node); func_float_floatp_floatp = build_function_type_list (void_type_node, ptype, ptype, NULL_TREE); ptype = build_pointer_type (double_type_node); func_double_doublep_doublep = build_function_type_list (void_type_node, ptype, ptype, NULL_TREE); ptype = build_pointer_type (long_double_type_node); func_longdouble_longdoublep_longdoublep = build_function_type_list (void_type_node, ptype, ptype, NULL_TREE); /* Non-math builtins are defined manually, so they're not included here. 
*/ #define OTHER_BUILTIN(ID,NAME,TYPE,CONST) #include "mathbuiltins.def" gfc_define_builtin ("__builtin_roundl", mfunc_longdouble[0], BUILT_IN_ROUNDL, "roundl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_round", mfunc_double[0], BUILT_IN_ROUND, "round", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_roundf", mfunc_float[0], BUILT_IN_ROUNDF, "roundf", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_truncl", mfunc_longdouble[0], BUILT_IN_TRUNCL, "truncl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_trunc", mfunc_double[0], BUILT_IN_TRUNC, "trunc", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_truncf", mfunc_float[0], BUILT_IN_TRUNCF, "truncf", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_cabsl", func_clongdouble_longdouble, BUILT_IN_CABSL, "cabsl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_cabs", func_cdouble_double, BUILT_IN_CABS, "cabs", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_cabsf", func_cfloat_float, BUILT_IN_CABSF, "cabsf", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_copysignl", mfunc_longdouble[1], BUILT_IN_COPYSIGNL, "copysignl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_copysign", mfunc_double[1], BUILT_IN_COPYSIGN, "copysign", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_copysignf", mfunc_float[1], BUILT_IN_COPYSIGNF, "copysignf", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_nextafterl", mfunc_longdouble[1], BUILT_IN_NEXTAFTERL, "nextafterl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_nextafter", mfunc_double[1], BUILT_IN_NEXTAFTER, "nextafter", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_nextafterf", mfunc_float[1], BUILT_IN_NEXTAFTERF, "nextafterf", ATTR_CONST_NOTHROW_LEAF_LIST); /* Some built-ins depend on rounding mode. Depending on compilation options, they will be "pure" or "const". 
*/ attr = flag_rounding_math ? ATTR_PURE_NOTHROW_LEAF_LIST : ATTR_CONST_NOTHROW_LEAF_LIST; gfc_define_builtin ("__builtin_rintl", mfunc_longdouble[0], BUILT_IN_RINTL, "rintl", attr); gfc_define_builtin ("__builtin_rint", mfunc_double[0], BUILT_IN_RINT, "rint", attr); gfc_define_builtin ("__builtin_rintf", mfunc_float[0], BUILT_IN_RINTF, "rintf", attr); gfc_define_builtin ("__builtin_remainderl", mfunc_longdouble[1], BUILT_IN_REMAINDERL, "remainderl", attr); gfc_define_builtin ("__builtin_remainder", mfunc_double[1], BUILT_IN_REMAINDER, "remainder", attr); gfc_define_builtin ("__builtin_remainderf", mfunc_float[1], BUILT_IN_REMAINDERF, "remainderf", attr); gfc_define_builtin ("__builtin_logbl", mfunc_longdouble[0], BUILT_IN_LOGBL, "logbl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_logb", mfunc_double[0], BUILT_IN_LOGB, "logb", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_logbf", mfunc_float[0], BUILT_IN_LOGBF, "logbf", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_frexpl", mfunc_longdouble[4], BUILT_IN_FREXPL, "frexpl", ATTR_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_frexp", mfunc_double[4], BUILT_IN_FREXP, "frexp", ATTR_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_frexpf", mfunc_float[4], BUILT_IN_FREXPF, "frexpf", ATTR_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_fabsl", mfunc_longdouble[0], BUILT_IN_FABSL, "fabsl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_fabs", mfunc_double[0], BUILT_IN_FABS, "fabs", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_fabsf", mfunc_float[0], BUILT_IN_FABSF, "fabsf", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_scalbnl", mfunc_longdouble[5], BUILT_IN_SCALBNL, "scalbnl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_scalbn", mfunc_double[5], BUILT_IN_SCALBN, "scalbn", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_scalbnf", mfunc_float[5], BUILT_IN_SCALBNF, "scalbnf", 
ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_fmodl", mfunc_longdouble[1], BUILT_IN_FMODL, "fmodl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_fmod", mfunc_double[1], BUILT_IN_FMOD, "fmod", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_fmodf", mfunc_float[1], BUILT_IN_FMODF, "fmodf", ATTR_CONST_NOTHROW_LEAF_LIST); /* iround{f,,l}, lround{f,,l} and llround{f,,l} */ ftype = build_function_type_list (integer_type_node, float_type_node, NULL_TREE); gfc_define_builtin("__builtin_iroundf", ftype, BUILT_IN_IROUNDF, "iroundf", ATTR_CONST_NOTHROW_LEAF_LIST); ftype = build_function_type_list (long_integer_type_node, float_type_node, NULL_TREE); gfc_define_builtin ("__builtin_lroundf", ftype, BUILT_IN_LROUNDF, "lroundf", ATTR_CONST_NOTHROW_LEAF_LIST); ftype = build_function_type_list (long_long_integer_type_node, float_type_node, NULL_TREE); gfc_define_builtin ("__builtin_llroundf", ftype, BUILT_IN_LLROUNDF, "llroundf", ATTR_CONST_NOTHROW_LEAF_LIST); ftype = build_function_type_list (integer_type_node, double_type_node, NULL_TREE); gfc_define_builtin("__builtin_iround", ftype, BUILT_IN_IROUND, "iround", ATTR_CONST_NOTHROW_LEAF_LIST); ftype = build_function_type_list (long_integer_type_node, double_type_node, NULL_TREE); gfc_define_builtin ("__builtin_lround", ftype, BUILT_IN_LROUND, "lround", ATTR_CONST_NOTHROW_LEAF_LIST); ftype = build_function_type_list (long_long_integer_type_node, double_type_node, NULL_TREE); gfc_define_builtin ("__builtin_llround", ftype, BUILT_IN_LLROUND, "llround", ATTR_CONST_NOTHROW_LEAF_LIST); ftype = build_function_type_list (integer_type_node, long_double_type_node, NULL_TREE); gfc_define_builtin("__builtin_iroundl", ftype, BUILT_IN_IROUNDL, "iroundl", ATTR_CONST_NOTHROW_LEAF_LIST); ftype = build_function_type_list (long_integer_type_node, long_double_type_node, NULL_TREE); gfc_define_builtin ("__builtin_lroundl", ftype, BUILT_IN_LROUNDL, "lroundl", ATTR_CONST_NOTHROW_LEAF_LIST); ftype = 
build_function_type_list (long_long_integer_type_node, long_double_type_node, NULL_TREE); gfc_define_builtin ("__builtin_llroundl", ftype, BUILT_IN_LLROUNDL, "llroundl", ATTR_CONST_NOTHROW_LEAF_LIST); /* These are used to implement the ** operator. */ gfc_define_builtin ("__builtin_powl", mfunc_longdouble[1], BUILT_IN_POWL, "powl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_pow", mfunc_double[1], BUILT_IN_POW, "pow", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_powf", mfunc_float[1], BUILT_IN_POWF, "powf", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_cpowl", mfunc_clongdouble[1], BUILT_IN_CPOWL, "cpowl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_cpow", mfunc_cdouble[1], BUILT_IN_CPOW, "cpow", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_cpowf", mfunc_cfloat[1], BUILT_IN_CPOWF, "cpowf", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_powil", mfunc_longdouble[2], BUILT_IN_POWIL, "powil", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_powi", mfunc_double[2], BUILT_IN_POWI, "powi", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_powif", mfunc_float[2], BUILT_IN_POWIF, "powif", ATTR_CONST_NOTHROW_LEAF_LIST); if (targetm.libc_has_function (function_c99_math_complex)) { gfc_define_builtin ("__builtin_cbrtl", mfunc_longdouble[0], BUILT_IN_CBRTL, "cbrtl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_cbrt", mfunc_double[0], BUILT_IN_CBRT, "cbrt", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_cbrtf", mfunc_float[0], BUILT_IN_CBRTF, "cbrtf", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_cexpil", func_longdouble_clongdouble, BUILT_IN_CEXPIL, "cexpil", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_cexpi", func_double_cdouble, BUILT_IN_CEXPI, "cexpi", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_cexpif", func_float_cfloat, BUILT_IN_CEXPIF, "cexpif", 
ATTR_CONST_NOTHROW_LEAF_LIST); } if (targetm.libc_has_function (function_sincos)) { gfc_define_builtin ("__builtin_sincosl", func_longdouble_longdoublep_longdoublep, BUILT_IN_SINCOSL, "sincosl", ATTR_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_sincos", func_double_doublep_doublep, BUILT_IN_SINCOS, "sincos", ATTR_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_sincosf", func_float_floatp_floatp, BUILT_IN_SINCOSF, "sincosf", ATTR_NOTHROW_LEAF_LIST); } /* For LEADZ, TRAILZ, POPCNT and POPPAR. */ ftype = build_function_type_list (integer_type_node, unsigned_type_node, NULL_TREE); gfc_define_builtin ("__builtin_clz", ftype, BUILT_IN_CLZ, "__builtin_clz", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_ctz", ftype, BUILT_IN_CTZ, "__builtin_ctz", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_parity", ftype, BUILT_IN_PARITY, "__builtin_parity", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_popcount", ftype, BUILT_IN_POPCOUNT, "__builtin_popcount", ATTR_CONST_NOTHROW_LEAF_LIST); ftype = build_function_type_list (integer_type_node, long_unsigned_type_node, NULL_TREE); gfc_define_builtin ("__builtin_clzl", ftype, BUILT_IN_CLZL, "__builtin_clzl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_ctzl", ftype, BUILT_IN_CTZL, "__builtin_ctzl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_parityl", ftype, BUILT_IN_PARITYL, "__builtin_parityl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_popcountl", ftype, BUILT_IN_POPCOUNTL, "__builtin_popcountl", ATTR_CONST_NOTHROW_LEAF_LIST); ftype = build_function_type_list (integer_type_node, long_long_unsigned_type_node, NULL_TREE); gfc_define_builtin ("__builtin_clzll", ftype, BUILT_IN_CLZLL, "__builtin_clzll", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_ctzll", ftype, BUILT_IN_CTZLL, "__builtin_ctzll", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_parityll", ftype, BUILT_IN_PARITYLL, "__builtin_parityll", 
ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_popcountll", ftype, BUILT_IN_POPCOUNTLL, "__builtin_popcountll", ATTR_CONST_NOTHROW_LEAF_LIST); /* Other builtin functions we use. */ ftype = build_function_type_list (long_integer_type_node, long_integer_type_node, long_integer_type_node, NULL_TREE); gfc_define_builtin ("__builtin_expect", ftype, BUILT_IN_EXPECT, "__builtin_expect", ATTR_CONST_NOTHROW_LEAF_LIST); ftype = build_function_type_list (void_type_node, pvoid_type_node, NULL_TREE); gfc_define_builtin ("__builtin_free", ftype, BUILT_IN_FREE, "free", ATTR_NOTHROW_LEAF_LIST); ftype = build_function_type_list (pvoid_type_node, size_type_node, NULL_TREE); gfc_define_builtin ("__builtin_malloc", ftype, BUILT_IN_MALLOC, "malloc", ATTR_NOTHROW_LEAF_MALLOC_LIST); ftype = build_function_type_list (pvoid_type_node, size_type_node, size_type_node, NULL_TREE); gfc_define_builtin ("__builtin_calloc", ftype, BUILT_IN_CALLOC, "calloc", ATTR_NOTHROW_LEAF_MALLOC_LIST); DECL_IS_MALLOC (builtin_decl_explicit (BUILT_IN_CALLOC)) = 1; ftype = build_function_type_list (pvoid_type_node, size_type_node, pvoid_type_node, NULL_TREE); gfc_define_builtin ("__builtin_realloc", ftype, BUILT_IN_REALLOC, "realloc", ATTR_NOTHROW_LEAF_LIST); ftype = build_function_type_list (integer_type_node, void_type_node, NULL_TREE); gfc_define_builtin ("__builtin_isnan", ftype, BUILT_IN_ISNAN, "__builtin_isnan", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_isfinite", ftype, BUILT_IN_ISFINITE, "__builtin_isfinite", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_isnormal", ftype, BUILT_IN_ISNORMAL, "__builtin_isnormal", ATTR_CONST_NOTHROW_LEAF_LIST); ftype = build_function_type_list (integer_type_node, void_type_node, void_type_node, NULL_TREE); gfc_define_builtin ("__builtin_isunordered", ftype, BUILT_IN_ISUNORDERED, "__builtin_isunordered", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_islessequal", ftype, BUILT_IN_ISLESSEQUAL, 
"__builtin_islessequal", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_isgreaterequal", ftype, BUILT_IN_ISGREATEREQUAL, "__builtin_isgreaterequal", ATTR_CONST_NOTHROW_LEAF_LIST); ftype = build_function_type_list (integer_type_node, float_type_node, NULL_TREE); gfc_define_builtin("__builtin_signbitf", ftype, BUILT_IN_SIGNBITF, "signbitf", ATTR_CONST_NOTHROW_LEAF_LIST); ftype = build_function_type_list (integer_type_node, double_type_node, NULL_TREE); gfc_define_builtin("__builtin_signbit", ftype, BUILT_IN_SIGNBIT, "signbit", ATTR_CONST_NOTHROW_LEAF_LIST); ftype = build_function_type_list (integer_type_node, long_double_type_node, NULL_TREE); gfc_define_builtin("__builtin_signbitl", ftype, BUILT_IN_SIGNBITL, "signbitl", ATTR_CONST_NOTHROW_LEAF_LIST); #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \ builtin_types[(int) ENUM] = VALUE; #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \ builtin_types[(int) ENUM] \ = build_function_type_list (builtin_types[(int) RETURN], \ NULL_TREE); #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \ builtin_types[(int) ENUM] \ = build_function_type_list (builtin_types[(int) RETURN], \ builtin_types[(int) ARG1], \ NULL_TREE); #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \ builtin_types[(int) ENUM] \ = build_function_type_list (builtin_types[(int) RETURN], \ builtin_types[(int) ARG1], \ builtin_types[(int) ARG2], \ NULL_TREE); #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \ builtin_types[(int) ENUM] \ = build_function_type_list (builtin_types[(int) RETURN], \ builtin_types[(int) ARG1], \ builtin_types[(int) ARG2], \ builtin_types[(int) ARG3], \ NULL_TREE); #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \ builtin_types[(int) ENUM] \ = build_function_type_list (builtin_types[(int) RETURN], \ builtin_types[(int) ARG1], \ builtin_types[(int) ARG2], \ builtin_types[(int) ARG3], \ builtin_types[(int) ARG4], \ NULL_TREE); #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \ 
builtin_types[(int) ENUM] \ = build_function_type_list (builtin_types[(int) RETURN], \ builtin_types[(int) ARG1], \ builtin_types[(int) ARG2], \ builtin_types[(int) ARG3], \ builtin_types[(int) ARG4], \ builtin_types[(int) ARG5], \ NULL_TREE); #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ ARG6) \ builtin_types[(int) ENUM] \ = build_function_type_list (builtin_types[(int) RETURN], \ builtin_types[(int) ARG1], \ builtin_types[(int) ARG2], \ builtin_types[(int) ARG3], \ builtin_types[(int) ARG4], \ builtin_types[(int) ARG5], \ builtin_types[(int) ARG6], \ NULL_TREE); #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ ARG6, ARG7) \ builtin_types[(int) ENUM] \ = build_function_type_list (builtin_types[(int) RETURN], \ builtin_types[(int) ARG1], \ builtin_types[(int) ARG2], \ builtin_types[(int) ARG3], \ builtin_types[(int) ARG4], \ builtin_types[(int) ARG5], \ builtin_types[(int) ARG6], \ builtin_types[(int) ARG7], \ NULL_TREE); #define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ ARG6, ARG7, ARG8) \ builtin_types[(int) ENUM] \ = build_function_type_list (builtin_types[(int) RETURN], \ builtin_types[(int) ARG1], \ builtin_types[(int) ARG2], \ builtin_types[(int) ARG3], \ builtin_types[(int) ARG4], \ builtin_types[(int) ARG5], \ builtin_types[(int) ARG6], \ builtin_types[(int) ARG7], \ builtin_types[(int) ARG8], \ NULL_TREE); #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \ builtin_types[(int) ENUM] \ = build_varargs_function_type_list (builtin_types[(int) RETURN], \ NULL_TREE); #define DEF_POINTER_TYPE(ENUM, TYPE) \ builtin_types[(int) ENUM] \ = build_pointer_type (builtin_types[(int) TYPE]); #include "types.def" #undef DEF_PRIMITIVE_TYPE #undef DEF_FUNCTION_TYPE_0 #undef DEF_FUNCTION_TYPE_1 #undef DEF_FUNCTION_TYPE_2 #undef DEF_FUNCTION_TYPE_3 #undef DEF_FUNCTION_TYPE_4 #undef DEF_FUNCTION_TYPE_5 #undef DEF_FUNCTION_TYPE_6 #undef DEF_FUNCTION_TYPE_7 #undef DEF_FUNCTION_TYPE_8 #undef 
DEF_FUNCTION_TYPE_VAR_0 #undef DEF_POINTER_TYPE builtin_types[(int) BT_LAST] = NULL_TREE; /* Initialize synchronization builtins. */ #undef DEF_SYNC_BUILTIN #define DEF_SYNC_BUILTIN(code, name, type, attr) \ gfc_define_builtin (name, builtin_types[type], code, name, \ attr); #include "../sync-builtins.def" #undef DEF_SYNC_BUILTIN if (gfc_option.gfc_flag_openmp || gfc_option.gfc_flag_openmp_simd || flag_tree_parallelize_loops) { #undef DEF_GOMP_BUILTIN #define DEF_GOMP_BUILTIN(code, name, type, attr) \ gfc_define_builtin ("__builtin_" name, builtin_types[type], \ code, name, attr); #include "../omp-builtins.def" #undef DEF_GOMP_BUILTIN } gfc_define_builtin ("__builtin_trap", builtin_types[BT_FN_VOID], BUILT_IN_TRAP, NULL, ATTR_NOTHROW_LEAF_LIST); TREE_THIS_VOLATILE (builtin_decl_explicit (BUILT_IN_TRAP)) = 1; ftype = build_varargs_function_type_list (ptr_type_node, const_ptr_type_node, size_type_node, NULL_TREE); gfc_define_builtin ("__builtin_assume_aligned", ftype, BUILT_IN_ASSUME_ALIGNED, "__builtin_assume_aligned", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__emutls_get_address", builtin_types[BT_FN_PTR_PTR], BUILT_IN_EMUTLS_GET_ADDRESS, "__emutls_get_address", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__emutls_register_common", builtin_types[BT_FN_VOID_PTR_WORD_WORD_PTR], BUILT_IN_EMUTLS_REGISTER_COMMON, "__emutls_register_common", ATTR_NOTHROW_LEAF_LIST); build_common_builtin_nodes (); targetm.init_builtins (); }
gcc_type_get_pointer (gcc_type node)
{
  /* Build the GCC pointer-to type for the wrapped tree and hand back
     a private gcc_type wrapper around it.  */
  tree pointee = node.inner;
  tree pointer_type = build_pointer_type (pointee);
  return gcc_private_make_pointer_type (pointer_type);
}
/* Synthesize a CALL_EXPR and a TRY_FINALLY_EXPR, for this chain of
   _DECLs if appropriate.  Arrange to call the __mf_register function
   now, and the __mf_unregister function later for each.

   DECL is the head of a TREE_CHAIN of declarations; STMT_LIST is the
   statement list of the enclosing scope.  On return, *STMT_LIST may
   have been wrapped in a TRY_FINALLY_EXPR so that the matching
   __mf_unregister calls run on every exit path.  */
static void
mx_register_decls (tree decl, tree *stmt_list)
{
  tree finally_stmts = NULL_TREE;
  tree_stmt_iterator initially_stmts = tsi_start (*stmt_list);

  while (decl != NULL_TREE)
    {
      if (mf_decl_eligible_p (decl)
          /* Not already processed.  */
          && ! mf_marked_p (decl)
          /* Automatic variable.  */
          && ! DECL_EXTERNAL (decl)
          && ! TREE_STATIC (decl))
        {
          tree size = NULL_TREE, variable_name;
          tree unregister_fncall, unregister_fncall_params;
          tree register_fncall, register_fncall_params;

          size = convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (decl)));

          /* (& VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK) */
          unregister_fncall_params =
            tree_cons (NULL_TREE,
                       convert (ptr_type_node,
                                mf_mark (build1 (ADDR_EXPR,
                                                 build_pointer_type
                                                 (TREE_TYPE (decl)),
                                                 decl))),
                       tree_cons (NULL_TREE,
                                  size,
                                  tree_cons (NULL_TREE,
                                             /* __MF_TYPE_STACK */
                                             build_int_cst (NULL_TREE, 3),
                                             NULL_TREE)));

          /* __mf_unregister (...) */
          unregister_fncall
            = build_function_call_expr (mf_unregister_fndecl,
                                        unregister_fncall_params);

          /* (& VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK, "name") */
          variable_name = mf_varname_tree (decl);
          register_fncall_params =
            tree_cons (NULL_TREE,
                       convert (ptr_type_node,
                                mf_mark (build1 (ADDR_EXPR,
                                                 build_pointer_type
                                                 (TREE_TYPE (decl)),
                                                 decl))),
                       tree_cons (NULL_TREE,
                                  size,
                                  tree_cons (NULL_TREE,
                                             /* __MF_TYPE_STACK */
                                             build_int_cst (NULL_TREE, 3),
                                             tree_cons (NULL_TREE,
                                                        variable_name,
                                                        NULL_TREE))));

          /* __mf_register (...) */
          register_fncall
            = build_function_call_expr (mf_register_fndecl,
                                        register_fncall_params);

          /* Accumulate the two calls.  */
          /* ??? Set EXPR_LOCATION.  */
          /* Fixed: the original text read "gimplify_stmt (®ister_fncall)"
             — an encoding corruption of "&register_fncall" ("&reg" had
             been turned into the (R) sign).  */
          gimplify_stmt (&register_fncall);
          gimplify_stmt (&unregister_fncall);

          /* Add the __mf_register call at the current appending point.  */
          if (tsi_end_p (initially_stmts))
            warning (0, "mudflap cannot track %qs in stub function",
                     IDENTIFIER_POINTER (DECL_NAME (decl)));
          else
            {
              tsi_link_before (&initially_stmts, register_fncall,
                               TSI_SAME_STMT);
              /* Accumulate the FINALLY piece.  */
              append_to_statement_list (unregister_fncall, &finally_stmts);
            }
          mf_mark (decl);
        }

      decl = TREE_CHAIN (decl);
    }

  /* Actually, (initially_stmts!=NULL) <=> (finally_stmts!=NULL) */
  if (finally_stmts != NULL_TREE)
    {
      /* Wrap the existing statement list so the unregister calls run
         on every exit path out of this scope.  */
      tree t = build2 (TRY_FINALLY_EXPR, void_type_node,
                       *stmt_list, finally_stmts);
      *stmt_list = NULL;
      append_to_statement_list (t, stmt_list);
    }
}
/* Instrument a load from a bool or enum lvalue at *GSI (an assignment
   whose RHS is the memory reference) so that -fsanitize=bool/enum can
   report out-of-range values: the load is redone through an unsigned
   integer type of the same mode, range-checked, and on failure
   __ubsan_handle_load_invalid_value is called in a new block.  */
static void
instrument_bool_enum_load (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree rhs = gimple_assign_rhs1 (stmt);
  tree type = TREE_TYPE (rhs);
  tree minv = NULL_TREE, maxv = NULL_TREE;

  /* Determine the valid value range [minv, maxv] for the loaded type;
     bail out if this load is not one we instrument.  */
  if (TREE_CODE (type) == BOOLEAN_TYPE && (flag_sanitize & SANITIZE_BOOL))
    {
      minv = boolean_false_node;
      maxv = boolean_true_node;
    }
  else if (TREE_CODE (type) == ENUMERAL_TYPE
           && (flag_sanitize & SANITIZE_ENUM)
           && TREE_TYPE (type) != NULL_TREE
           && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
           /* Only worthwhile when the enum's precision is narrower than
              its mode, i.e. some bit patterns of the storage are
              outside the declared range.  */
           && (TYPE_PRECISION (TREE_TYPE (type))
               < GET_MODE_PRECISION (TYPE_MODE (type))))
    {
      minv = TYPE_MIN_VALUE (TREE_TYPE (type));
      maxv = TYPE_MAX_VALUE (TREE_TYPE (type));
    }
  else
    return;

  int modebitsize = GET_MODE_BITSIZE (TYPE_MODE (type));
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int volatilep = 0, unsignedp = 0;
  tree base = get_inner_reference (rhs, &bitsize, &bitpos, &offset, &mode,
                                   &unsignedp, &volatilep, false);
  /* Unsigned type of the same mode, used to reload the raw bits.  */
  tree utype = build_nonstandard_integer_type (modebitsize, 1);

  /* Give up on hard-register variables, bit-fields / misaligned or
     odd-sized accesses, and non-SSA destinations.  */
  if ((TREE_CODE (base) == VAR_DECL && DECL_HARD_REGISTER (base))
      || (bitpos % modebitsize) != 0
      || bitsize != modebitsize
      || GET_MODE_BITSIZE (TYPE_MODE (utype)) != modebitsize
      || TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
    return;

  location_t loc = gimple_location (stmt);
  tree ptype = build_pointer_type (TREE_TYPE (rhs));
  tree atype = reference_alias_ptr_type (rhs);
  /* Take the address of the reference ...  */
  gimple g = gimple_build_assign (make_ssa_name (ptype, NULL),
                                  build_fold_addr_expr (rhs));
  gimple_set_location (g, loc);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  /* ... and re-load the same memory as the unsigned UTYPE, preserving
     the original alias type so aliasing info is not lost.  */
  tree mem = build2 (MEM_REF, utype, gimple_assign_lhs (g),
                     build_int_cst (atype, 0));
  tree urhs = make_ssa_name (utype, NULL);
  g = gimple_build_assign (urhs, mem);
  gimple_set_location (g, loc);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  minv = fold_convert (utype, minv);
  maxv = fold_convert (utype, maxv);
  /* Normalize the range to start at zero so a single unsigned
     comparison (urhs - minv > maxv - minv) checks both bounds.  */
  if (!integer_zerop (minv))
    {
      g = gimple_build_assign_with_ops (MINUS_EXPR,
                                        make_ssa_name (utype, NULL),
                                        urhs, minv);
      gimple_set_location (g, loc);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  gimple_stmt_iterator gsi2 = *gsi;
  basic_block then_bb, fallthru_bb;
  /* Split the block: THEN_BB holds the diagnostic call, FALLTHRU_BB
     continues with the original statement.  */
  *gsi = create_cond_insert_point (gsi, true, false, true, &then_bb,
                                   &fallthru_bb);
  /* NOTE(review): when minv == 0, G is still the reload assignment, so
     gimple_assign_lhs (g) is URHS — the comparison operand is correct
     in both cases.  */
  g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
                         int_const_binop (MINUS_EXPR, maxv, minv),
                         NULL_TREE, NULL_TREE);
  gimple_set_location (g, loc);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  /* Rewrite the original load to just convert URHS back to TYPE.  */
  gimple_assign_set_rhs_with_ops (&gsi2, NOP_EXPR, urhs, NULL_TREE);
  update_stmt (stmt);

  /* Build the static descriptor passed to the ubsan runtime.  */
  tree data = ubsan_create_data ("__ubsan_invalid_value_data", loc, NULL,
                                 ubsan_type_descriptor (type, false),
                                 NULL_TREE);
  data = build_fold_addr_expr_loc (loc, data);
  tree fn = builtin_decl_explicit (BUILT_IN_UBSAN_HANDLE_LOAD_INVALID_VALUE);

  /* Emit the runtime call in the error block.  */
  gsi2 = gsi_after_labels (then_bb);
  tree val = force_gimple_operand_gsi (&gsi2, ubsan_encode_value (urhs),
                                       true, NULL_TREE, true, GSI_SAME_STMT);
  g = gimple_build_call (fn, 2, data, val);
  gimple_set_location (g, loc);
  gsi_insert_before (&gsi2, g, GSI_SAME_STMT);
}