static struct pointer_set_t *
suggest_attribute (int option, tree decl, bool known_finite,
                   struct pointer_set_t *warned_about,
                   const char *attrib_name)
{
  if (!option_enabled (option, &global_options))
    return warned_about;
  if (TREE_THIS_VOLATILE (decl)
      || (known_finite && function_always_visible_to_compiler_p (decl)))
    return warned_about;

  if (!warned_about)
    warned_about = pointer_set_create ();
  if (pointer_set_contains (warned_about, decl))
    return warned_about;
  pointer_set_insert (warned_about, decl);
  warning_at (DECL_SOURCE_LOCATION (decl), option,
              known_finite
              ? _("function might be candidate for attribute %<%s%>")
              : _("function might be candidate for attribute %<%s%>"
                  " if it is known to return normally"), attrib_name);
  return warned_about;
}
tree
prepare_eh_table_type (tree type)
{
  tree exp;
  tree *slot;
  const char *name;
  char *buf;
  tree decl;
  tree utf8_ref;

  /* The "type" (match_info) in a (Java) exception table is a pointer to:
     a) NULL - meaning match any type in a try-finally.
     b) a pointer to a pointer to a class.
     c) a pointer to a pointer to a utf8_ref.  The pointer is
        rewritten to point to the appropriate class.  */

  if (type == NULL_TREE)
    return NULL_TREE;

  if (TYPE_TO_RUNTIME_MAP (output_class) == NULL)
    TYPE_TO_RUNTIME_MAP (output_class) = java_treetreehash_create (10);

  slot = java_treetreehash_new (TYPE_TO_RUNTIME_MAP (output_class), type);
  if (*slot != NULL)
    return TREE_VALUE (*slot);

  if (is_compiled_class (type) && !flag_indirect_dispatch)
    {
      name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
      buf = (char *) alloca (strlen (name) + 5);
      sprintf (buf, "%s_ref", name);
      decl = build_decl (input_location,
                         VAR_DECL, get_identifier (buf), ptr_type_node);
      TREE_STATIC (decl) = 1;
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;
      TREE_READONLY (decl) = 1;
      TREE_THIS_VOLATILE (decl) = 0;
      DECL_INITIAL (decl) = build_class_ref (type);
      layout_decl (decl, 0);
      pushdecl (decl);
      exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (decl)), decl);
    }
  else
    {
      utf8_ref = build_utf8_ref (DECL_NAME (TYPE_NAME (type)));
      name = IDENTIFIER_POINTER (DECL_NAME (TREE_OPERAND (utf8_ref, 0)));
      buf = (char *) alloca (strlen (name) + 5);
      sprintf (buf, "%s_ref", name);
      decl = build_decl (input_location,
                         VAR_DECL, get_identifier (buf), utf8const_ptr_type);
      TREE_STATIC (decl) = 1;
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;
      TREE_READONLY (decl) = 1;
      TREE_THIS_VOLATILE (decl) = 0;
      layout_decl (decl, 0);
      pushdecl (decl);
      exp = build1 (ADDR_EXPR, build_pointer_type (utf8const_ptr_type), decl);
      CONSTRUCTOR_APPEND_ELT (TYPE_CATCH_CLASSES (output_class),
                              NULL_TREE,
                              make_catch_class_record (exp, utf8_ref));
    }

  exp = convert (ptr_type_node, exp);

  *slot = tree_cons (type, exp, NULL_TREE);

  return exp;
}
static unsigned int
ipa_comdats (void)
{
  pointer_map<tree> map;
  pointer_map<symtab_node *> comdat_head_map;
  symtab_node *symbol;
  bool comdat_group_seen = false;
  symtab_node *first = (symtab_node *) (void *) 1;
  tree group;

  /* Start the dataflow by assigning comdat group to symbols that are in
     comdat groups already.  All other externally visible symbols must stay,
     we use ERROR_MARK_NODE as bottom for the propagation.  */
  FOR_EACH_DEFINED_SYMBOL (symbol)
    if (!symtab_real_symbol_p (symbol))
      ;
    else if ((group = symbol->get_comdat_group ()) != NULL)
      {
        *map.insert (symbol) = group;
        *comdat_head_map.insert (group) = symbol;
        comdat_group_seen = true;

        /* Mark the symbol so we won't waste time visiting it for
           dataflow.  */
        symbol->aux = (symtab_node *) (void *) 1;
      }
    /* See symbols that can not be privatized to comdats; that is externally
       visible symbols or otherwise used ones.  We also do not want to mangle
       user section names.  */
    else if (symbol->externally_visible
             || symbol->force_output
             || symbol->used_from_other_partition
             || TREE_THIS_VOLATILE (symbol->decl)
             || symbol->get_section ()
             || (TREE_CODE (symbol->decl) == FUNCTION_DECL
                 && (DECL_STATIC_CONSTRUCTOR (symbol->decl)
                     || DECL_STATIC_DESTRUCTOR (symbol->decl))))
      {
        *map.insert (symtab_alias_ultimate_target (symbol, NULL))
          = error_mark_node;

        /* Mark the symbol so we won't waste time visiting it for
           dataflow.  */
        symbol->aux = (symtab_node *) (void *) 1;
      }
    else
      {
        /* Enqueue symbol for dataflow.  */
        symbol->aux = first;
        first = symbol;
      }

  if (!comdat_group_seen)
    {
      FOR_EACH_DEFINED_SYMBOL (symbol)
        symbol->aux = NULL;
      return 0;
    }

  /* The actual dataflow.  */
  while (first != (void *) 1)
    {
      tree group = NULL;
      tree newgroup, *val;

      symbol = first;
      first = (symtab_node *) first->aux;

      /* Get current lattice value of SYMBOL.  */
      val = map.contains (symbol);
      if (val)
        group = *val;

      /* If it is bottom, there is nothing to do; do not clear AUX
         so we won't re-queue the symbol.  */
      if (group == error_mark_node)
        continue;

      newgroup = propagate_comdat_group (symbol, group, map);

      /* If nothing changed, proceed to next symbol.  */
      if (newgroup == group)
        {
          symbol->aux = NULL;
          continue;
        }

      /* Update lattice value and enqueue all references for
         re-visiting.  */
      gcc_assert (newgroup);
      if (val)
        *val = newgroup;
      else
        *map.insert (symbol) = newgroup;
      enqueue_references (&first, symbol);

      /* We may need to revisit the symbol unless it is BOTTOM.  */
      if (newgroup != error_mark_node)
        symbol->aux = NULL;
    }

  /* Finally assign symbols to the sections.  */
  FOR_EACH_DEFINED_SYMBOL (symbol)
    {
      symbol->aux = NULL;
      if (!symbol->get_comdat_group ()
          && !symbol->alias
          && symtab_real_symbol_p (symbol))
        {
          tree group = *map.contains (symbol);

          if (group == error_mark_node)
            continue;
          if (dump_file)
            {
              fprintf (dump_file, "Localizing symbol\n");
              dump_symtab_node (dump_file, symbol);
              fprintf (dump_file, "To group: %s\n",
                       IDENTIFIER_POINTER (group));
            }
          symtab_for_node_and_aliases (symbol, set_comdat_group,
                                       *comdat_head_map.contains (group),
                                       true);
        }
    }
  return 0;
}
static void gfc_init_builtin_functions (void) { enum builtin_type { #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME, #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME, #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME, #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME, #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME, #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME, #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME, #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ ARG6) NAME, #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ ARG6, ARG7) NAME, #define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ ARG6, ARG7, ARG8) NAME, #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME, #define DEF_POINTER_TYPE(NAME, TYPE) NAME, #include "types.def" #undef DEF_PRIMITIVE_TYPE #undef DEF_FUNCTION_TYPE_0 #undef DEF_FUNCTION_TYPE_1 #undef DEF_FUNCTION_TYPE_2 #undef DEF_FUNCTION_TYPE_3 #undef DEF_FUNCTION_TYPE_4 #undef DEF_FUNCTION_TYPE_5 #undef DEF_FUNCTION_TYPE_6 #undef DEF_FUNCTION_TYPE_7 #undef DEF_FUNCTION_TYPE_8 #undef DEF_FUNCTION_TYPE_VAR_0 #undef DEF_POINTER_TYPE BT_LAST }; tree mfunc_float[6]; tree mfunc_double[6]; tree mfunc_longdouble[6]; tree mfunc_cfloat[6]; tree mfunc_cdouble[6]; tree mfunc_clongdouble[6]; tree func_cfloat_float, func_float_cfloat; tree func_cdouble_double, func_double_cdouble; tree func_clongdouble_longdouble, func_longdouble_clongdouble; tree func_float_floatp_floatp; tree func_double_doublep_doublep; tree func_longdouble_longdoublep_longdoublep; tree ftype, ptype; tree builtin_types[(int) BT_LAST + 1]; build_builtin_fntypes (mfunc_float, float_type_node); build_builtin_fntypes (mfunc_double, double_type_node); build_builtin_fntypes (mfunc_longdouble, long_double_type_node); build_builtin_fntypes (mfunc_cfloat, complex_float_type_node); build_builtin_fntypes (mfunc_cdouble, complex_double_type_node); build_builtin_fntypes (mfunc_clongdouble, complex_long_double_type_node); func_cfloat_float = build_function_type_list (float_type_node, complex_float_type_node, NULL_TREE); func_float_cfloat = build_function_type_list (complex_float_type_node, float_type_node, NULL_TREE); func_cdouble_double = build_function_type_list (double_type_node, complex_double_type_node, NULL_TREE); func_double_cdouble = build_function_type_list (complex_double_type_node, double_type_node, NULL_TREE); func_clongdouble_longdouble = build_function_type_list (long_double_type_node, complex_long_double_type_node, NULL_TREE); func_longdouble_clongdouble = build_function_type_list (complex_long_double_type_node, long_double_type_node, NULL_TREE); ptype = build_pointer_type (float_type_node); func_float_floatp_floatp = build_function_type_list (void_type_node, ptype, ptype, NULL_TREE); ptype = build_pointer_type (double_type_node); func_double_doublep_doublep = build_function_type_list (void_type_node, ptype, ptype, NULL_TREE); ptype = build_pointer_type (long_double_type_node); func_longdouble_longdoublep_longdoublep = build_function_type_list (void_type_node, ptype, ptype, NULL_TREE); /* Non-math builtins are defined manually, so they're not included here. 
*/ #define OTHER_BUILTIN(ID,NAME,TYPE,CONST) #include "mathbuiltins.def" gfc_define_builtin ("__builtin_roundl", mfunc_longdouble[0], BUILT_IN_ROUNDL, "roundl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_round", mfunc_double[0], BUILT_IN_ROUND, "round", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_roundf", mfunc_float[0], BUILT_IN_ROUNDF, "roundf", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_truncl", mfunc_longdouble[0], BUILT_IN_TRUNCL, "truncl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_trunc", mfunc_double[0], BUILT_IN_TRUNC, "trunc", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_truncf", mfunc_float[0], BUILT_IN_TRUNCF, "truncf", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_cabsl", func_clongdouble_longdouble, BUILT_IN_CABSL, "cabsl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_cabs", func_cdouble_double, BUILT_IN_CABS, "cabs", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_cabsf", func_cfloat_float, BUILT_IN_CABSF, "cabsf", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_copysignl", mfunc_longdouble[1], BUILT_IN_COPYSIGNL, "copysignl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_copysign", mfunc_double[1], BUILT_IN_COPYSIGN, "copysign", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_copysignf", mfunc_float[1], BUILT_IN_COPYSIGNF, "copysignf", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_nextafterl", mfunc_longdouble[1], BUILT_IN_NEXTAFTERL, "nextafterl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_nextafter", mfunc_double[1], BUILT_IN_NEXTAFTER, "nextafter", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_nextafterf", mfunc_float[1], BUILT_IN_NEXTAFTERF, "nextafterf", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_frexpl", mfunc_longdouble[4], BUILT_IN_FREXPL, "frexpl", ATTR_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_frexp", mfunc_double[4], BUILT_IN_FREXP, "frexp", ATTR_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_frexpf", mfunc_float[4], BUILT_IN_FREXPF, "frexpf", ATTR_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_fabsl", mfunc_longdouble[0], BUILT_IN_FABSL, "fabsl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_fabs", mfunc_double[0], BUILT_IN_FABS, "fabs", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_fabsf", mfunc_float[0], BUILT_IN_FABSF, "fabsf", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_scalbnl", mfunc_longdouble[5], BUILT_IN_SCALBNL, "scalbnl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_scalbn", mfunc_double[5], BUILT_IN_SCALBN, "scalbn", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_scalbnf", mfunc_float[5], BUILT_IN_SCALBNF, "scalbnf", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_fmodl", mfunc_longdouble[1], BUILT_IN_FMODL, "fmodl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_fmod", mfunc_double[1], BUILT_IN_FMOD, "fmod", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_fmodf", mfunc_float[1], BUILT_IN_FMODF, "fmodf", ATTR_CONST_NOTHROW_LEAF_LIST); /* iround{f,,l}, lround{f,,l} and llround{f,,l} */ ftype = build_function_type_list (integer_type_node, float_type_node, NULL_TREE); gfc_define_builtin("__builtin_iroundf", ftype, BUILT_IN_IROUNDF, "iroundf", ATTR_CONST_NOTHROW_LEAF_LIST); ftype = build_function_type_list (long_integer_type_node, float_type_node, NULL_TREE); 
gfc_define_builtin ("__builtin_lroundf", ftype, BUILT_IN_LROUNDF, "lroundf", ATTR_CONST_NOTHROW_LEAF_LIST); ftype = build_function_type_list (long_long_integer_type_node, float_type_node, NULL_TREE); gfc_define_builtin ("__builtin_llroundf", ftype, BUILT_IN_LLROUNDF, "llroundf", ATTR_CONST_NOTHROW_LEAF_LIST); ftype = build_function_type_list (integer_type_node, double_type_node, NULL_TREE); gfc_define_builtin("__builtin_iround", ftype, BUILT_IN_IROUND, "iround", ATTR_CONST_NOTHROW_LEAF_LIST); ftype = build_function_type_list (long_integer_type_node, double_type_node, NULL_TREE); gfc_define_builtin ("__builtin_lround", ftype, BUILT_IN_LROUND, "lround", ATTR_CONST_NOTHROW_LEAF_LIST); ftype = build_function_type_list (long_long_integer_type_node, double_type_node, NULL_TREE); gfc_define_builtin ("__builtin_llround", ftype, BUILT_IN_LLROUND, "llround", ATTR_CONST_NOTHROW_LEAF_LIST); ftype = build_function_type_list (integer_type_node, long_double_type_node, NULL_TREE); gfc_define_builtin("__builtin_iroundl", ftype, BUILT_IN_IROUNDL, "iroundl", ATTR_CONST_NOTHROW_LEAF_LIST); ftype = build_function_type_list (long_integer_type_node, long_double_type_node, NULL_TREE); gfc_define_builtin ("__builtin_lroundl", ftype, BUILT_IN_LROUNDL, "lroundl", ATTR_CONST_NOTHROW_LEAF_LIST); ftype = build_function_type_list (long_long_integer_type_node, long_double_type_node, NULL_TREE); gfc_define_builtin ("__builtin_llroundl", ftype, BUILT_IN_LLROUNDL, "llroundl", ATTR_CONST_NOTHROW_LEAF_LIST); /* These are used to implement the ** operator. */ gfc_define_builtin ("__builtin_powl", mfunc_longdouble[1], BUILT_IN_POWL, "powl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_pow", mfunc_double[1], BUILT_IN_POW, "pow", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_powf", mfunc_float[1], BUILT_IN_POWF, "powf", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_cpowl", mfunc_clongdouble[1], BUILT_IN_CPOWL, "cpowl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_cpow", mfunc_cdouble[1], BUILT_IN_CPOW, "cpow", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_cpowf", mfunc_cfloat[1], BUILT_IN_CPOWF, "cpowf", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_powil", mfunc_longdouble[2], BUILT_IN_POWIL, "powil", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_powi", mfunc_double[2], BUILT_IN_POWI, "powi", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_powif", mfunc_float[2], BUILT_IN_POWIF, "powif", ATTR_CONST_NOTHROW_LEAF_LIST); if (targetm.libc_has_function (function_c99_math_complex)) { gfc_define_builtin ("__builtin_cbrtl", mfunc_longdouble[0], BUILT_IN_CBRTL, "cbrtl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_cbrt", mfunc_double[0], BUILT_IN_CBRT, "cbrt", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_cbrtf", mfunc_float[0], BUILT_IN_CBRTF, "cbrtf", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_cexpil", func_longdouble_clongdouble, BUILT_IN_CEXPIL, "cexpil", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_cexpi", func_double_cdouble, BUILT_IN_CEXPI, "cexpi", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_cexpif", func_float_cfloat, BUILT_IN_CEXPIF, "cexpif", ATTR_CONST_NOTHROW_LEAF_LIST); } if (targetm.libc_has_function (function_sincos)) { gfc_define_builtin ("__builtin_sincosl", func_longdouble_longdoublep_longdoublep, BUILT_IN_SINCOSL, "sincosl", ATTR_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_sincos", 
func_double_doublep_doublep, BUILT_IN_SINCOS, "sincos", ATTR_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_sincosf", func_float_floatp_floatp, BUILT_IN_SINCOSF, "sincosf", ATTR_NOTHROW_LEAF_LIST); } /* For LEADZ, TRAILZ, POPCNT and POPPAR. */ ftype = build_function_type_list (integer_type_node, unsigned_type_node, NULL_TREE); gfc_define_builtin ("__builtin_clz", ftype, BUILT_IN_CLZ, "__builtin_clz", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_ctz", ftype, BUILT_IN_CTZ, "__builtin_ctz", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_parity", ftype, BUILT_IN_PARITY, "__builtin_parity", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_popcount", ftype, BUILT_IN_POPCOUNT, "__builtin_popcount", ATTR_CONST_NOTHROW_LEAF_LIST); ftype = build_function_type_list (integer_type_node, long_unsigned_type_node, NULL_TREE); gfc_define_builtin ("__builtin_clzl", ftype, BUILT_IN_CLZL, "__builtin_clzl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_ctzl", ftype, BUILT_IN_CTZL, "__builtin_ctzl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_parityl", ftype, BUILT_IN_PARITYL, "__builtin_parityl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_popcountl", ftype, BUILT_IN_POPCOUNTL, "__builtin_popcountl", ATTR_CONST_NOTHROW_LEAF_LIST); ftype = build_function_type_list (integer_type_node, long_long_unsigned_type_node, NULL_TREE); gfc_define_builtin ("__builtin_clzll", ftype, BUILT_IN_CLZLL, "__builtin_clzll", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_ctzll", ftype, BUILT_IN_CTZLL, "__builtin_ctzll", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_parityll", ftype, BUILT_IN_PARITYLL, "__builtin_parityll", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_popcountll", ftype, BUILT_IN_POPCOUNTLL, "__builtin_popcountll", ATTR_CONST_NOTHROW_LEAF_LIST); /* Other builtin functions we use. 
*/ ftype = build_function_type_list (long_integer_type_node, long_integer_type_node, long_integer_type_node, NULL_TREE); gfc_define_builtin ("__builtin_expect", ftype, BUILT_IN_EXPECT, "__builtin_expect", ATTR_CONST_NOTHROW_LEAF_LIST); ftype = build_function_type_list (void_type_node, pvoid_type_node, NULL_TREE); gfc_define_builtin ("__builtin_free", ftype, BUILT_IN_FREE, "free", ATTR_NOTHROW_LEAF_LIST); ftype = build_function_type_list (pvoid_type_node, size_type_node, NULL_TREE); gfc_define_builtin ("__builtin_malloc", ftype, BUILT_IN_MALLOC, "malloc", ATTR_NOTHROW_LEAF_MALLOC_LIST); ftype = build_function_type_list (pvoid_type_node, size_type_node, size_type_node, NULL_TREE); gfc_define_builtin ("__builtin_calloc", ftype, BUILT_IN_CALLOC, "calloc", ATTR_NOTHROW_LEAF_MALLOC_LIST); DECL_IS_MALLOC (builtin_decl_explicit (BUILT_IN_CALLOC)) = 1; ftype = build_function_type_list (pvoid_type_node, size_type_node, pvoid_type_node, NULL_TREE); gfc_define_builtin ("__builtin_realloc", ftype, BUILT_IN_REALLOC, "realloc", ATTR_NOTHROW_LEAF_LIST); ftype = build_function_type_list (integer_type_node, void_type_node, NULL_TREE); gfc_define_builtin ("__builtin_isnan", ftype, BUILT_IN_ISNAN, "__builtin_isnan", ATTR_CONST_NOTHROW_LEAF_LIST); #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \ builtin_types[(int) ENUM] = VALUE; #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \ builtin_types[(int) ENUM] \ = build_function_type_list (builtin_types[(int) RETURN], \ NULL_TREE); #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \ builtin_types[(int) ENUM] \ = build_function_type_list (builtin_types[(int) RETURN], \ builtin_types[(int) ARG1], \ NULL_TREE); #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \ builtin_types[(int) ENUM] \ = build_function_type_list (builtin_types[(int) RETURN], \ builtin_types[(int) ARG1], \ builtin_types[(int) ARG2], \ NULL_TREE); #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \ builtin_types[(int) ENUM] \ = build_function_type_list (builtin_types[(int) RETURN], \ builtin_types[(int) ARG1], \ builtin_types[(int) ARG2], \ builtin_types[(int) ARG3], \ NULL_TREE); #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \ builtin_types[(int) ENUM] \ = build_function_type_list (builtin_types[(int) RETURN], \ builtin_types[(int) ARG1], \ builtin_types[(int) ARG2], \ builtin_types[(int) ARG3], \ builtin_types[(int) ARG4], \ NULL_TREE); #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \ builtin_types[(int) ENUM] \ = build_function_type_list (builtin_types[(int) RETURN], \ builtin_types[(int) ARG1], \ builtin_types[(int) ARG2], \ builtin_types[(int) ARG3], \ builtin_types[(int) ARG4], \ builtin_types[(int) ARG5], \ NULL_TREE); #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ ARG6) \ builtin_types[(int) ENUM] \ = build_function_type_list (builtin_types[(int) RETURN], \ builtin_types[(int) ARG1], \ builtin_types[(int) ARG2], \ builtin_types[(int) ARG3], \ builtin_types[(int) ARG4], \ builtin_types[(int) ARG5], \ builtin_types[(int) ARG6], \ NULL_TREE); #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ ARG6, ARG7) \ builtin_types[(int) ENUM] \ = build_function_type_list (builtin_types[(int) RETURN], \ builtin_types[(int) ARG1], \ builtin_types[(int) ARG2], \ builtin_types[(int) ARG3], \ builtin_types[(int) ARG4], \ builtin_types[(int) ARG5], \ builtin_types[(int) ARG6], \ builtin_types[(int) ARG7], \ NULL_TREE); #define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ ARG6, ARG7, ARG8) \ 
builtin_types[(int) ENUM] \ = build_function_type_list (builtin_types[(int) RETURN], \ builtin_types[(int) ARG1], \ builtin_types[(int) ARG2], \ builtin_types[(int) ARG3], \ builtin_types[(int) ARG4], \ builtin_types[(int) ARG5], \ builtin_types[(int) ARG6], \ builtin_types[(int) ARG7], \ builtin_types[(int) ARG8], \ NULL_TREE); #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \ builtin_types[(int) ENUM] \ = build_varargs_function_type_list (builtin_types[(int) RETURN], \ NULL_TREE); #define DEF_POINTER_TYPE(ENUM, TYPE) \ builtin_types[(int) ENUM] \ = build_pointer_type (builtin_types[(int) TYPE]); #include "types.def" #undef DEF_PRIMITIVE_TYPE #undef DEF_FUNCTION_TYPE_0 #undef DEF_FUNCTION_TYPE_1 #undef DEF_FUNCTION_TYPE_2 #undef DEF_FUNCTION_TYPE_3 #undef DEF_FUNCTION_TYPE_4 #undef DEF_FUNCTION_TYPE_5 #undef DEF_FUNCTION_TYPE_6 #undef DEF_FUNCTION_TYPE_7 #undef DEF_FUNCTION_TYPE_8 #undef DEF_FUNCTION_TYPE_VAR_0 #undef DEF_POINTER_TYPE builtin_types[(int) BT_LAST] = NULL_TREE; /* Initialize synchronization builtins. */ #undef DEF_SYNC_BUILTIN #define DEF_SYNC_BUILTIN(code, name, type, attr) \ gfc_define_builtin (name, builtin_types[type], code, name, \ attr); #include "../sync-builtins.def" #undef DEF_SYNC_BUILTIN if (gfc_option.gfc_flag_openmp || flag_tree_parallelize_loops) { #undef DEF_GOMP_BUILTIN #define DEF_GOMP_BUILTIN(code, name, type, attr) \ gfc_define_builtin ("__builtin_" name, builtin_types[type], \ code, name, attr); #include "../omp-builtins.def" #undef DEF_GOMP_BUILTIN } gfc_define_builtin ("__builtin_trap", builtin_types[BT_FN_VOID], BUILT_IN_TRAP, NULL, ATTR_NOTHROW_LEAF_LIST); TREE_THIS_VOLATILE (builtin_decl_explicit (BUILT_IN_TRAP)) = 1; gfc_define_builtin ("__emutls_get_address", builtin_types[BT_FN_PTR_PTR], BUILT_IN_EMUTLS_GET_ADDRESS, "__emutls_get_address", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__emutls_register_common", builtin_types[BT_FN_VOID_PTR_WORD_WORD_PTR], BUILT_IN_EMUTLS_REGISTER_COMMON, "__emutls_register_common", ATTR_NOTHROW_LEAF_LIST); build_common_builtin_nodes (); targetm.init_builtins (); }
static const char *
gen_decl (tree decl, int is_func_definition, formals_style style)
{
  const char *ret_val;

  if (DECL_NAME (decl))
    ret_val = IDENTIFIER_POINTER (DECL_NAME (decl));
  else
    ret_val = "";

  /* If we are just generating a list of names of formal parameters, we can
     simply return the formal parameter name (with no typing information
     attached to it) now.  */
  if (style == k_and_r_names)
    return ret_val;

  /* Note that for the declaration of some entity (either a function or a
     data object, like for instance a parameter) if the entity itself was
     declared as either const or volatile, then const and volatile properties
     are associated with just the declaration of the entity, and *not* with
     the `type' of the entity.  Thus, for such declared entities, we have to
     generate the qualifiers here.  */
  if (TREE_THIS_VOLATILE (decl))
    ret_val = concat ("volatile ", ret_val, NULL);
  if (TREE_READONLY (decl))
    ret_val = concat ("const ", ret_val, NULL);

  data_type = "";

  /* For FUNCTION_DECL nodes, there are two possible cases here.  First, if
     this FUNCTION_DECL node was generated from a function "definition", then
     we will have a list of DECL_NODE's, one for each of the function's
     formal parameters.  In this case, we can print out not only the types of
     each formal, but also each formal's name.  In the second case, this
     FUNCTION_DECL node came from an actual function declaration (and *not*
     a definition).  In this case, we do nothing here because the formal
     argument type-list will be output later, when the "type" of the function
     is added to the string we are building.  Note that the ANSI-style formal
     parameter list is considered to be a (suffix) part of the "type" of the
     function.  */
  if (TREE_CODE (decl) == FUNCTION_DECL && is_func_definition)
    {
      ret_val = concat (ret_val, gen_formal_list_for_func_def (decl, ansi),
                        NULL);

      /* Since we have already added in the formals list stuff, here we don't
         add the whole "type" of the function we are considering (which would
         include its parameter-list info), rather, we only add in the "type"
         of the "type" of the function, which is really just the return-type
         of the function (and does not include the parameter list info).  */
      ret_val = gen_type (ret_val, TREE_TYPE (TREE_TYPE (decl)), style);
    }
  else
    ret_val = gen_type (ret_val, TREE_TYPE (decl), style);

  ret_val = affix_data_type (ret_val);

  if (TREE_CODE (decl) != FUNCTION_DECL && C_DECL_REGISTER (decl))
    ret_val = concat ("register ", ret_val, NULL);
  if (TREE_PUBLIC (decl))
    ret_val = concat ("extern ", ret_val, NULL);
  if (TREE_CODE (decl) == FUNCTION_DECL && !TREE_PUBLIC (decl))
    ret_val = concat ("static ", ret_val, NULL);

  return ret_val;
}
static inline void
check_decl (funct_state local, tree t, bool checking_write, bool ipa)
{
  /* Do not want to do anything with volatile except mark any
     function that uses one to be not const or pure.  */
  if (TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, " Volatile operand is not const/pure");
      return;
    }

  /* Do not care about a local automatic that is not static.  */
  if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
    return;

  /* If the variable has the "used" attribute, treat it as if it had been
     touched by the devil.  */
  if (DECL_PRESERVE_P (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, " Used static/global variable is not const/pure\n");
      return;
    }

  /* In IPA mode we are not interested in checking actual loads and stores;
     they will be processed at propagation time using ipa_ref.  */
  if (ipa)
    return;

  /* Since we have dealt with the locals and params cases above, if we
     are CHECKING_WRITE, this cannot be a pure or constant function.  */
  if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, " static/global memory write is not const/pure\n");
      return;
    }

  if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
    {
      /* Readonly reads are safe.  */
      if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
        return; /* Read of a constant, do not change the function state.  */
      else
        {
          if (dump_file)
            fprintf (dump_file, " global memory read is not const\n");
          /* Just a regular read.  */
          if (local->pure_const_state == IPA_CONST)
            local->pure_const_state = IPA_PURE;
        }
    }
  else
    {
      /* Compilation level statics can be read if they are readonly
         variables.  */
      if (TREE_READONLY (t))
        return;

      if (dump_file)
        fprintf (dump_file, " static memory read is not const\n");
      /* Just a regular read.  */
      if (local->pure_const_state == IPA_CONST)
        local->pure_const_state = IPA_PURE;
    }
}
void
xml_type(tree t, tree opt_decl, int indent, FILE *out)
{
    tree a = NULL;

    if (AGGREGATE_TYPE_P(t))
        return xml_type_ref(t, indent, out);

    switch (TREE_CODE(t)) {
    case POINTER_TYPE:
        fprintf(out, "%s<addr-of", spc(indent));
        xml_type_quals(TYPE_QUALS(t), out);
        xml_type_name(TYPE_NAME(t), out);
        fprintf(out, ">\n");
        xml_type(TREE_TYPE(t), NULL, indent + INDENT, out);
        fprintf(out, "%s</addr-of>\n", spc(indent));
        break;

    case REAL_TYPE:
        fprintf(out, "%s<float", spc(indent));
        xml_type_quals(TYPE_QUALS(t), out);
        xml_type_name(TYPE_NAME(t), out);
        fprintf(out, " precision='%d'", TYPE_PRECISION(t));
        fprintf(out, " />\n");
        break;

    case INTEGER_TYPE:
        fprintf(out, "%s<integer", spc(indent));
        xml_type_quals(TYPE_QUALS(t), out);
        xml_type_name(TYPE_NAME(t), out);
        if (TYPE_UNSIGNED(t))
            fprintf(out, " unsigned='1'");
        fprintf(out, " precision='%d'", TYPE_PRECISION(t));
        /* TREE_TYPE here indicates that there is an interesting domain. */
        if (TREE_TYPE(t) && TYPE_MIN_VALUE(t))
            fprintf(out, (TYPE_UNSIGNED(t) ? " min='%llu'" : " min='%lld'"),
                    double_int_to_ll(TREE_INT_CST(TYPE_MIN_VALUE(t))));
        if (TREE_TYPE(t) && TYPE_MAX_VALUE(t))
            fprintf(out, (TYPE_UNSIGNED(t) ? " max='%llu'" : " max='%lld'"),
                    double_int_to_ll(TREE_INT_CST(TYPE_MAX_VALUE(t))));
        fprintf(out, " />\n");
        break;

    case VOID_TYPE:
        fprintf(out, "%s<void />\n", spc(indent));
        break;

    case BOOLEAN_TYPE:
        fprintf(out, "%s<boolean />\n", spc(indent));
        break;

    case RESULT_DECL:
        fprintf(out, "%s<result />\n", spc(indent));
        break;

    case ENUMERAL_TYPE:
        /* TODO: finish this (output tags). */
        fprintf(out, "%s<enum", spc(indent));
        xml_type_quals(TYPE_QUALS(t), out);
        xml_type_name(TYPE_NAME(t), out);
        fprintf(out, " />\n");
        break;

    case METHOD_TYPE:
    case FUNCTION_TYPE:
        fprintf(out, "%s<function", spc(indent));
        xml_type_quals(TYPE_QUALS(t), out);
        xml_type_name(TYPE_NAME(t), out);
        xml_type_attribs(TYPE_ATTRIBUTES(t),
                         (opt_decl && TREE_THIS_VOLATILE(opt_decl))
                             ? "noreturn" : NULL,
                         out);
        indent += INDENT;
        fprintf(out, ">\n%s<return>\n", spc(indent));
        xml_type(TREE_TYPE(t), NULL, indent + INDENT, out);
        fprintf(out, "%s</return>\n", spc(indent));
        /* varargs if last is not void. */
        for (a = TYPE_ARG_TYPES(t); a && TREE_CHAIN(a); a = TREE_CHAIN(a))
            ;
        fprintf(out, "%s<arguments %s>\n", spc(indent),
                (!a || TREE_CODE(TREE_VALUE(a)) == VOID_TYPE)
                    ? "" : "varargs='1' ");
        for (a = TYPE_ARG_TYPES(t); a; a = TREE_CHAIN(a)) {
            xml_type(TREE_VALUE(a), NULL, indent + INDENT, out);
        }
        fprintf(out, "%s</arguments>\n", spc(indent));
        indent -= INDENT;
        fprintf(out, "%s</function>\n", spc(indent));
        break;

    case REFERENCE_TYPE:
        fprintf(stderr,
                "lighthouse warning: ignoring unhandled tree type '%s'.\n",
                tree_code_name[TREE_CODE(t)]);
        break;

    default:
        fprintf(stderr, "failing: unhandled tree type %s\n",
                tree_code_name[TREE_CODE(t)]);
        assert(0);
        abort();
    }
}
static void output_gimple_stmt (struct output_block *ob, gimple stmt) { unsigned i; enum gimple_code code; enum LTO_tags tag; struct bitpack_d bp; histogram_value hist; /* Emit identifying tag. */ code = gimple_code (stmt); tag = lto_gimple_code_to_tag (code); streamer_write_record_start (ob, tag); /* Emit the tuple header. */ bp = bitpack_create (ob->main_stream); bp_pack_var_len_unsigned (&bp, gimple_num_ops (stmt)); bp_pack_value (&bp, gimple_no_warning_p (stmt), 1); if (is_gimple_assign (stmt)) bp_pack_value (&bp, gimple_assign_nontemporal_move_p (stmt), 1); bp_pack_value (&bp, gimple_has_volatile_ops (stmt), 1); hist = gimple_histogram_value (cfun, stmt); bp_pack_value (&bp, hist != NULL, 1); bp_pack_var_len_unsigned (&bp, stmt->gsbase.subcode); /* Emit location information for the statement. */ stream_output_location (ob, &bp, LOCATION_LOCUS (gimple_location (stmt))); streamer_write_bitpack (&bp); /* Emit the lexical block holding STMT. */ stream_write_tree (ob, gimple_block (stmt), true); /* Emit the operands. */ switch (gimple_code (stmt)) { case GIMPLE_RESX: streamer_write_hwi (ob, gimple_resx_region (stmt)); break; case GIMPLE_EH_MUST_NOT_THROW: stream_write_tree (ob, gimple_eh_must_not_throw_fndecl (stmt), true); break; case GIMPLE_EH_DISPATCH: streamer_write_hwi (ob, gimple_eh_dispatch_region (stmt)); break; case GIMPLE_ASM: streamer_write_uhwi (ob, gimple_asm_ninputs (stmt)); streamer_write_uhwi (ob, gimple_asm_noutputs (stmt)); streamer_write_uhwi (ob, gimple_asm_nclobbers (stmt)); streamer_write_uhwi (ob, gimple_asm_nlabels (stmt)); streamer_write_string (ob, ob->main_stream, gimple_asm_string (stmt), true); /* Fallthru */ case GIMPLE_ASSIGN: case GIMPLE_CALL: case GIMPLE_RETURN: case GIMPLE_SWITCH: case GIMPLE_LABEL: case GIMPLE_COND: case GIMPLE_GOTO: case GIMPLE_DEBUG: for (i = 0; i < gimple_num_ops (stmt); i++) { tree op = gimple_op (stmt, i); tree *basep = NULL; /* Wrap all uses of non-automatic variables inside MEM_REFs so that we do not have to deal with type mismatches on merged symbols during IL read in. The first operand of GIMPLE_DEBUG must be a decl, not MEM_REF, though. */ if (op && (i || !is_gimple_debug (stmt))) { basep = &op; while (handled_component_p (*basep)) basep = &TREE_OPERAND (*basep, 0); if (TREE_CODE (*basep) == VAR_DECL && !auto_var_in_fn_p (*basep, current_function_decl) && !DECL_REGISTER (*basep)) { bool volatilep = TREE_THIS_VOLATILE (*basep); *basep = build2 (MEM_REF, TREE_TYPE (*basep), build_fold_addr_expr (*basep), build_int_cst (build_pointer_type (TREE_TYPE (*basep)), 0)); TREE_THIS_VOLATILE (*basep) = volatilep; } else basep = NULL; } stream_write_tree (ob, op, true); /* Restore the original base if we wrapped it inside a MEM_REF. */ if (basep) *basep = TREE_OPERAND (TREE_OPERAND (*basep, 0), 0); } if (is_gimple_call (stmt)) { if (gimple_call_internal_p (stmt)) streamer_write_enum (ob->main_stream, internal_fn, IFN_LAST, gimple_call_internal_fn (stmt)); else stream_write_tree (ob, gimple_call_fntype (stmt), true); } break; case GIMPLE_NOP: case GIMPLE_PREDICT: break; case GIMPLE_TRANSACTION: gcc_assert (gimple_transaction_body (stmt) == NULL); stream_write_tree (ob, gimple_transaction_label (stmt), true); break; default: gcc_unreachable (); } if (hist) stream_out_histogram_value (ob, hist); }
static gimple input_gimple_stmt (struct lto_input_block *ib, struct data_in *data_in, struct function *fn, enum LTO_tags tag) { gimple stmt; enum gimple_code code; unsigned HOST_WIDE_INT num_ops; size_t i; struct bitpack_d bp; bool has_hist; code = lto_tag_to_gimple_code (tag); /* Read the tuple header. */ bp = streamer_read_bitpack (ib); num_ops = bp_unpack_var_len_unsigned (&bp); stmt = gimple_alloc (code, num_ops); stmt->gsbase.no_warning = bp_unpack_value (&bp, 1); if (is_gimple_assign (stmt)) stmt->gsbase.nontemporal_move = bp_unpack_value (&bp, 1); stmt->gsbase.has_volatile_ops = bp_unpack_value (&bp, 1); has_hist = bp_unpack_value (&bp, 1); stmt->gsbase.subcode = bp_unpack_var_len_unsigned (&bp); /* Read location information. */ gimple_set_location (stmt, stream_input_location (&bp, data_in)); /* Read lexical block reference. */ gimple_set_block (stmt, stream_read_tree (ib, data_in)); /* Read in all the operands. */ switch (code) { case GIMPLE_RESX: gimple_resx_set_region (stmt, streamer_read_hwi (ib)); break; case GIMPLE_EH_MUST_NOT_THROW: gimple_eh_must_not_throw_set_fndecl (stmt, stream_read_tree (ib, data_in)); break; case GIMPLE_EH_DISPATCH: gimple_eh_dispatch_set_region (stmt, streamer_read_hwi (ib)); break; case GIMPLE_ASM: { /* FIXME lto. Move most of this into a new gimple_asm_set_string(). */ tree str; stmt->gimple_asm.ni = streamer_read_uhwi (ib); stmt->gimple_asm.no = streamer_read_uhwi (ib); stmt->gimple_asm.nc = streamer_read_uhwi (ib); stmt->gimple_asm.nl = streamer_read_uhwi (ib); str = streamer_read_string_cst (data_in, ib); stmt->gimple_asm.string = TREE_STRING_POINTER (str); } /* Fallthru */ case GIMPLE_ASSIGN: case GIMPLE_CALL: case GIMPLE_RETURN: case GIMPLE_SWITCH: case GIMPLE_LABEL: case GIMPLE_COND: case GIMPLE_GOTO: case GIMPLE_DEBUG: for (i = 0; i < num_ops; i++) { tree *opp, op = stream_read_tree (ib, data_in); gimple_set_op (stmt, i, op); if (!op) continue; opp = gimple_op_ptr (stmt, i); if (TREE_CODE (*opp) == ADDR_EXPR) opp = &TREE_OPERAND (*opp, 0); while (handled_component_p (*opp)) { if (TREE_CODE (*opp) == COMPONENT_REF) { /* Fixup FIELD_DECLs in COMPONENT_REFs, they are not handled by decl merging. */ tree field, type, tem; tree closest_match = NULL_TREE; field = TREE_OPERAND (*opp, 1); type = DECL_CONTEXT (field); for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem)) { if (TREE_CODE (tem) != FIELD_DECL) continue; if (tem == field) break; if (DECL_NONADDRESSABLE_P (tem) == DECL_NONADDRESSABLE_P (field) && gimple_compare_field_offset (tem, field)) { if (types_compatible_p (TREE_TYPE (tem), TREE_TYPE (field))) break; else closest_match = tem; } } /* In case of type mismatches across units we can fail to unify some types and thus not find a proper field-decl here. */ if (tem == NULL_TREE) { /* Thus, emit a ODR violation warning. 
*/ if (warning_at (gimple_location (stmt), 0, "use of type %<%E%> with two mismatching " "declarations at field %<%E%>", type, TREE_OPERAND (*opp, 1))) { if (TYPE_FIELDS (type)) inform (DECL_SOURCE_LOCATION (TYPE_FIELDS (type)), "original type declared here"); inform (DECL_SOURCE_LOCATION (TREE_OPERAND (*opp, 1)), "field in mismatching type declared here"); if (TYPE_NAME (TREE_TYPE (field)) && (TREE_CODE (TYPE_NAME (TREE_TYPE (field))) == TYPE_DECL)) inform (DECL_SOURCE_LOCATION (TYPE_NAME (TREE_TYPE (field))), "type of field declared here"); if (closest_match && TYPE_NAME (TREE_TYPE (closest_match)) && (TREE_CODE (TYPE_NAME (TREE_TYPE (closest_match))) == TYPE_DECL)) inform (DECL_SOURCE_LOCATION (TYPE_NAME (TREE_TYPE (closest_match))), "type of mismatching field declared here"); } /* And finally fixup the types. */ TREE_OPERAND (*opp, 0) = build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (*opp, 0)); } else TREE_OPERAND (*opp, 1) = tem; } else if ((TREE_CODE (*opp) == ARRAY_REF || TREE_CODE (*opp) == ARRAY_RANGE_REF) && (TREE_CODE (TREE_TYPE (TREE_OPERAND (*opp, 0))) != ARRAY_TYPE)) { /* And ARRAY_REFs to objects that had mismatched types during symbol merging to avoid ICEs. */ TREE_OPERAND (*opp, 0) = build1 (VIEW_CONVERT_EXPR, build_array_type (TREE_TYPE (*opp), NULL_TREE), TREE_OPERAND (*opp, 0)); } opp = &TREE_OPERAND (*opp, 0); } /* At LTO output time we wrap all global decls in MEM_REFs to allow seamless replacement with prevailing decls. Undo this here if the prevailing decl allows for this. ??? Maybe we should simply fold all stmts. */ if (TREE_CODE (*opp) == MEM_REF && TREE_CODE (TREE_OPERAND (*opp, 0)) == ADDR_EXPR && integer_zerop (TREE_OPERAND (*opp, 1)) && (TREE_THIS_VOLATILE (*opp) == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*opp, 0), 0))) && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*opp, 1))) && (TREE_TYPE (*opp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (*opp, 1)))) && (TREE_TYPE (*opp) == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*opp, 0), 0)))) *opp = TREE_OPERAND (TREE_OPERAND (*opp, 0), 0); } if (is_gimple_call (stmt)) { if (gimple_call_internal_p (stmt)) gimple_call_set_internal_fn (stmt, streamer_read_enum (ib, internal_fn, IFN_LAST)); else gimple_call_set_fntype (stmt, stream_read_tree (ib, data_in)); } break; case GIMPLE_NOP: case GIMPLE_PREDICT: break; case GIMPLE_TRANSACTION: gimple_transaction_set_label (stmt, stream_read_tree (ib, data_in)); break; default: internal_error ("bytecode stream: unknown GIMPLE statement tag %s", lto_tag_name (tag)); } /* Update the properties of symbols, SSA names and labels associated with STMT. */ if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL) { tree lhs = gimple_get_lhs (stmt); if (lhs && TREE_CODE (lhs) == SSA_NAME) SSA_NAME_DEF_STMT (lhs) = stmt; } else if (code == GIMPLE_LABEL) gcc_assert (emit_label_in_global_context_p (gimple_label_label (stmt)) || DECL_CONTEXT (gimple_label_label (stmt)) == fn->decl); else if (code == GIMPLE_ASM) { unsigned i; for (i = 0; i < gimple_asm_noutputs (stmt); i++) { tree op = TREE_VALUE (gimple_asm_output_op (stmt, i)); if (TREE_CODE (op) == SSA_NAME) SSA_NAME_DEF_STMT (op) = stmt; } } /* Reset alias information. */ if (code == GIMPLE_CALL) gimple_call_reset_alias_info (stmt); /* Mark the statement modified so its operand vectors can be filled in. */ gimple_set_modified (stmt, true); if (has_hist) stream_in_histogram_value (ib, stmt); return stmt; }
static void get_expr_operands (gimple stmt, tree *expr_p, int flags) { enum tree_code code; enum tree_code_class codeclass; tree expr = *expr_p; int uflags = opf_use; if (expr == NULL) return; if (is_gimple_debug (stmt)) uflags |= (flags & opf_no_vops); code = TREE_CODE (expr); codeclass = TREE_CODE_CLASS (code); switch (code) { case ADDR_EXPR: /* Taking the address of a variable does not represent a reference to it, but the fact that the statement takes its address will be of interest to some passes (e.g. alias resolution). */ if ((!(flags & opf_non_addressable) || (flags & opf_not_non_addressable)) && !is_gimple_debug (stmt)) mark_address_taken (TREE_OPERAND (expr, 0)); /* If the address is invariant, there may be no interesting variable references inside. */ if (is_gimple_min_invariant (expr)) return; /* Otherwise, there may be variables referenced inside but there should be no VUSEs created, since the referenced objects are not really accessed. The only operands that we should find here are ARRAY_REF indices which will always be real operands (GIMPLE does not allow non-registers as array indices). */ flags |= opf_no_vops; get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags | opf_not_non_addressable); return; case SSA_NAME: add_stmt_operand (expr_p, stmt, flags); return; case VAR_DECL: case PARM_DECL: case RESULT_DECL: add_stmt_operand (expr_p, stmt, flags); return; case DEBUG_EXPR_DECL: gcc_assert (gimple_debug_bind_p (stmt)); return; case MEM_REF: get_indirect_ref_operands (stmt, expr, flags, true); return; case TARGET_MEM_REF: get_tmr_operands (stmt, expr, flags); return; case ARRAY_REF: case ARRAY_RANGE_REF: case COMPONENT_REF: case REALPART_EXPR: case IMAGPART_EXPR: { if (TREE_THIS_VOLATILE (expr)) gimple_set_has_volatile_ops (stmt, true); get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); if (code == COMPONENT_REF) { if (TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1))) gimple_set_has_volatile_ops (stmt, true); get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags); } else if (code == ARRAY_REF || code == ARRAY_RANGE_REF) { get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags); get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags); get_expr_operands (stmt, &TREE_OPERAND (expr, 3), uflags); } return; } case WITH_SIZE_EXPR: /* WITH_SIZE_EXPR is a pass-through reference to its first argument, and an rvalue reference to its second argument. */ get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags); get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); return; case COND_EXPR: case VEC_COND_EXPR: get_expr_operands (stmt, &TREE_OPERAND (expr, 0), uflags); get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags); get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags); return; case CONSTRUCTOR: { /* General aggregate CONSTRUCTORs have been decomposed, but they are still in use as the COMPLEX_EXPR equivalent for vectors. 
*/ constructor_elt *ce; unsigned HOST_WIDE_INT idx; for (idx = 0; VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce); idx++) get_expr_operands (stmt, &ce->value, uflags); return; } case BIT_FIELD_REF: if (TREE_THIS_VOLATILE (expr)) gimple_set_has_volatile_ops (stmt, true); /* FALLTHRU */ case VIEW_CONVERT_EXPR: do_unary: get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); return; case COMPOUND_EXPR: case OBJ_TYPE_REF: case ASSERT_EXPR: do_binary: { get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags); return; } case DOT_PROD_EXPR: case REALIGN_LOAD_EXPR: case WIDEN_MULT_PLUS_EXPR: case WIDEN_MULT_MINUS_EXPR: case FMA_EXPR: { get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags); get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags); return; } case FUNCTION_DECL: case LABEL_DECL: case CONST_DECL: case CASE_LABEL_EXPR: /* Expressions that make no memory references. */ return; default: if (codeclass == tcc_unary) goto do_unary; if (codeclass == tcc_binary || codeclass == tcc_comparison) goto do_binary; if (codeclass == tcc_constant || codeclass == tcc_type) return; } /* If we get here, something has gone wrong. */ #ifdef ENABLE_CHECKING fprintf (stderr, "unhandled expression in get_expr_operands():\n"); debug_tree (expr); fputs ("\n", stderr); #endif gcc_unreachable (); }
/* Setter wrapper for the TREE_THIS_VOLATILE flag; returns the value
   stored.  (The original omitted the return statement despite the int
   return type.)  */
int
set_tree_this_volatile (tree t, int val)
{
  TREE_THIS_VOLATILE (t) = val;
  return val;
}
/* Getter wrapper for the TREE_THIS_VOLATILE flag.  */
int
get_tree_this_volatile (tree t)
{
  return TREE_THIS_VOLATILE (t);
}
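/* Minimal usage sketch for the two wrappers above (an illustration added
   here, not part of the original source).  It shows how a front end or
   plugin pass might mark a VAR_DECL volatile, assuming the usual
   GCC-internal environment (tree.h, input_location, get_identifier) is
   available; the helper name make_volatile_var is hypothetical.  Front ends
   typically set TREE_SIDE_EFFECTS alongside TREE_THIS_VOLATILE so that
   reads of the decl are not optimized away.  */
static tree
make_volatile_var (const char *name, tree type)
{
  tree decl = build_decl (input_location, VAR_DECL,
                          get_identifier (name), type);
  set_tree_this_volatile (decl, 1);  /* same as TREE_THIS_VOLATILE (decl) = 1 */
  TREE_SIDE_EFFECTS (decl) = 1;      /* keep uses of the decl live */
  return decl;
}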
static tree c_fully_fold_internal (tree expr, bool in_init, bool *maybe_const_operands, bool *maybe_const_itself, bool for_int_const) { tree ret = expr; enum tree_code code = TREE_CODE (expr); enum tree_code_class kind = TREE_CODE_CLASS (code); location_t loc = EXPR_LOCATION (expr); tree op0, op1, op2, op3; tree orig_op0, orig_op1, orig_op2; bool op0_const = true, op1_const = true, op2_const = true; bool op0_const_self = true, op1_const_self = true, op2_const_self = true; bool nowarning = TREE_NO_WARNING (expr); bool unused_p; source_range old_range; /* Constants, declarations, statements, errors, SAVE_EXPRs and anything else not counted as an expression cannot usefully be folded further at this point. */ if (!IS_EXPR_CODE_CLASS (kind) || kind == tcc_statement || code == SAVE_EXPR) return expr; if (IS_EXPR_CODE_CLASS (kind)) old_range = EXPR_LOCATION_RANGE (expr); /* Operands of variable-length expressions (function calls) have already been folded, as have __builtin_* function calls, and such expressions cannot occur in constant expressions. */ if (kind == tcc_vl_exp) { *maybe_const_operands = false; ret = fold (expr); goto out; } if (code == C_MAYBE_CONST_EXPR) { tree pre = C_MAYBE_CONST_EXPR_PRE (expr); tree inner = C_MAYBE_CONST_EXPR_EXPR (expr); if (C_MAYBE_CONST_EXPR_NON_CONST (expr)) *maybe_const_operands = false; if (C_MAYBE_CONST_EXPR_INT_OPERANDS (expr)) { *maybe_const_itself = false; inner = c_fully_fold_internal (inner, in_init, maybe_const_operands, maybe_const_itself, true); } if (pre && !in_init) ret = build2 (COMPOUND_EXPR, TREE_TYPE (expr), pre, inner); else ret = inner; goto out; } /* Assignment, increment, decrement, function call and comma operators, and statement expressions, cannot occur in constant expressions if evaluated / outside of sizeof. (Function calls were handled above, though VA_ARG_EXPR is treated like a function call here, and statement expressions are handled through C_MAYBE_CONST_EXPR to avoid folding inside them.) */ switch (code) { case MODIFY_EXPR: case PREDECREMENT_EXPR: case PREINCREMENT_EXPR: case POSTDECREMENT_EXPR: case POSTINCREMENT_EXPR: case COMPOUND_EXPR: *maybe_const_operands = false; break; case VA_ARG_EXPR: case TARGET_EXPR: case BIND_EXPR: case OBJ_TYPE_REF: *maybe_const_operands = false; ret = fold (expr); goto out; default: break; } /* Fold individual tree codes as appropriate. */ switch (code) { case COMPOUND_LITERAL_EXPR: /* Any non-constancy will have been marked in a containing C_MAYBE_CONST_EXPR; there is no more folding to do here. 
*/ goto out; case COMPONENT_REF: orig_op0 = op0 = TREE_OPERAND (expr, 0); op1 = TREE_OPERAND (expr, 1); op2 = TREE_OPERAND (expr, 2); op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands, maybe_const_itself, for_int_const); STRIP_TYPE_NOPS (op0); if (op0 != orig_op0) ret = build3 (COMPONENT_REF, TREE_TYPE (expr), op0, op1, op2); if (ret != expr) { TREE_READONLY (ret) = TREE_READONLY (expr); TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr); } goto out; case ARRAY_REF: orig_op0 = op0 = TREE_OPERAND (expr, 0); orig_op1 = op1 = TREE_OPERAND (expr, 1); op2 = TREE_OPERAND (expr, 2); op3 = TREE_OPERAND (expr, 3); op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands, maybe_const_itself, for_int_const); STRIP_TYPE_NOPS (op0); op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands, maybe_const_itself, for_int_const); STRIP_TYPE_NOPS (op1); op1 = decl_constant_value_for_optimization (op1); if (op0 != orig_op0 || op1 != orig_op1) ret = build4 (ARRAY_REF, TREE_TYPE (expr), op0, op1, op2, op3); if (ret != expr) { TREE_READONLY (ret) = TREE_READONLY (expr); TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (expr); TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr); } ret = fold (ret); goto out; case COMPOUND_EXPR: case MODIFY_EXPR: case PREDECREMENT_EXPR: case PREINCREMENT_EXPR: case POSTDECREMENT_EXPR: case POSTINCREMENT_EXPR: case PLUS_EXPR: case MINUS_EXPR: case MULT_EXPR: case POINTER_PLUS_EXPR: case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR: case TRUNC_MOD_EXPR: case RDIV_EXPR: case EXACT_DIV_EXPR: case LSHIFT_EXPR: case RSHIFT_EXPR: case BIT_IOR_EXPR: case BIT_XOR_EXPR: case BIT_AND_EXPR: case LT_EXPR: case LE_EXPR: case GT_EXPR: case GE_EXPR: case EQ_EXPR: case NE_EXPR: case COMPLEX_EXPR: case TRUTH_AND_EXPR: case TRUTH_OR_EXPR: case TRUTH_XOR_EXPR: case UNORDERED_EXPR: case ORDERED_EXPR: case UNLT_EXPR: case UNLE_EXPR: case UNGT_EXPR: case UNGE_EXPR: case UNEQ_EXPR: /* Binary operations evaluating both arguments (increment and decrement are binary internally in GCC). */ orig_op0 = op0 = TREE_OPERAND (expr, 0); orig_op1 = op1 = TREE_OPERAND (expr, 1); op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands, maybe_const_itself, for_int_const); STRIP_TYPE_NOPS (op0); if (code != MODIFY_EXPR && code != PREDECREMENT_EXPR && code != PREINCREMENT_EXPR && code != POSTDECREMENT_EXPR && code != POSTINCREMENT_EXPR) op0 = decl_constant_value_for_optimization (op0); /* The RHS of a MODIFY_EXPR was fully folded when building that expression for the sake of conversion warnings. */ if (code != MODIFY_EXPR) op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands, maybe_const_itself, for_int_const); STRIP_TYPE_NOPS (op1); op1 = decl_constant_value_for_optimization (op1); if (for_int_const && (TREE_CODE (op0) != INTEGER_CST || TREE_CODE (op1) != INTEGER_CST)) goto out; if (op0 != orig_op0 || op1 != orig_op1 || in_init) ret = in_init ? 
fold_build2_initializer_loc (loc, code, TREE_TYPE (expr), op0, op1) : fold_build2_loc (loc, code, TREE_TYPE (expr), op0, op1); else ret = fold (expr); if (TREE_OVERFLOW_P (ret) && !TREE_OVERFLOW_P (op0) && !TREE_OVERFLOW_P (op1)) overflow_warning (EXPR_LOC_OR_LOC (expr, input_location), ret); if (code == LSHIFT_EXPR && TREE_CODE (orig_op0) != INTEGER_CST && TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE && TREE_CODE (op0) == INTEGER_CST && c_inhibit_evaluation_warnings == 0 && tree_int_cst_sgn (op0) < 0) warning_at (loc, OPT_Wshift_negative_value, "left shift of negative value"); if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR) && TREE_CODE (orig_op1) != INTEGER_CST && TREE_CODE (op1) == INTEGER_CST && (TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE || TREE_CODE (TREE_TYPE (orig_op0)) == FIXED_POINT_TYPE) && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE && c_inhibit_evaluation_warnings == 0) { if (tree_int_cst_sgn (op1) < 0) warning_at (loc, OPT_Wshift_count_negative, (code == LSHIFT_EXPR ? G_("left shift count is negative") : G_("right shift count is negative"))); else if (compare_tree_int (op1, TYPE_PRECISION (TREE_TYPE (orig_op0))) >= 0) warning_at (loc, OPT_Wshift_count_overflow, (code == LSHIFT_EXPR ? G_("left shift count >= width of type") : G_("right shift count >= width of type"))); } if (code == LSHIFT_EXPR /* If either OP0 has been folded to INTEGER_CST... */ && ((TREE_CODE (orig_op0) != INTEGER_CST && TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE && TREE_CODE (op0) == INTEGER_CST) /* ...or if OP1 has been folded to INTEGER_CST... */ || (TREE_CODE (orig_op1) != INTEGER_CST && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE && TREE_CODE (op1) == INTEGER_CST)) && c_inhibit_evaluation_warnings == 0) /* ...then maybe we can detect an overflow. */ maybe_warn_shift_overflow (loc, op0, op1); if ((code == TRUNC_DIV_EXPR || code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR || code == EXACT_DIV_EXPR || code == TRUNC_MOD_EXPR) && TREE_CODE (orig_op1) != INTEGER_CST && TREE_CODE (op1) == INTEGER_CST && (TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE || TREE_CODE (TREE_TYPE (orig_op0)) == FIXED_POINT_TYPE) && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE) warn_for_div_by_zero (loc, op1); goto out; case INDIRECT_REF: case FIX_TRUNC_EXPR: case FLOAT_EXPR: CASE_CONVERT: case ADDR_SPACE_CONVERT_EXPR: case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR: case NEGATE_EXPR: case BIT_NOT_EXPR: case TRUTH_NOT_EXPR: case ADDR_EXPR: case CONJ_EXPR: case REALPART_EXPR: case IMAGPART_EXPR: /* Unary operations. */ orig_op0 = op0 = TREE_OPERAND (expr, 0); op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands, maybe_const_itself, for_int_const); STRIP_TYPE_NOPS (op0); if (code != ADDR_EXPR && code != REALPART_EXPR && code != IMAGPART_EXPR) op0 = decl_constant_value_for_optimization (op0); if (for_int_const && TREE_CODE (op0) != INTEGER_CST) goto out; /* ??? Cope with user tricks that amount to offsetof. The middle-end is not prepared to deal with them if they occur in initializers. */ if (op0 != orig_op0 && code == ADDR_EXPR && (op1 = get_base_address (op0)) != NULL_TREE && INDIRECT_REF_P (op1) && TREE_CONSTANT (TREE_OPERAND (op1, 0))) ret = fold_convert_loc (loc, TREE_TYPE (expr), fold_offsetof_1 (op0)); else if (op0 != orig_op0 || in_init) ret = in_init ? 
fold_build1_initializer_loc (loc, code, TREE_TYPE (expr), op0) : fold_build1_loc (loc, code, TREE_TYPE (expr), op0); else ret = fold (expr); if (code == INDIRECT_REF && ret != expr && INDIRECT_REF_P (ret)) { TREE_READONLY (ret) = TREE_READONLY (expr); TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (expr); TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr); } switch (code) { case FIX_TRUNC_EXPR: case FLOAT_EXPR: CASE_CONVERT: /* Don't warn about explicit conversions. We will already have warned about suspect implicit conversions. */ break; default: if (TREE_OVERFLOW_P (ret) && !TREE_OVERFLOW_P (op0)) overflow_warning (EXPR_LOCATION (expr), ret); break; } goto out; case TRUTH_ANDIF_EXPR: case TRUTH_ORIF_EXPR: /* Binary operations not necessarily evaluating both arguments. */ orig_op0 = op0 = TREE_OPERAND (expr, 0); orig_op1 = op1 = TREE_OPERAND (expr, 1); op0 = c_fully_fold_internal (op0, in_init, &op0_const, &op0_const_self, for_int_const); STRIP_TYPE_NOPS (op0); unused_p = (op0 == (code == TRUTH_ANDIF_EXPR ? truthvalue_false_node : truthvalue_true_node)); c_disable_warnings (unused_p); op1 = c_fully_fold_internal (op1, in_init, &op1_const, &op1_const_self, for_int_const); STRIP_TYPE_NOPS (op1); c_enable_warnings (unused_p); if (for_int_const && (TREE_CODE (op0) != INTEGER_CST /* Require OP1 be an INTEGER_CST only if it's evaluated. */ || (!unused_p && TREE_CODE (op1) != INTEGER_CST))) goto out; if (op0 != orig_op0 || op1 != orig_op1 || in_init) ret = in_init ? fold_build2_initializer_loc (loc, code, TREE_TYPE (expr), op0, op1) : fold_build2_loc (loc, code, TREE_TYPE (expr), op0, op1); else ret = fold (expr); *maybe_const_operands &= op0_const; *maybe_const_itself &= op0_const_self; if (!(flag_isoc99 && op0_const && op0_const_self && (code == TRUTH_ANDIF_EXPR ? op0 == truthvalue_false_node : op0 == truthvalue_true_node))) *maybe_const_operands &= op1_const; if (!(op0_const && op0_const_self && (code == TRUTH_ANDIF_EXPR ? op0 == truthvalue_false_node : op0 == truthvalue_true_node))) *maybe_const_itself &= op1_const_self; goto out; case COND_EXPR: orig_op0 = op0 = TREE_OPERAND (expr, 0); orig_op1 = op1 = TREE_OPERAND (expr, 1); orig_op2 = op2 = TREE_OPERAND (expr, 2); op0 = c_fully_fold_internal (op0, in_init, &op0_const, &op0_const_self, for_int_const); STRIP_TYPE_NOPS (op0); c_disable_warnings (op0 == truthvalue_false_node); op1 = c_fully_fold_internal (op1, in_init, &op1_const, &op1_const_self, for_int_const); STRIP_TYPE_NOPS (op1); c_enable_warnings (op0 == truthvalue_false_node); c_disable_warnings (op0 == truthvalue_true_node); op2 = c_fully_fold_internal (op2, in_init, &op2_const, &op2_const_self, for_int_const); STRIP_TYPE_NOPS (op2); c_enable_warnings (op0 == truthvalue_true_node); if (for_int_const && (TREE_CODE (op0) != INTEGER_CST /* Only the evaluated operand must be an INTEGER_CST. */ || (op0 == truthvalue_true_node ? 
TREE_CODE (op1) != INTEGER_CST : TREE_CODE (op2) != INTEGER_CST))) goto out; if (op0 != orig_op0 || op1 != orig_op1 || op2 != orig_op2) ret = fold_build3_loc (loc, code, TREE_TYPE (expr), op0, op1, op2); else ret = fold (expr); *maybe_const_operands &= op0_const; *maybe_const_itself &= op0_const_self; if (!(flag_isoc99 && op0_const && op0_const_self && op0 == truthvalue_false_node)) *maybe_const_operands &= op1_const; if (!(op0_const && op0_const_self && op0 == truthvalue_false_node)) *maybe_const_itself &= op1_const_self; if (!(flag_isoc99 && op0_const && op0_const_self && op0 == truthvalue_true_node)) *maybe_const_operands &= op2_const; if (!(op0_const && op0_const_self && op0 == truthvalue_true_node)) *maybe_const_itself &= op2_const_self; goto out; case VEC_COND_EXPR: orig_op0 = op0 = TREE_OPERAND (expr, 0); orig_op1 = op1 = TREE_OPERAND (expr, 1); orig_op2 = op2 = TREE_OPERAND (expr, 2); op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands, maybe_const_itself, for_int_const); STRIP_TYPE_NOPS (op0); op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands, maybe_const_itself, for_int_const); STRIP_TYPE_NOPS (op1); op2 = c_fully_fold_internal (op2, in_init, maybe_const_operands, maybe_const_itself, for_int_const); STRIP_TYPE_NOPS (op2); if (op0 != orig_op0 || op1 != orig_op1 || op2 != orig_op2) ret = fold_build3_loc (loc, code, TREE_TYPE (expr), op0, op1, op2); else ret = fold (expr); goto out; case EXCESS_PRECISION_EXPR: /* Each case where an operand with excess precision may be encountered must remove the EXCESS_PRECISION_EXPR around inner operands and possibly put one around the whole expression or possibly convert to the semantic type (which c_fully_fold does); we cannot tell at this stage which is appropriate in any particular case. */ gcc_unreachable (); default: /* Various codes may appear through folding built-in functions and their arguments. */ goto out; } out: /* Some folding may introduce NON_LVALUE_EXPRs; all lvalue checks have been done by this point, so remove them again. */ nowarning |= TREE_NO_WARNING (ret); STRIP_TYPE_NOPS (ret); if (nowarning && !TREE_NO_WARNING (ret)) { if (!CAN_HAVE_LOCATION_P (ret)) ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret); TREE_NO_WARNING (ret) = 1; } if (ret != expr) { protected_set_expr_location (ret, loc); if (IS_EXPR_CODE_CLASS (kind)) set_source_range (ret, old_range.m_start, old_range.m_finish); } return ret; }
int cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p) { int saved_stmts_are_full_exprs_p = 0; enum tree_code code = TREE_CODE (*expr_p); enum gimplify_status ret; if (STATEMENT_CODE_P (code)) { saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p (); current_stmt_tree ()->stmts_are_full_exprs_p = STMT_IS_FULL_EXPR_P (*expr_p); } switch (code) { case PTRMEM_CST: *expr_p = cplus_expand_constant (*expr_p); ret = GS_OK; break; case AGGR_INIT_EXPR: simplify_aggr_init_expr (expr_p); ret = GS_OK; break; case VEC_INIT_EXPR: { location_t loc = input_location; tree init = VEC_INIT_EXPR_INIT (*expr_p); int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE); gcc_assert (EXPR_HAS_LOCATION (*expr_p)); input_location = EXPR_LOCATION (*expr_p); *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE, init, VEC_INIT_EXPR_VALUE_INIT (*expr_p), from_array, tf_warning_or_error); cp_genericize_tree (expr_p); ret = GS_OK; input_location = loc; } break; case THROW_EXPR: /* FIXME communicate throw type to back end, probably by moving THROW_EXPR into ../tree.def. */ *expr_p = TREE_OPERAND (*expr_p, 0); ret = GS_OK; break; case MUST_NOT_THROW_EXPR: ret = gimplify_must_not_throw_expr (expr_p, pre_p); break; /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the LHS of an assignment might also be involved in the RHS, as in bug 25979. */ case INIT_EXPR: if (fn_contains_cilk_spawn_p (cfun) && cilk_detect_spawn_and_unwrap (expr_p) && !seen_error ()) return (enum gimplify_status) gimplify_cilk_spawn (expr_p); cp_gimplify_init_expr (expr_p); if (TREE_CODE (*expr_p) != INIT_EXPR) return GS_OK; /* Otherwise fall through. */ case MODIFY_EXPR: { if (fn_contains_cilk_spawn_p (cfun) && cilk_detect_spawn_and_unwrap (expr_p) && !seen_error ()) return (enum gimplify_status) gimplify_cilk_spawn (expr_p); /* If the back end isn't clever enough to know that the lhs and rhs types are the same, add an explicit conversion. */ tree op0 = TREE_OPERAND (*expr_p, 0); tree op1 = TREE_OPERAND (*expr_p, 1); if (!error_operand_p (op0) && !error_operand_p (op1) && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0)) || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1))) && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0))) TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op0), op1); else if ((is_gimple_lvalue (op1) || INDIRECT_REF_P (op1) || (TREE_CODE (op1) == CONSTRUCTOR && CONSTRUCTOR_NELTS (op1) == 0 && !TREE_CLOBBER_P (op1)) || (TREE_CODE (op1) == CALL_EXPR && !CALL_EXPR_RETURN_SLOT_OPT (op1))) && is_really_empty_class (TREE_TYPE (op0))) { /* Remove any copies of empty classes. We check that the RHS has a simple form so that TARGET_EXPRs and non-empty CONSTRUCTORs get reduced properly, and we leave the return slot optimization alone because it isn't a copy (FIXME so it shouldn't be represented as one). Also drop volatile variables on the RHS to avoid infinite recursion from gimplify_expr trying to load the value. */ if (!TREE_SIDE_EFFECTS (op1) || (DECL_P (op1) && TREE_THIS_VOLATILE (op1))) *expr_p = op0; else if (TREE_CODE (op1) == MEM_REF && TREE_THIS_VOLATILE (op1)) { /* Similarly for volatile MEM_REFs on the RHS. */ if (!TREE_SIDE_EFFECTS (TREE_OPERAND (op1, 0))) *expr_p = op0; else *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p), TREE_OPERAND (op1, 0), op0); } else *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p), op0, op1); } } ret = GS_OK; break; case EMPTY_CLASS_EXPR: /* We create an empty CONSTRUCTOR with RECORD_TYPE. 
*/ *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL); ret = GS_OK; break; case BASELINK: *expr_p = BASELINK_FUNCTIONS (*expr_p); ret = GS_OK; break; case TRY_BLOCK: genericize_try_block (expr_p); ret = GS_OK; break; case HANDLER: genericize_catch_block (expr_p); ret = GS_OK; break; case EH_SPEC_BLOCK: genericize_eh_spec_block (expr_p); ret = GS_OK; break; case USING_STMT: gcc_unreachable (); case FOR_STMT: case WHILE_STMT: case DO_STMT: case SWITCH_STMT: case CONTINUE_STMT: case BREAK_STMT: gcc_unreachable (); case OMP_FOR: case OMP_SIMD: case OMP_DISTRIBUTE: ret = cp_gimplify_omp_for (expr_p, pre_p); break; case EXPR_STMT: gimplify_expr_stmt (expr_p); ret = GS_OK; break; case UNARY_PLUS_EXPR: { tree arg = TREE_OPERAND (*expr_p, 0); tree type = TREE_TYPE (*expr_p); *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg) : arg; ret = GS_OK; } break; case CILK_SPAWN_STMT: gcc_assert (fn_contains_cilk_spawn_p (cfun) && cilk_detect_spawn_and_unwrap (expr_p)); /* If errors are seen, then just process it as a CALL_EXPR. */ if (!seen_error ()) return (enum gimplify_status) gimplify_cilk_spawn (expr_p); case CALL_EXPR: if (fn_contains_cilk_spawn_p (cfun) && cilk_detect_spawn_and_unwrap (expr_p) && !seen_error ()) return (enum gimplify_status) gimplify_cilk_spawn (expr_p); default: ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p); break; } /* Restore saved state. */ if (STATEMENT_CODE_P (code)) current_stmt_tree ()->stmts_are_full_exprs_p = saved_stmts_are_full_exprs_p; return ret; }
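/* Hypothetical stand-alone example (not taken from cp_gimplify_expr above): the "really empty class" copies that the INIT_EXPR/MODIFY_EXPR handling elides have no data to move at the source level. */

struct Empty {};

Empty &assign (Empty &dst, const Empty &src)
{
  dst = src;   // an empty-class copy moves no bytes; the gimplifier can drop it
  return dst;
}

int main ()
{
  Empty a, b;
  assign (a, b);
  return 0;
}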
static gimple * input_gimple_stmt (struct lto_input_block *ib, struct data_in *data_in, enum LTO_tags tag) { gimple *stmt; enum gimple_code code; unsigned HOST_WIDE_INT num_ops; size_t i; struct bitpack_d bp; bool has_hist; code = lto_tag_to_gimple_code (tag); /* Read the tuple header. */ bp = streamer_read_bitpack (ib); num_ops = bp_unpack_var_len_unsigned (&bp); stmt = gimple_alloc (code, num_ops); stmt->no_warning = bp_unpack_value (&bp, 1); if (is_gimple_assign (stmt)) stmt->nontemporal_move = bp_unpack_value (&bp, 1); stmt->has_volatile_ops = bp_unpack_value (&bp, 1); has_hist = bp_unpack_value (&bp, 1); stmt->subcode = bp_unpack_var_len_unsigned (&bp); /* Read location information. Caching here makes no sense until streamer cache can handle the following gimple_set_block. */ gimple_set_location (stmt, stream_input_location_now (&bp, data_in)); /* Read lexical block reference. */ gimple_set_block (stmt, stream_read_tree (ib, data_in)); /* Read in all the operands. */ switch (code) { case GIMPLE_RESX: gimple_resx_set_region (as_a <gresx *> (stmt), streamer_read_hwi (ib)); break; case GIMPLE_EH_MUST_NOT_THROW: gimple_eh_must_not_throw_set_fndecl ( as_a <geh_mnt *> (stmt), stream_read_tree (ib, data_in)); break; case GIMPLE_EH_DISPATCH: gimple_eh_dispatch_set_region (as_a <geh_dispatch *> (stmt), streamer_read_hwi (ib)); break; case GIMPLE_ASM: { /* FIXME lto. Move most of this into a new gimple_asm_set_string(). */ gasm *asm_stmt = as_a <gasm *> (stmt); tree str; asm_stmt->ni = streamer_read_uhwi (ib); asm_stmt->no = streamer_read_uhwi (ib); asm_stmt->nc = streamer_read_uhwi (ib); asm_stmt->nl = streamer_read_uhwi (ib); str = streamer_read_string_cst (data_in, ib); asm_stmt->string = TREE_STRING_POINTER (str); } /* Fallthru */ case GIMPLE_ASSIGN: case GIMPLE_CALL: case GIMPLE_RETURN: case GIMPLE_SWITCH: case GIMPLE_LABEL: case GIMPLE_COND: case GIMPLE_GOTO: case GIMPLE_DEBUG: for (i = 0; i < num_ops; i++) { tree *opp, op = stream_read_tree (ib, data_in); gimple_set_op (stmt, i, op); if (!op) continue; opp = gimple_op_ptr (stmt, i); if (TREE_CODE (*opp) == ADDR_EXPR) opp = &TREE_OPERAND (*opp, 0); while (handled_component_p (*opp)) opp = &TREE_OPERAND (*opp, 0); /* At LTO output time we wrap all global decls in MEM_REFs to allow seamless replacement with prevailing decls. Undo this here if the prevailing decl allows for this. ??? Maybe we should simply fold all stmts. */ if (TREE_CODE (*opp) == MEM_REF && TREE_CODE (TREE_OPERAND (*opp, 0)) == ADDR_EXPR && integer_zerop (TREE_OPERAND (*opp, 1)) && (TREE_THIS_VOLATILE (*opp) == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*opp, 0), 0))) && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*opp, 1))) && (TREE_TYPE (*opp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (*opp, 1)))) && (TREE_TYPE (*opp) == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*opp, 0), 0)))) *opp = TREE_OPERAND (TREE_OPERAND (*opp, 0), 0); } if (gcall *call_stmt = dyn_cast <gcall *> (stmt)) { if (gimple_call_internal_p (call_stmt)) gimple_call_set_internal_fn (call_stmt, streamer_read_enum (ib, internal_fn, IFN_LAST)); else gimple_call_set_fntype (call_stmt, stream_read_tree (ib, data_in)); } break; case GIMPLE_NOP: case GIMPLE_PREDICT: break; case GIMPLE_TRANSACTION: gimple_transaction_set_label (as_a <gtransaction *> (stmt), stream_read_tree (ib, data_in)); break; default: internal_error ("bytecode stream: unknown GIMPLE statement tag %s", lto_tag_name (tag)); } /* Update the properties of symbols, SSA names and labels associated with STMT. 
*/ if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL) { tree lhs = gimple_get_lhs (stmt); if (lhs && TREE_CODE (lhs) == SSA_NAME) SSA_NAME_DEF_STMT (lhs) = stmt; } else if (code == GIMPLE_ASM) { gasm *asm_stmt = as_a <gasm *> (stmt); unsigned i; for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++) { tree op = TREE_VALUE (gimple_asm_output_op (asm_stmt, i)); if (TREE_CODE (op) == SSA_NAME) SSA_NAME_DEF_STMT (op) = stmt; } } /* Reset alias information. */ if (code == GIMPLE_CALL) gimple_call_reset_alias_info (as_a <gcall *> (stmt)); /* Mark the statement modified so its operand vectors can be filled in. */ gimple_set_modified (stmt, true); if (has_hist) stream_in_histogram_value (ib, stmt); return stmt; }
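/* Self-contained sketch of the bit-packing idea behind the tuple header read above. This is NOT GCC's streamer API; the struct and function names are made up for illustration. The point is that the reader must unpack values in exactly the order the writer packed them. */

#include <cassert>
#include <cstdint>

struct bitpack { std::uint64_t word = 0; unsigned pos = 0; };

static void bp_put (bitpack &bp, std::uint64_t val, unsigned nbits)
{
  bp.word |= (val & ((1ULL << nbits) - 1)) << bp.pos;
  bp.pos += nbits;
}

static std::uint64_t bp_get (bitpack &bp, unsigned nbits)
{
  std::uint64_t val = (bp.word >> bp.pos) & ((1ULL << nbits) - 1);
  bp.pos += nbits;
  return val;
}

int main ()
{
  bitpack w;
  bp_put (w, 1, 1);   // e.g. no_warning
  bp_put (w, 0, 1);   // e.g. nontemporal_move
  bp_put (w, 1, 1);   // e.g. has_volatile_ops

  bitpack r;
  r.word = w.word;
  assert (bp_get (r, 1) == 1);  // read back in the same order
  assert (bp_get (r, 1) == 0);
  assert (bp_get (r, 1) == 1);
  return 0;
}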
tree ctor_for_folding (tree decl) { varpool_node *node, *real_node; tree real_decl; if (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != CONST_DECL) return error_mark_node; if (TREE_CODE (decl) == CONST_DECL || DECL_IN_CONSTANT_POOL (decl)) return DECL_INITIAL (decl); if (TREE_THIS_VOLATILE (decl)) return error_mark_node; /* Do not care about automatic variables. Those are never initialized anyway, because the gimplifier expands the code. */ if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl)) { gcc_assert (!TREE_PUBLIC (decl)); return error_mark_node; } gcc_assert (TREE_CODE (decl) == VAR_DECL); node = varpool_get_node (decl); if (node) { real_node = varpool_variable_node (node); real_decl = real_node->decl; } else real_decl = decl; /* See if we are dealing with an alias. In most cases an alias is just an alternative symbol pointing to a given constructor. This allows us to use the interposition rules of DECL while taking the constructor from REAL_NODE. However weakrefs are special by being just an alternative name of their target (if defined). */ if (decl != real_decl) { gcc_assert (!DECL_INITIAL (decl) || DECL_INITIAL (decl) == error_mark_node); if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))) { node = varpool_alias_target (node); decl = node->decl; } } /* Vtables are defined by their types and must match regardless of interposition rules. */ if (DECL_VIRTUAL_P (real_decl)) { gcc_checking_assert (TREE_READONLY (real_decl)); return DECL_INITIAL (real_decl); } /* If there is no constructor, we have nothing to do. */ if (DECL_INITIAL (real_decl) == error_mark_node) return error_mark_node; /* A non-readonly alias of a readonly variable is de-facto readonly too, because the variable itself is in a readonly section. We also honor the READONLY flag on the alias, assuming that the user knows what they are doing. */ if (!TREE_READONLY (decl) && !TREE_READONLY (real_decl)) return error_mark_node; /* Variables declared 'const' without an initializer have zero as the initializer if they may not be overridden at link or run time. */ if (!DECL_INITIAL (real_decl) && (DECL_EXTERNAL (decl) || decl_replaceable_p (decl))) return error_mark_node; /* Variables declared `const' with an initializer are considered not to be overwritable with a different initializer by default. ??? Previously we behaved so for scalar variables but not for array accesses. */ return DECL_INITIAL (real_decl); }
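/* Hypothetical source-level view of the distinctions ctor_for_folding draws (illustrative only, not taken from the function above). */

static const int folded = 42;           // readonly with an initializer: uses may become 42
static volatile const int never = 7;    // TREE_THIS_VOLATILE: must always be re-read

int use ()
{
  return folded + never;                // only 'folded' is a folding candidate
}

int main () { return use () == 49 ? 0 : 1; }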
static inline void unpack_ts_base_value_fields (struct bitpack_d *bp, tree expr) { /* Note that the code for EXPR has already been unpacked to create EXPR in streamer_alloc_tree. */ if (!TYPE_P (expr)) { TREE_SIDE_EFFECTS (expr) = (unsigned) bp_unpack_value (bp, 1); TREE_CONSTANT (expr) = (unsigned) bp_unpack_value (bp, 1); TREE_READONLY (expr) = (unsigned) bp_unpack_value (bp, 1); /* TREE_PUBLIC is used on types to indicate that the type has a TYPE_CACHED_VALUES vector. This is not streamed out, so we skip it here. */ TREE_PUBLIC (expr) = (unsigned) bp_unpack_value (bp, 1); } else bp_unpack_value (bp, 4); TREE_ADDRESSABLE (expr) = (unsigned) bp_unpack_value (bp, 1); TREE_THIS_VOLATILE (expr) = (unsigned) bp_unpack_value (bp, 1); if (DECL_P (expr)) { DECL_UNSIGNED (expr) = (unsigned) bp_unpack_value (bp, 1); DECL_NAMELESS (expr) = (unsigned) bp_unpack_value (bp, 1); } else if (TYPE_P (expr)) TYPE_UNSIGNED (expr) = (unsigned) bp_unpack_value (bp, 1); else bp_unpack_value (bp, 1); TREE_ASM_WRITTEN (expr) = (unsigned) bp_unpack_value (bp, 1); if (TYPE_P (expr)) TYPE_ARTIFICIAL (expr) = (unsigned) bp_unpack_value (bp, 1); else TREE_NO_WARNING (expr) = (unsigned) bp_unpack_value (bp, 1); TREE_NOTHROW (expr) = (unsigned) bp_unpack_value (bp, 1); TREE_STATIC (expr) = (unsigned) bp_unpack_value (bp, 1); if (TREE_CODE (expr) != TREE_BINFO) TREE_PRIVATE (expr) = (unsigned) bp_unpack_value (bp, 1); else bp_unpack_value (bp, 1); TREE_PROTECTED (expr) = (unsigned) bp_unpack_value (bp, 1); TREE_DEPRECATED (expr) = (unsigned) bp_unpack_value (bp, 1); if (TYPE_P (expr)) { if (AGGREGATE_TYPE_P (expr)) TYPE_REVERSE_STORAGE_ORDER (expr) = (unsigned) bp_unpack_value (bp, 1); else TYPE_SATURATING (expr) = (unsigned) bp_unpack_value (bp, 1); TYPE_ADDR_SPACE (expr) = (unsigned) bp_unpack_value (bp, 8); } else if (TREE_CODE (expr) == BIT_FIELD_REF || TREE_CODE (expr) == MEM_REF) { REF_REVERSE_STORAGE_ORDER (expr) = (unsigned) bp_unpack_value (bp, 1); bp_unpack_value (bp, 8); } else if (TREE_CODE (expr) == SSA_NAME) { SSA_NAME_IS_DEFAULT_DEF (expr) = (unsigned) bp_unpack_value (bp, 1); bp_unpack_value (bp, 8); } else bp_unpack_value (bp, 9); }
static void gfc_init_builtin_functions (void) { enum builtin_type { #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME, #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME, #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME, #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME, #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME, #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME, #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME, #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ ARG6) NAME, #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ ARG6, ARG7) NAME, #define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ ARG6, ARG7, ARG8) NAME, #define DEF_FUNCTION_TYPE_9(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ ARG6, ARG7, ARG8, ARG9) NAME, #define DEF_FUNCTION_TYPE_10(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ ARG6, ARG7, ARG8, ARG9, ARG10) NAME, #define DEF_FUNCTION_TYPE_11(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) NAME, #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME, #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME, #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME, #define DEF_FUNCTION_TYPE_VAR_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ ARG6) NAME, #define DEF_FUNCTION_TYPE_VAR_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ ARG6, ARG7) NAME, #define DEF_POINTER_TYPE(NAME, TYPE) NAME, #include "types.def" #undef DEF_PRIMITIVE_TYPE #undef DEF_FUNCTION_TYPE_0 #undef DEF_FUNCTION_TYPE_1 #undef DEF_FUNCTION_TYPE_2 #undef DEF_FUNCTION_TYPE_3 #undef DEF_FUNCTION_TYPE_4 #undef DEF_FUNCTION_TYPE_5 #undef DEF_FUNCTION_TYPE_6 #undef DEF_FUNCTION_TYPE_7 #undef DEF_FUNCTION_TYPE_8 #undef DEF_FUNCTION_TYPE_9 #undef DEF_FUNCTION_TYPE_10 #undef DEF_FUNCTION_TYPE_11 #undef DEF_FUNCTION_TYPE_VAR_0 #undef DEF_FUNCTION_TYPE_VAR_1 #undef DEF_FUNCTION_TYPE_VAR_2 #undef DEF_FUNCTION_TYPE_VAR_6 #undef DEF_FUNCTION_TYPE_VAR_7 #undef DEF_POINTER_TYPE BT_LAST }; tree mfunc_float[6]; tree mfunc_double[6]; tree mfunc_longdouble[6]; tree mfunc_cfloat[6]; tree mfunc_cdouble[6]; tree mfunc_clongdouble[6]; tree func_cfloat_float, func_float_cfloat; tree func_cdouble_double, func_double_cdouble; tree func_clongdouble_longdouble, func_longdouble_clongdouble; tree func_float_floatp_floatp; tree func_double_doublep_doublep; tree func_longdouble_longdoublep_longdoublep; tree ftype, ptype; tree builtin_types[(int) BT_LAST + 1]; int attr; build_builtin_fntypes (mfunc_float, float_type_node); build_builtin_fntypes (mfunc_double, double_type_node); build_builtin_fntypes (mfunc_longdouble, long_double_type_node); build_builtin_fntypes (mfunc_cfloat, complex_float_type_node); build_builtin_fntypes (mfunc_cdouble, complex_double_type_node); build_builtin_fntypes (mfunc_clongdouble, complex_long_double_type_node); func_cfloat_float = build_function_type_list (float_type_node, complex_float_type_node, NULL_TREE); func_float_cfloat = build_function_type_list (complex_float_type_node, float_type_node, NULL_TREE); func_cdouble_double = build_function_type_list (double_type_node, complex_double_type_node, NULL_TREE); func_double_cdouble = build_function_type_list (complex_double_type_node, double_type_node, NULL_TREE); func_clongdouble_longdouble = build_function_type_list (long_double_type_node, complex_long_double_type_node, NULL_TREE); func_longdouble_clongdouble = build_function_type_list (complex_long_double_type_node, 
long_double_type_node, NULL_TREE); ptype = build_pointer_type (float_type_node); func_float_floatp_floatp = build_function_type_list (void_type_node, ptype, ptype, NULL_TREE); ptype = build_pointer_type (double_type_node); func_double_doublep_doublep = build_function_type_list (void_type_node, ptype, ptype, NULL_TREE); ptype = build_pointer_type (long_double_type_node); func_longdouble_longdoublep_longdoublep = build_function_type_list (void_type_node, ptype, ptype, NULL_TREE); /* Non-math builtins are defined manually, so they're not included here. */ #define OTHER_BUILTIN(ID,NAME,TYPE,CONST) #include "mathbuiltins.def" gfc_define_builtin ("__builtin_roundl", mfunc_longdouble[0], BUILT_IN_ROUNDL, "roundl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_round", mfunc_double[0], BUILT_IN_ROUND, "round", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_roundf", mfunc_float[0], BUILT_IN_ROUNDF, "roundf", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_truncl", mfunc_longdouble[0], BUILT_IN_TRUNCL, "truncl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_trunc", mfunc_double[0], BUILT_IN_TRUNC, "trunc", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_truncf", mfunc_float[0], BUILT_IN_TRUNCF, "truncf", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_cabsl", func_clongdouble_longdouble, BUILT_IN_CABSL, "cabsl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_cabs", func_cdouble_double, BUILT_IN_CABS, "cabs", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_cabsf", func_cfloat_float, BUILT_IN_CABSF, "cabsf", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_copysignl", mfunc_longdouble[1], BUILT_IN_COPYSIGNL, "copysignl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_copysign", mfunc_double[1], BUILT_IN_COPYSIGN, "copysign", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_copysignf", mfunc_float[1], BUILT_IN_COPYSIGNF, "copysignf", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_nextafterl", mfunc_longdouble[1], BUILT_IN_NEXTAFTERL, "nextafterl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_nextafter", mfunc_double[1], BUILT_IN_NEXTAFTER, "nextafter", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_nextafterf", mfunc_float[1], BUILT_IN_NEXTAFTERF, "nextafterf", ATTR_CONST_NOTHROW_LEAF_LIST); /* Some built-ins depend on rounding mode. Depending on compilation options, they will be "pure" or "const". */ attr = flag_rounding_math ? 
ATTR_PURE_NOTHROW_LEAF_LIST : ATTR_CONST_NOTHROW_LEAF_LIST; gfc_define_builtin ("__builtin_rintl", mfunc_longdouble[0], BUILT_IN_RINTL, "rintl", attr); gfc_define_builtin ("__builtin_rint", mfunc_double[0], BUILT_IN_RINT, "rint", attr); gfc_define_builtin ("__builtin_rintf", mfunc_float[0], BUILT_IN_RINTF, "rintf", attr); gfc_define_builtin ("__builtin_remainderl", mfunc_longdouble[1], BUILT_IN_REMAINDERL, "remainderl", attr); gfc_define_builtin ("__builtin_remainder", mfunc_double[1], BUILT_IN_REMAINDER, "remainder", attr); gfc_define_builtin ("__builtin_remainderf", mfunc_float[1], BUILT_IN_REMAINDERF, "remainderf", attr); gfc_define_builtin ("__builtin_logbl", mfunc_longdouble[0], BUILT_IN_LOGBL, "logbl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_logb", mfunc_double[0], BUILT_IN_LOGB, "logb", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_logbf", mfunc_float[0], BUILT_IN_LOGBF, "logbf", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_frexpl", mfunc_longdouble[4], BUILT_IN_FREXPL, "frexpl", ATTR_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_frexp", mfunc_double[4], BUILT_IN_FREXP, "frexp", ATTR_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_frexpf", mfunc_float[4], BUILT_IN_FREXPF, "frexpf", ATTR_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_fabsl", mfunc_longdouble[0], BUILT_IN_FABSL, "fabsl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_fabs", mfunc_double[0], BUILT_IN_FABS, "fabs", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_fabsf", mfunc_float[0], BUILT_IN_FABSF, "fabsf", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_scalbnl", mfunc_longdouble[2], BUILT_IN_SCALBNL, "scalbnl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_scalbn", mfunc_double[2], BUILT_IN_SCALBN, "scalbn", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_scalbnf", mfunc_float[2], BUILT_IN_SCALBNF, "scalbnf", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_fmodl", mfunc_longdouble[1], BUILT_IN_FMODL, "fmodl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_fmod", mfunc_double[1], BUILT_IN_FMOD, "fmod", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_fmodf", mfunc_float[1], BUILT_IN_FMODF, "fmodf", ATTR_CONST_NOTHROW_LEAF_LIST); /* iround{f,,l}, lround{f,,l} and llround{f,,l} */ ftype = build_function_type_list (integer_type_node, float_type_node, NULL_TREE); gfc_define_builtin("__builtin_iroundf", ftype, BUILT_IN_IROUNDF, "iroundf", ATTR_CONST_NOTHROW_LEAF_LIST); ftype = build_function_type_list (long_integer_type_node, float_type_node, NULL_TREE); gfc_define_builtin ("__builtin_lroundf", ftype, BUILT_IN_LROUNDF, "lroundf", ATTR_CONST_NOTHROW_LEAF_LIST); ftype = build_function_type_list (long_long_integer_type_node, float_type_node, NULL_TREE); gfc_define_builtin ("__builtin_llroundf", ftype, BUILT_IN_LLROUNDF, "llroundf", ATTR_CONST_NOTHROW_LEAF_LIST); ftype = build_function_type_list (integer_type_node, double_type_node, NULL_TREE); gfc_define_builtin("__builtin_iround", ftype, BUILT_IN_IROUND, "iround", ATTR_CONST_NOTHROW_LEAF_LIST); ftype = build_function_type_list (long_integer_type_node, double_type_node, NULL_TREE); gfc_define_builtin ("__builtin_lround", ftype, BUILT_IN_LROUND, "lround", ATTR_CONST_NOTHROW_LEAF_LIST); ftype = build_function_type_list (long_long_integer_type_node, double_type_node, NULL_TREE); gfc_define_builtin ("__builtin_llround", ftype, BUILT_IN_LLROUND, "llround", ATTR_CONST_NOTHROW_LEAF_LIST); ftype = 
build_function_type_list (integer_type_node, long_double_type_node, NULL_TREE); gfc_define_builtin("__builtin_iroundl", ftype, BUILT_IN_IROUNDL, "iroundl", ATTR_CONST_NOTHROW_LEAF_LIST); ftype = build_function_type_list (long_integer_type_node, long_double_type_node, NULL_TREE); gfc_define_builtin ("__builtin_lroundl", ftype, BUILT_IN_LROUNDL, "lroundl", ATTR_CONST_NOTHROW_LEAF_LIST); ftype = build_function_type_list (long_long_integer_type_node, long_double_type_node, NULL_TREE); gfc_define_builtin ("__builtin_llroundl", ftype, BUILT_IN_LLROUNDL, "llroundl", ATTR_CONST_NOTHROW_LEAF_LIST); /* These are used to implement the ** operator. */ gfc_define_builtin ("__builtin_powl", mfunc_longdouble[1], BUILT_IN_POWL, "powl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_pow", mfunc_double[1], BUILT_IN_POW, "pow", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_powf", mfunc_float[1], BUILT_IN_POWF, "powf", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_cpowl", mfunc_clongdouble[1], BUILT_IN_CPOWL, "cpowl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_cpow", mfunc_cdouble[1], BUILT_IN_CPOW, "cpow", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_cpowf", mfunc_cfloat[1], BUILT_IN_CPOWF, "cpowf", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_powil", mfunc_longdouble[2], BUILT_IN_POWIL, "powil", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_powi", mfunc_double[2], BUILT_IN_POWI, "powi", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_powif", mfunc_float[2], BUILT_IN_POWIF, "powif", ATTR_CONST_NOTHROW_LEAF_LIST); if (targetm.libc_has_function (function_c99_math_complex)) { gfc_define_builtin ("__builtin_cbrtl", mfunc_longdouble[0], BUILT_IN_CBRTL, "cbrtl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_cbrt", mfunc_double[0], BUILT_IN_CBRT, "cbrt", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_cbrtf", mfunc_float[0], BUILT_IN_CBRTF, "cbrtf", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_cexpil", func_longdouble_clongdouble, BUILT_IN_CEXPIL, "cexpil", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_cexpi", func_double_cdouble, BUILT_IN_CEXPI, "cexpi", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_cexpif", func_float_cfloat, BUILT_IN_CEXPIF, "cexpif", ATTR_CONST_NOTHROW_LEAF_LIST); } if (targetm.libc_has_function (function_sincos)) { gfc_define_builtin ("__builtin_sincosl", func_longdouble_longdoublep_longdoublep, BUILT_IN_SINCOSL, "sincosl", ATTR_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_sincos", func_double_doublep_doublep, BUILT_IN_SINCOS, "sincos", ATTR_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_sincosf", func_float_floatp_floatp, BUILT_IN_SINCOSF, "sincosf", ATTR_NOTHROW_LEAF_LIST); } /* For LEADZ, TRAILZ, POPCNT and POPPAR. 
*/ ftype = build_function_type_list (integer_type_node, unsigned_type_node, NULL_TREE); gfc_define_builtin ("__builtin_clz", ftype, BUILT_IN_CLZ, "__builtin_clz", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_ctz", ftype, BUILT_IN_CTZ, "__builtin_ctz", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_parity", ftype, BUILT_IN_PARITY, "__builtin_parity", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_popcount", ftype, BUILT_IN_POPCOUNT, "__builtin_popcount", ATTR_CONST_NOTHROW_LEAF_LIST); ftype = build_function_type_list (integer_type_node, long_unsigned_type_node, NULL_TREE); gfc_define_builtin ("__builtin_clzl", ftype, BUILT_IN_CLZL, "__builtin_clzl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_ctzl", ftype, BUILT_IN_CTZL, "__builtin_ctzl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_parityl", ftype, BUILT_IN_PARITYL, "__builtin_parityl", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_popcountl", ftype, BUILT_IN_POPCOUNTL, "__builtin_popcountl", ATTR_CONST_NOTHROW_LEAF_LIST); ftype = build_function_type_list (integer_type_node, long_long_unsigned_type_node, NULL_TREE); gfc_define_builtin ("__builtin_clzll", ftype, BUILT_IN_CLZLL, "__builtin_clzll", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_ctzll", ftype, BUILT_IN_CTZLL, "__builtin_ctzll", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_parityll", ftype, BUILT_IN_PARITYLL, "__builtin_parityll", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_popcountll", ftype, BUILT_IN_POPCOUNTLL, "__builtin_popcountll", ATTR_CONST_NOTHROW_LEAF_LIST); /* Other builtin functions we use. */ ftype = build_function_type_list (long_integer_type_node, long_integer_type_node, long_integer_type_node, NULL_TREE); gfc_define_builtin ("__builtin_expect", ftype, BUILT_IN_EXPECT, "__builtin_expect", ATTR_CONST_NOTHROW_LEAF_LIST); ftype = build_function_type_list (void_type_node, pvoid_type_node, NULL_TREE); gfc_define_builtin ("__builtin_free", ftype, BUILT_IN_FREE, "free", ATTR_NOTHROW_LEAF_LIST); ftype = build_function_type_list (pvoid_type_node, size_type_node, NULL_TREE); gfc_define_builtin ("__builtin_malloc", ftype, BUILT_IN_MALLOC, "malloc", ATTR_NOTHROW_LEAF_MALLOC_LIST); ftype = build_function_type_list (pvoid_type_node, size_type_node, size_type_node, NULL_TREE); gfc_define_builtin ("__builtin_calloc", ftype, BUILT_IN_CALLOC, "calloc", ATTR_NOTHROW_LEAF_MALLOC_LIST); DECL_IS_MALLOC (builtin_decl_explicit (BUILT_IN_CALLOC)) = 1; ftype = build_function_type_list (pvoid_type_node, size_type_node, pvoid_type_node, NULL_TREE); gfc_define_builtin ("__builtin_realloc", ftype, BUILT_IN_REALLOC, "realloc", ATTR_NOTHROW_LEAF_LIST); /* Type-generic floating-point classification built-ins. 
*/ ftype = build_function_type (integer_type_node, NULL_TREE); gfc_define_builtin ("__builtin_isfinite", ftype, BUILT_IN_ISFINITE, "__builtin_isfinite", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_isinf", ftype, BUILT_IN_ISINF, "__builtin_isinf", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_isinf_sign", ftype, BUILT_IN_ISINF_SIGN, "__builtin_isinf_sign", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_isnan", ftype, BUILT_IN_ISNAN, "__builtin_isnan", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_isnormal", ftype, BUILT_IN_ISNORMAL, "__builtin_isnormal", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_signbit", ftype, BUILT_IN_SIGNBIT, "__builtin_signbit", ATTR_CONST_NOTHROW_LEAF_LIST); ftype = build_function_type (integer_type_node, NULL_TREE); gfc_define_builtin ("__builtin_isless", ftype, BUILT_IN_ISLESS, "__builtin_isless", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_islessequal", ftype, BUILT_IN_ISLESSEQUAL, "__builtin_islessequal", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_islessgreater", ftype, BUILT_IN_ISLESSGREATER, "__builtin_islessgreater", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_isgreater", ftype, BUILT_IN_ISGREATER, "__builtin_isgreater", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_isgreaterequal", ftype, BUILT_IN_ISGREATEREQUAL, "__builtin_isgreaterequal", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__builtin_isunordered", ftype, BUILT_IN_ISUNORDERED, "__builtin_isunordered", ATTR_CONST_NOTHROW_LEAF_LIST); #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \ builtin_types[(int) ENUM] = VALUE; #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \ builtin_types[(int) ENUM] \ = build_function_type_list (builtin_types[(int) RETURN], \ NULL_TREE); #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \ builtin_types[(int) ENUM] \ = build_function_type_list (builtin_types[(int) RETURN], \ builtin_types[(int) ARG1], \ NULL_TREE); #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \ builtin_types[(int) ENUM] \ = build_function_type_list (builtin_types[(int) RETURN], \ builtin_types[(int) ARG1], \ builtin_types[(int) ARG2], \ NULL_TREE); #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \ builtin_types[(int) ENUM] \ = build_function_type_list (builtin_types[(int) RETURN], \ builtin_types[(int) ARG1], \ builtin_types[(int) ARG2], \ builtin_types[(int) ARG3], \ NULL_TREE); #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \ builtin_types[(int) ENUM] \ = build_function_type_list (builtin_types[(int) RETURN], \ builtin_types[(int) ARG1], \ builtin_types[(int) ARG2], \ builtin_types[(int) ARG3], \ builtin_types[(int) ARG4], \ NULL_TREE); #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \ builtin_types[(int) ENUM] \ = build_function_type_list (builtin_types[(int) RETURN], \ builtin_types[(int) ARG1], \ builtin_types[(int) ARG2], \ builtin_types[(int) ARG3], \ builtin_types[(int) ARG4], \ builtin_types[(int) ARG5], \ NULL_TREE); #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ ARG6) \ builtin_types[(int) ENUM] \ = build_function_type_list (builtin_types[(int) RETURN], \ builtin_types[(int) ARG1], \ builtin_types[(int) ARG2], \ builtin_types[(int) ARG3], \ builtin_types[(int) ARG4], \ builtin_types[(int) ARG5], \ builtin_types[(int) ARG6], \ NULL_TREE); #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ ARG6, ARG7) \ builtin_types[(int) ENUM] \ = 
build_function_type_list (builtin_types[(int) RETURN], \ builtin_types[(int) ARG1], \ builtin_types[(int) ARG2], \ builtin_types[(int) ARG3], \ builtin_types[(int) ARG4], \ builtin_types[(int) ARG5], \ builtin_types[(int) ARG6], \ builtin_types[(int) ARG7], \ NULL_TREE); #define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ ARG6, ARG7, ARG8) \ builtin_types[(int) ENUM] \ = build_function_type_list (builtin_types[(int) RETURN], \ builtin_types[(int) ARG1], \ builtin_types[(int) ARG2], \ builtin_types[(int) ARG3], \ builtin_types[(int) ARG4], \ builtin_types[(int) ARG5], \ builtin_types[(int) ARG6], \ builtin_types[(int) ARG7], \ builtin_types[(int) ARG8], \ NULL_TREE); #define DEF_FUNCTION_TYPE_9(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ ARG6, ARG7, ARG8, ARG9) \ builtin_types[(int) ENUM] \ = build_function_type_list (builtin_types[(int) RETURN], \ builtin_types[(int) ARG1], \ builtin_types[(int) ARG2], \ builtin_types[(int) ARG3], \ builtin_types[(int) ARG4], \ builtin_types[(int) ARG5], \ builtin_types[(int) ARG6], \ builtin_types[(int) ARG7], \ builtin_types[(int) ARG8], \ builtin_types[(int) ARG9], \ NULL_TREE); #define DEF_FUNCTION_TYPE_10(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, \ ARG5, ARG6, ARG7, ARG8, ARG9, ARG10) \ builtin_types[(int) ENUM] \ = build_function_type_list (builtin_types[(int) RETURN], \ builtin_types[(int) ARG1], \ builtin_types[(int) ARG2], \ builtin_types[(int) ARG3], \ builtin_types[(int) ARG4], \ builtin_types[(int) ARG5], \ builtin_types[(int) ARG6], \ builtin_types[(int) ARG7], \ builtin_types[(int) ARG8], \ builtin_types[(int) ARG9], \ builtin_types[(int) ARG10], \ NULL_TREE); #define DEF_FUNCTION_TYPE_11(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, \ ARG5, ARG6, ARG7, ARG8, ARG9, ARG10, ARG11)\ builtin_types[(int) ENUM] \ = build_function_type_list (builtin_types[(int) RETURN], \ builtin_types[(int) ARG1], \ builtin_types[(int) ARG2], \ builtin_types[(int) ARG3], \ builtin_types[(int) ARG4], \ builtin_types[(int) ARG5], \ builtin_types[(int) ARG6], \ builtin_types[(int) ARG7], \ builtin_types[(int) ARG8], \ builtin_types[(int) ARG9], \ builtin_types[(int) ARG10], \ builtin_types[(int) ARG11], \ NULL_TREE); #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \ builtin_types[(int) ENUM] \ = build_varargs_function_type_list (builtin_types[(int) RETURN], \ NULL_TREE); #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \ builtin_types[(int) ENUM] \ = build_varargs_function_type_list (builtin_types[(int) RETURN], \ builtin_types[(int) ARG1], \ NULL_TREE); #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \ builtin_types[(int) ENUM] \ = build_varargs_function_type_list (builtin_types[(int) RETURN], \ builtin_types[(int) ARG1], \ builtin_types[(int) ARG2], \ NULL_TREE); #define DEF_FUNCTION_TYPE_VAR_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ ARG6) \ builtin_types[(int) ENUM] \ = build_varargs_function_type_list (builtin_types[(int) RETURN], \ builtin_types[(int) ARG1], \ builtin_types[(int) ARG2], \ builtin_types[(int) ARG3], \ builtin_types[(int) ARG4], \ builtin_types[(int) ARG5], \ builtin_types[(int) ARG6], \ NULL_TREE); #define DEF_FUNCTION_TYPE_VAR_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \ ARG6, ARG7) \ builtin_types[(int) ENUM] \ = build_varargs_function_type_list (builtin_types[(int) RETURN], \ builtin_types[(int) ARG1], \ builtin_types[(int) ARG2], \ builtin_types[(int) ARG3], \ builtin_types[(int) ARG4], \ builtin_types[(int) ARG5], \ builtin_types[(int) ARG6], \ builtin_types[(int) ARG7], \ NULL_TREE); #define 
DEF_POINTER_TYPE(ENUM, TYPE) \ builtin_types[(int) ENUM] \ = build_pointer_type (builtin_types[(int) TYPE]); #include "types.def" #undef DEF_PRIMITIVE_TYPE #undef DEF_FUNCTION_TYPE_0 #undef DEF_FUNCTION_TYPE_1 #undef DEF_FUNCTION_TYPE_2 #undef DEF_FUNCTION_TYPE_3 #undef DEF_FUNCTION_TYPE_4 #undef DEF_FUNCTION_TYPE_5 #undef DEF_FUNCTION_TYPE_6 #undef DEF_FUNCTION_TYPE_7 #undef DEF_FUNCTION_TYPE_8 #undef DEF_FUNCTION_TYPE_10 #undef DEF_FUNCTION_TYPE_VAR_0 #undef DEF_FUNCTION_TYPE_VAR_1 #undef DEF_FUNCTION_TYPE_VAR_2 #undef DEF_FUNCTION_TYPE_VAR_6 #undef DEF_FUNCTION_TYPE_VAR_7 #undef DEF_POINTER_TYPE builtin_types[(int) BT_LAST] = NULL_TREE; /* Initialize synchronization builtins. */ #undef DEF_SYNC_BUILTIN #define DEF_SYNC_BUILTIN(code, name, type, attr) \ gfc_define_builtin (name, builtin_types[type], code, name, \ attr); #include "../sync-builtins.def" #undef DEF_SYNC_BUILTIN if (flag_openacc) { #undef DEF_GOACC_BUILTIN #define DEF_GOACC_BUILTIN(code, name, type, attr) \ gfc_define_builtin ("__builtin_" name, builtin_types[type], \ code, name, attr); #undef DEF_GOACC_BUILTIN_COMPILER #define DEF_GOACC_BUILTIN_COMPILER(code, name, type, attr) \ gfc_define_builtin (name, builtin_types[type], code, name, attr); #undef DEF_GOMP_BUILTIN #define DEF_GOMP_BUILTIN(code, name, type, attr) /* ignore */ #include "../omp-builtins.def" #undef DEF_GOACC_BUILTIN #undef DEF_GOACC_BUILTIN_COMPILER #undef DEF_GOMP_BUILTIN } if (flag_openmp || flag_openmp_simd || flag_tree_parallelize_loops) { #undef DEF_GOACC_BUILTIN #define DEF_GOACC_BUILTIN(code, name, type, attr) /* ignore */ #undef DEF_GOACC_BUILTIN_COMPILER #define DEF_GOACC_BUILTIN_COMPILER(code, name, type, attr) /* ignore */ #undef DEF_GOMP_BUILTIN #define DEF_GOMP_BUILTIN(code, name, type, attr) \ gfc_define_builtin ("__builtin_" name, builtin_types[type], \ code, name, attr); #include "../omp-builtins.def" #undef DEF_GOACC_BUILTIN #undef DEF_GOACC_BUILTIN_COMPILER #undef DEF_GOMP_BUILTIN } #ifdef ENABLE_HSA if (!flag_disable_hsa) { #undef DEF_HSA_BUILTIN #define DEF_HSA_BUILTIN(code, name, type, attr) \ gfc_define_builtin ("__builtin_" name, builtin_types[type], \ code, name, attr); #include "../hsa-builtins.def" } #endif gfc_define_builtin ("__builtin_trap", builtin_types[BT_FN_VOID], BUILT_IN_TRAP, NULL, ATTR_NOTHROW_LEAF_LIST); TREE_THIS_VOLATILE (builtin_decl_explicit (BUILT_IN_TRAP)) = 1; ftype = build_varargs_function_type_list (ptr_type_node, const_ptr_type_node, size_type_node, NULL_TREE); gfc_define_builtin ("__builtin_assume_aligned", ftype, BUILT_IN_ASSUME_ALIGNED, "__builtin_assume_aligned", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__emutls_get_address", builtin_types[BT_FN_PTR_PTR], BUILT_IN_EMUTLS_GET_ADDRESS, "__emutls_get_address", ATTR_CONST_NOTHROW_LEAF_LIST); gfc_define_builtin ("__emutls_register_common", builtin_types[BT_FN_VOID_PTR_WORD_WORD_PTR], BUILT_IN_EMUTLS_REGISTER_COMMON, "__emutls_register_common", ATTR_NOTHROW_LEAF_LIST); build_common_builtin_nodes (); targetm.init_builtins (); }
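/* The bit-counting builtins registered above back the Fortran LEADZ, TRAILZ, POPCNT and POPPAR intrinsics; a hypothetical stand-alone example exercising the same GCC builtins directly: */

#include <cstdio>

int main ()
{
  unsigned x = 0x00F0u;
  std::printf ("clz=%d ctz=%d popcount=%d parity=%d\n",
               __builtin_clz (x),       // leading zero bits
               __builtin_ctz (x),       // trailing zero bits
               __builtin_popcount (x),  // number of set bits
               __builtin_parity (x));   // popcount modulo 2
  return 0;
}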
static unsigned int tree_nrv (void) { tree result = DECL_RESULT (current_function_decl); tree result_type = TREE_TYPE (result); tree found = NULL; basic_block bb; gimple_stmt_iterator gsi; struct nrv_data data; /* If this function does not return an aggregate type in memory, then there is nothing to do. */ if (!aggregate_value_p (result, current_function_decl)) return 0; /* If a GIMPLE type is returned in memory, finalize_nrv_r might create non-GIMPLE. */ if (is_gimple_reg_type (result_type)) return 0; /* Look through each block for assignments to the RESULT_DECL. */ FOR_EACH_BB (bb) { for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) { gimple stmt = gsi_stmt (gsi); tree ret_val; if (gimple_code (stmt) == GIMPLE_RETURN) { /* In a function with an aggregate return value, the gimplifier has changed all non-empty RETURN_EXPRs to return the RESULT_DECL. */ ret_val = gimple_return_retval (stmt); if (ret_val) gcc_assert (ret_val == result); } else if (is_gimple_assign (stmt) && gimple_assign_lhs (stmt) == result) { tree rhs; if (!gimple_assign_copy_p (stmt)) return 0; rhs = gimple_assign_rhs1 (stmt); /* Now verify that this return statement uses the same value as any previously encountered return statement. */ if (found != NULL) { /* If we found a return statement using a different variable than previous return statements, then we can not perform NRV optimizations. */ if (found != rhs) return 0; } else found = rhs; /* The returned value must be a local automatic variable of the same type and alignment as the function's result. */ if (TREE_CODE (found) != VAR_DECL || TREE_THIS_VOLATILE (found) || DECL_CONTEXT (found) != current_function_decl || TREE_STATIC (found) || TREE_ADDRESSABLE (found) || DECL_ALIGN (found) > DECL_ALIGN (result) || !useless_type_conversion_p (result_type, TREE_TYPE (found))) return 0; } else if (is_gimple_assign (stmt)) { tree addr = get_base_address (gimple_assign_lhs (stmt)); /* If there's any MODIFY of component of RESULT, then bail out. */ if (addr && addr == result) return 0; } } } if (!found) return 0; /* If dumping details, then note once and only the NRV replacement. */ if (dump_file && (dump_flags & TDF_DETAILS)) { fprintf (dump_file, "NRV Replaced: "); print_generic_expr (dump_file, found, dump_flags); fprintf (dump_file, " with: "); print_generic_expr (dump_file, result, dump_flags); fprintf (dump_file, "\n"); } /* At this point we know that all the return statements return the same local which has suitable attributes for NRV. Copy debugging information from FOUND to RESULT. */ DECL_NAME (result) = DECL_NAME (found); DECL_SOURCE_LOCATION (result) = DECL_SOURCE_LOCATION (found); DECL_ABSTRACT_ORIGIN (result) = DECL_ABSTRACT_ORIGIN (found); TREE_ADDRESSABLE (result) = TREE_ADDRESSABLE (found); /* Now walk through the function changing all references to VAR to be RESULT. */ data.var = found; data.result = result; FOR_EACH_BB (bb) { for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); ) { gimple stmt = gsi_stmt (gsi); /* If this is a copy from VAR to RESULT, remove it. */ if (gimple_assign_copy_p (stmt) && gimple_assign_lhs (stmt) == result && gimple_assign_rhs1 (stmt) == found) gsi_remove (&gsi, true); else { struct walk_stmt_info wi; memset (&wi, 0, sizeof (wi)); wi.info = &data; walk_gimple_op (stmt, finalize_nrv_r, &wi); gsi_next (&gsi); } } } /* FOUND is no longer used. Ensure it gets removed. */ var_ann (found)->used = 0; return 0; }
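/* Source-level pattern the NRV pass above looks for (illustrative example): every return statement hands back the same local aggregate, so the local can be constructed directly in the function's return slot. */

#include <cstring>

struct Big { char buf[256]; };

Big make_big (const char *s)
{
  Big tmp;                                      // the single named local...
  std::strncpy (tmp.buf, s, sizeof tmp.buf - 1);
  tmp.buf[sizeof tmp.buf - 1] = '\0';
  return tmp;                                   // ...returned on every path
}

int main ()
{
  Big b = make_big ("hello");
  return b.buf[0] == 'h' ? 0 : 1;
}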
tree make_thunk (tree function, bool this_adjusting, tree fixed_offset, tree virtual_offset) { HOST_WIDE_INT d; tree thunk; gcc_assert (TREE_CODE (function) == FUNCTION_DECL); /* We can have this thunks to covariant thunks, but not vice versa. */ gcc_assert (!DECL_THIS_THUNK_P (function)); gcc_assert (!DECL_RESULT_THUNK_P (function) || this_adjusting); /* Scale the VIRTUAL_OFFSET to be in terms of bytes. */ if (this_adjusting && virtual_offset) virtual_offset = size_binop (MULT_EXPR, virtual_offset, convert (ssizetype, TYPE_SIZE_UNIT (vtable_entry_type))); d = tree_low_cst (fixed_offset, 0); /* See if we already have the thunk in question. For this_adjusting thunks VIRTUAL_OFFSET will be an INTEGER_CST, for covariant thunks it will be a BINFO. */ for (thunk = DECL_THUNKS (function); thunk; thunk = TREE_CHAIN (thunk)) if (DECL_THIS_THUNK_P (thunk) == this_adjusting && THUNK_FIXED_OFFSET (thunk) == d && !virtual_offset == !THUNK_VIRTUAL_OFFSET (thunk) && (!virtual_offset || (this_adjusting ? tree_int_cst_equal (THUNK_VIRTUAL_OFFSET (thunk), virtual_offset) : THUNK_VIRTUAL_OFFSET (thunk) == virtual_offset))) return thunk; /* All thunks must be created before FUNCTION is actually emitted; the ABI requires that all thunks be emitted together with the function to which they transfer control. */ gcc_assert (!TREE_ASM_WRITTEN (function)); /* Likewise, we can only be adding thunks to a function declared in the class currently being laid out. */ gcc_assert (TYPE_SIZE (DECL_CONTEXT (function)) && TYPE_BEING_DEFINED (DECL_CONTEXT (function))); thunk = build_decl (FUNCTION_DECL, NULL_TREE, TREE_TYPE (function)); DECL_LANG_SPECIFIC (thunk) = DECL_LANG_SPECIFIC (function); cxx_dup_lang_specific_decl (thunk); DECL_THUNKS (thunk) = NULL_TREE; DECL_CONTEXT (thunk) = DECL_CONTEXT (function); TREE_READONLY (thunk) = TREE_READONLY (function); TREE_THIS_VOLATILE (thunk) = TREE_THIS_VOLATILE (function); TREE_PUBLIC (thunk) = TREE_PUBLIC (function); SET_DECL_THUNK_P (thunk, this_adjusting); THUNK_TARGET (thunk) = function; THUNK_FIXED_OFFSET (thunk) = d; THUNK_VIRTUAL_OFFSET (thunk) = virtual_offset; THUNK_ALIAS (thunk) = NULL_TREE; /* The thunk itself is not a constructor or destructor, even if the thing it is thunking to is. */ DECL_INTERFACE_KNOWN (thunk) = 1; DECL_NOT_REALLY_EXTERN (thunk) = 1; DECL_SAVED_FUNCTION_DATA (thunk) = NULL; DECL_DESTRUCTOR_P (thunk) = 0; DECL_CONSTRUCTOR_P (thunk) = 0; DECL_EXTERNAL (thunk) = 1; DECL_ARTIFICIAL (thunk) = 1; /* Even if this thunk is a member of a local class, we don't need a static chain. */ DECL_NO_STATIC_CHAIN (thunk) = 1; /* The THUNK is not a pending inline, even if the FUNCTION is. */ DECL_PENDING_INLINE_P (thunk) = 0; DECL_INLINE (thunk) = 0; DECL_DECLARED_INLINE_P (thunk) = 0; /* Nor has it been deferred. */ DECL_DEFERRED_FN (thunk) = 0; /* Nor is it a template instantiation. */ DECL_USE_TEMPLATE (thunk) = 0; DECL_TEMPLATE_INFO (thunk) = NULL; /* Add it to the list of thunks associated with FUNCTION. */ TREE_CHAIN (thunk) = DECL_THUNKS (function); DECL_THUNKS (function) = thunk; return thunk; }
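/* Illustrative example of where a `this'-adjusting thunk comes from: a virtual override reached through a non-primary base, so the incoming pointer must be shifted before the real function body runs. */

struct A { virtual ~A () {} virtual int f () { return 1; } };
struct B { virtual ~B () {} virtual int g () { return 2; } };

struct D : A, B
{
  int g () override { return 3; }   // calls through a B* go via a thunk that
                                    // adjusts 'this' back to the D object
};

int main ()
{
  D d;
  B *pb = &d;                       // points at the B subobject inside d
  return pb->g () == 3 ? 0 : 1;     // dispatch goes through the thunk
}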
static unsigned int ipa_comdats (void) { hash_map<symtab_node *, tree> map (251); hash_map<tree, symtab_node *> comdat_head_map (251); symtab_node *symbol; bool comdat_group_seen = false; symtab_node *first = (symtab_node *) (void *) 1; tree group; /* Start the dataflow by assigning comdat group to symbols that are in comdat groups already. All other externally visible symbols must stay, we use ERROR_MARK_NODE as bottom for the propagation. */ FOR_EACH_DEFINED_SYMBOL (symbol) if (!symbol->real_symbol_p ()) ; else if ((group = symbol->get_comdat_group ()) != NULL) { map.put (symbol, group); comdat_head_map.put (group, symbol); comdat_group_seen = true; /* Mark the symbol so we won't waste time visiting it for dataflow. */ symbol->aux = (symtab_node *) (void *) 1; } /* See symbols that can not be privatized to comdats; that is externally visible symbols or otherwise used ones. We also do not want to mangle user section names. */ else if (symbol->externally_visible || symbol->force_output || symbol->used_from_other_partition || TREE_THIS_VOLATILE (symbol->decl) || symbol->get_section () || (TREE_CODE (symbol->decl) == FUNCTION_DECL && (DECL_STATIC_CONSTRUCTOR (symbol->decl) || DECL_STATIC_DESTRUCTOR (symbol->decl)))) { map.put (symbol->ultimate_alias_target (), error_mark_node); /* Mark the symbol so we won't waste time visiting it for dataflow. */ symbol->aux = (symtab_node *) (void *) 1; } else { /* Enqueue symbol for dataflow. */ symbol->aux = first; first = symbol; } if (!comdat_group_seen) { FOR_EACH_DEFINED_SYMBOL (symbol) symbol->aux = NULL; return 0; } /* The actual dataflow. */ while (first != (void *) 1) { tree group = NULL; tree newgroup, *val; symbol = first; first = (symtab_node *)first->aux; /* Get current lattice value of SYMBOL. */ val = map.get (symbol); if (val) group = *val; /* If it is bottom, there is nothing to do; do not clear AUX so we won't re-queue the symbol. */ if (group == error_mark_node) continue; newgroup = propagate_comdat_group (symbol, group, map); /* If nothing changed, proceed to next symbol. */ if (newgroup == group) { symbol->aux = NULL; continue; } /* Update lattice value and enqueue all references for re-visiting. */ gcc_assert (newgroup); if (val) *val = newgroup; else map.put (symbol, newgroup); enqueue_references (&first, symbol); /* We may need to revisit the symbol unless it is BOTTOM. */ if (newgroup != error_mark_node) symbol->aux = NULL; } /* Finally assign symbols to the sections. */ FOR_EACH_DEFINED_SYMBOL (symbol) { struct cgraph_node *fun; symbol->aux = NULL; if (!symbol->get_comdat_group () && !symbol->alias /* Thunks to external functions do not need to be categorized. */ && (!(fun = dyn_cast <cgraph_node *> (symbol)) || !fun->thunk.thunk_p || fun->function_symbol ()->definition) && symbol->real_symbol_p ()) { tree *val = map.get (symbol); /* A NULL here means that SYMBOL is unreachable in the definition of ipa-comdats. Either ipa-comdats is wrong about this or someone forgot to cleanup and remove unreachable functions earlier. */ gcc_assert (val); tree group = *val; if (group == error_mark_node) continue; if (dump_file) { fprintf (dump_file, "Localizing symbol\n"); symbol->dump (dump_file); fprintf (dump_file, "To group: %s\n", IDENTIFIER_POINTER (group)); } symbol->call_for_symbol_and_aliases (set_comdat_group, *comdat_head_map.get (group), true); } } return 0; }
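/* Hypothetical source-level situation ipa_comdats handles: a local helper reachable only from a comdat (here, inline) function may be moved into the caller's comdat group so duplicate copies can be discarded at link time. */

static int helper (int x) { return x * x; }   // no comdat group of its own

inline int squared (int x)                    // emitted in a comdat group
{
  return helper (x);                          // the only reference to helper
}

int main () { return squared (3) == 9 ? 0 : 1; }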
tree maybe_fold_tmr (tree ref) { struct mem_address addr; bool changed = false; tree new_ref, off; get_address_description (ref, &addr); if (addr.base && TREE_CODE (addr.base) == INTEGER_CST && !integer_zerop (addr.base)) { addr.offset = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (addr.offset), addr.offset, addr.base); addr.base = NULL_TREE; changed = true; } if (addr.symbol && TREE_CODE (TREE_OPERAND (addr.symbol, 0)) == MEM_REF) { addr.offset = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (addr.offset), addr.offset, TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 1)); addr.symbol = TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 0); changed = true; } else if (addr.symbol && handled_component_p (TREE_OPERAND (addr.symbol, 0))) { HOST_WIDE_INT offset; addr.symbol = build_fold_addr_expr (get_addr_base_and_unit_offset (TREE_OPERAND (addr.symbol, 0), &offset)); addr.offset = int_const_binop (PLUS_EXPR, addr.offset, size_int (offset)); changed = true; } if (addr.index && TREE_CODE (addr.index) == INTEGER_CST) { off = addr.index; if (addr.step) { off = fold_binary_to_constant (MULT_EXPR, sizetype, off, addr.step); addr.step = NULL_TREE; } addr.offset = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (addr.offset), addr.offset, off); addr.index = NULL_TREE; changed = true; } if (!changed) return NULL_TREE; /* If we have propagated something into this TARGET_MEM_REF and thus ended up folding it, always create a new TARGET_MEM_REF regardless of whether it is valid in this form on the target - the propagation result would not be valid anyway. */ new_ref = create_mem_ref_raw (TREE_TYPE (ref), TREE_TYPE (addr.offset), &addr, false); TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (ref); TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (ref); return new_ref; }
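/* The index-folding arithmetic from maybe_fold_tmr, restated on plain integers (not GCC trees; the struct and names are made up for illustration): a constant index collapses into the offset as offset += index * step. */

#include <cassert>

struct addr_parts { long base, index, step, offset; bool has_index; };

static void fold_const_index (addr_parts &a)
{
  if (a.has_index)
    {
      a.offset += a.index * a.step;   // index * step folds into the offset
      a.has_index = false;            // the index component disappears
    }
}

int main ()
{
  addr_parts a = { 0x1000, 3, 8, 4, true };
  fold_const_index (a);
  assert (a.offset == 28 && !a.has_index);
  return 0;
}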
static void tree_nrv (void) { tree result = DECL_RESULT (current_function_decl); tree result_type = TREE_TYPE (result); tree found = NULL; basic_block bb; block_stmt_iterator bsi; struct nrv_data data; /* If this function does not return an aggregate type in memory, then there is nothing to do. */ if (!aggregate_value_p (result, current_function_decl)) return; /* Look through each block for assignments to the RESULT_DECL. */ FOR_EACH_BB (bb) { for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi)) { tree stmt = bsi_stmt (bsi); tree ret_expr; if (TREE_CODE (stmt) == RETURN_EXPR) { /* In a function with an aggregate return value, the gimplifier has changed all non-empty RETURN_EXPRs to return the RESULT_DECL. */ ret_expr = TREE_OPERAND (stmt, 0); if (ret_expr) gcc_assert (ret_expr == result); } else if (TREE_CODE (stmt) == MODIFY_EXPR && TREE_OPERAND (stmt, 0) == result) { ret_expr = TREE_OPERAND (stmt, 1); /* Now verify that this return statement uses the same value as any previously encountered return statement. */ if (found != NULL) { /* If we found a return statement using a different variable than previous return statements, then we can not perform NRV optimizations. */ if (found != ret_expr) return; } else found = ret_expr; /* The returned value must be a local automatic variable of the same type and alignment as the function's result. */ if (TREE_CODE (found) != VAR_DECL || TREE_THIS_VOLATILE (found) || DECL_CONTEXT (found) != current_function_decl || TREE_STATIC (found) || TREE_ADDRESSABLE (found) || DECL_ALIGN (found) > DECL_ALIGN (result) || !lang_hooks.types_compatible_p (TREE_TYPE (found), result_type)) return; } } } if (!found) return; /* If dumping details, then note once and only the NRV replacement. */ if (dump_file && (dump_flags & TDF_DETAILS)) { fprintf (dump_file, "NRV Replaced: "); print_generic_expr (dump_file, found, dump_flags); fprintf (dump_file, " with: "); print_generic_expr (dump_file, result, dump_flags); fprintf (dump_file, "\n"); } /* At this point we know that all the return statements return the same local which has suitable attributes for NRV. Copy debugging information from FOUND to RESULT. */ DECL_NAME (result) = DECL_NAME (found); DECL_SOURCE_LOCATION (result) = DECL_SOURCE_LOCATION (found); DECL_ABSTRACT_ORIGIN (result) = DECL_ABSTRACT_ORIGIN (found); TREE_ADDRESSABLE (result) = TREE_ADDRESSABLE (found); /* Now walk through the function changing all references to VAR to be RESULT. */ data.var = found; data.result = result; FOR_EACH_BB (bb) { for (bsi = bsi_start (bb); !bsi_end_p (bsi); ) { tree *tp = bsi_stmt_ptr (bsi); /* If this is a copy from VAR to RESULT, remove it. */ if (TREE_CODE (*tp) == MODIFY_EXPR && TREE_OPERAND (*tp, 0) == result && TREE_OPERAND (*tp, 1) == found) bsi_remove (&bsi); else { walk_tree (tp, finalize_nrv_r, &data, 0); bsi_next (&bsi); } } } /* FOUND is no longer used. Ensure it gets removed. */ var_ann (found)->used = 0; }
static inline void pack_ts_base_value_fields (struct bitpack_d *bp, tree expr) { bp_pack_value (bp, TREE_CODE (expr), 16); if (!TYPE_P (expr)) { bp_pack_value (bp, TREE_SIDE_EFFECTS (expr), 1); bp_pack_value (bp, TREE_CONSTANT (expr), 1); bp_pack_value (bp, TREE_READONLY (expr), 1); /* TREE_PUBLIC is used on types to indicate that the type has a TYPE_CACHED_VALUES vector. This is not streamed out, so we skip it here. */ bp_pack_value (bp, TREE_PUBLIC (expr), 1); } else bp_pack_value (bp, 0, 4); bp_pack_value (bp, TREE_ADDRESSABLE (expr), 1); bp_pack_value (bp, TREE_THIS_VOLATILE (expr), 1); if (DECL_P (expr)) bp_pack_value (bp, DECL_UNSIGNED (expr), 1); else if (TYPE_P (expr)) bp_pack_value (bp, TYPE_UNSIGNED (expr), 1); else bp_pack_value (bp, 0, 1); /* We write debug info two times, do not confuse the second one. The only relevant TREE_ASM_WRITTEN use is on SSA names. */ bp_pack_value (bp, (TREE_CODE (expr) != SSA_NAME ? 0 : TREE_ASM_WRITTEN (expr)), 1); if (TYPE_P (expr)) bp_pack_value (bp, TYPE_ARTIFICIAL (expr), 1); else bp_pack_value (bp, TREE_NO_WARNING (expr), 1); bp_pack_value (bp, TREE_NOTHROW (expr), 1); bp_pack_value (bp, TREE_STATIC (expr), 1); if (TREE_CODE (expr) != TREE_BINFO) bp_pack_value (bp, TREE_PRIVATE (expr), 1); else bp_pack_value (bp, 0, 1); bp_pack_value (bp, TREE_PROTECTED (expr), 1); bp_pack_value (bp, TREE_DEPRECATED (expr), 1); if (TYPE_P (expr)) { if (AGGREGATE_TYPE_P (expr)) bp_pack_value (bp, TYPE_REVERSE_STORAGE_ORDER (expr), 1); else bp_pack_value (bp, TYPE_SATURATING (expr), 1); bp_pack_value (bp, TYPE_ADDR_SPACE (expr), 8); } else if (TREE_CODE (expr) == BIT_FIELD_REF || TREE_CODE (expr) == MEM_REF) { bp_pack_value (bp, REF_REVERSE_STORAGE_ORDER (expr), 1); bp_pack_value (bp, 0, 8); } else if (TREE_CODE (expr) == SSA_NAME) { bp_pack_value (bp, SSA_NAME_IS_DEFAULT_DEF (expr), 1); bp_pack_value (bp, 0, 8); } else bp_pack_value (bp, 0, 9); }
void print_node (FILE *file, const char *prefix, tree node, int indent) { int hash; struct bucket *b; machine_mode mode; enum tree_code_class tclass; int len; int i; expanded_location xloc; enum tree_code code; if (node == 0) return; code = TREE_CODE (node); tclass = TREE_CODE_CLASS (code); /* Don't get too deep in nesting. If the user wants to see deeper, it is easy to use the address of a lowest-level node as an argument in another call to debug_tree. */ if (indent > 24) { print_node_brief (file, prefix, node, indent); return; } if (indent > 8 && (tclass == tcc_type || tclass == tcc_declaration)) { print_node_brief (file, prefix, node, indent); return; } /* It is unsafe to look at any other fields of an ERROR_MARK node. */ if (code == ERROR_MARK) { print_node_brief (file, prefix, node, indent); return; } /* Allow this function to be called if the table is not there. */ if (table) { hash = ((uintptr_t) node) % HASH_SIZE; /* If node is in the table, just mention its address. */ for (b = table[hash]; b; b = b->next) if (b->node == node) { print_node_brief (file, prefix, node, indent); return; } /* Add this node to the table. */ b = XNEW (struct bucket); b->node = node; b->next = table[hash]; table[hash] = b; } /* Indent to the specified column, since this is the long form. */ indent_to (file, indent); /* Print the slot this node is in, and its code, and address. */ fprintf (file, "%s <%s", prefix, get_tree_code_name (code)); dump_addr (file, " ", node); /* Print the name, if any. */ if (tclass == tcc_declaration) { if (DECL_NAME (node)) fprintf (file, " %s", IDENTIFIER_POINTER (DECL_NAME (node))); else if (code == LABEL_DECL && LABEL_DECL_UID (node) != -1) { if (dump_flags & TDF_NOUID) fprintf (file, " L.xxxx"); else fprintf (file, " L.%d", (int) LABEL_DECL_UID (node)); } else { if (dump_flags & TDF_NOUID) fprintf (file, " %c.xxxx", code == CONST_DECL ? 'C' : 'D'); else fprintf (file, " %c.%u", code == CONST_DECL ? 'C' : 'D', DECL_UID (node)); } } else if (tclass == tcc_type) { if (TYPE_NAME (node)) { if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE) fprintf (file, " %s", IDENTIFIER_POINTER (TYPE_NAME (node))); else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL && DECL_NAME (TYPE_NAME (node))) fprintf (file, " %s", IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (node)))); } } if (code == IDENTIFIER_NODE) fprintf (file, " %s", IDENTIFIER_POINTER (node)); if (code == INTEGER_CST) { if (indent <= 4) print_node_brief (file, "type", TREE_TYPE (node), indent + 4); } else if (CODE_CONTAINS_STRUCT (code, TS_TYPED)) { print_node (file, "type", TREE_TYPE (node), indent + 4); if (TREE_TYPE (node)) indent_to (file, indent + 3); } if (!TYPE_P (node) && TREE_SIDE_EFFECTS (node)) fputs (" side-effects", file); if (TYPE_P (node) ? 
TYPE_READONLY (node) : TREE_READONLY (node)) fputs (" readonly", file); if (TYPE_P (node) && TYPE_ATOMIC (node)) fputs (" atomic", file); if (!TYPE_P (node) && TREE_CONSTANT (node)) fputs (" constant", file); else if (TYPE_P (node) && TYPE_SIZES_GIMPLIFIED (node)) fputs (" sizes-gimplified", file); if (TYPE_P (node) && !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (node))) fprintf (file, " address-space-%d", TYPE_ADDR_SPACE (node)); if (TREE_ADDRESSABLE (node)) fputs (" addressable", file); if (TREE_THIS_VOLATILE (node)) fputs (" volatile", file); if (TREE_ASM_WRITTEN (node)) fputs (" asm_written", file); if (TREE_USED (node)) fputs (" used", file); if (TREE_NOTHROW (node)) fputs (" nothrow", file); if (TREE_PUBLIC (node)) fputs (" public", file); if (TREE_PRIVATE (node)) fputs (" private", file); if (TREE_PROTECTED (node)) fputs (" protected", file); if (TREE_STATIC (node)) fputs (code == CALL_EXPR ? " must-tail-call" : " static", file); if (TREE_DEPRECATED (node)) fputs (" deprecated", file); if (TREE_VISITED (node)) fputs (" visited", file); if (code != TREE_VEC && code != INTEGER_CST && code != SSA_NAME) { if (TREE_LANG_FLAG_0 (node)) fputs (" tree_0", file); if (TREE_LANG_FLAG_1 (node)) fputs (" tree_1", file); if (TREE_LANG_FLAG_2 (node)) fputs (" tree_2", file); if (TREE_LANG_FLAG_3 (node)) fputs (" tree_3", file); if (TREE_LANG_FLAG_4 (node)) fputs (" tree_4", file); if (TREE_LANG_FLAG_5 (node)) fputs (" tree_5", file); if (TREE_LANG_FLAG_6 (node)) fputs (" tree_6", file); } /* DECL_ nodes have additional attributes. */ switch (TREE_CODE_CLASS (code)) { case tcc_declaration: if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON)) { if (DECL_UNSIGNED (node)) fputs (" unsigned", file); if (DECL_IGNORED_P (node)) fputs (" ignored", file); if (DECL_ABSTRACT_P (node)) fputs (" abstract", file); if (DECL_EXTERNAL (node)) fputs (" external", file); if (DECL_NONLOCAL (node)) fputs (" nonlocal", file); } if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS)) { if (DECL_WEAK (node)) fputs (" weak", file); if (DECL_IN_SYSTEM_HEADER (node)) fputs (" in_system_header", file); } if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL) && code != LABEL_DECL && code != FUNCTION_DECL && DECL_REGISTER (node)) fputs (" regdecl", file); if (code == TYPE_DECL && TYPE_DECL_SUPPRESS_DEBUG (node)) fputs (" suppress-debug", file); if (code == FUNCTION_DECL && DECL_FUNCTION_SPECIFIC_TARGET (node)) fputs (" function-specific-target", file); if (code == FUNCTION_DECL && DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node)) fputs (" function-specific-opt", file); if (code == FUNCTION_DECL && DECL_DECLARED_INLINE_P (node)) fputs (" autoinline", file); if (code == FUNCTION_DECL && DECL_BUILT_IN (node)) fputs (" built-in", file); if (code == FUNCTION_DECL && DECL_STATIC_CHAIN (node)) fputs (" static-chain", file); if (TREE_CODE (node) == FUNCTION_DECL && decl_is_tm_clone (node)) fputs (" tm-clone", file); if (code == FIELD_DECL && DECL_PACKED (node)) fputs (" packed", file); if (code == FIELD_DECL && DECL_BIT_FIELD (node)) fputs (" bit-field", file); if (code == FIELD_DECL && DECL_NONADDRESSABLE_P (node)) fputs (" nonaddressable", file); if (code == LABEL_DECL && EH_LANDING_PAD_NR (node)) fprintf (file, " landing-pad:%d", EH_LANDING_PAD_NR (node)); if (code == VAR_DECL && DECL_IN_TEXT_SECTION (node)) fputs (" in-text-section", file); if (code == VAR_DECL && DECL_IN_CONSTANT_POOL (node)) fputs (" in-constant-pool", file); if (code == VAR_DECL && DECL_COMMON (node)) fputs (" common", file); if (code == VAR_DECL && DECL_THREAD_LOCAL_P (node)) { fputs (" 
", file); fputs (tls_model_names[DECL_TLS_MODEL (node)], file); } if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON)) { if (DECL_VIRTUAL_P (node)) fputs (" virtual", file); if (DECL_PRESERVE_P (node)) fputs (" preserve", file); if (DECL_LANG_FLAG_0 (node)) fputs (" decl_0", file); if (DECL_LANG_FLAG_1 (node)) fputs (" decl_1", file); if (DECL_LANG_FLAG_2 (node)) fputs (" decl_2", file); if (DECL_LANG_FLAG_3 (node)) fputs (" decl_3", file); if (DECL_LANG_FLAG_4 (node)) fputs (" decl_4", file); if (DECL_LANG_FLAG_5 (node)) fputs (" decl_5", file); if (DECL_LANG_FLAG_6 (node)) fputs (" decl_6", file); if (DECL_LANG_FLAG_7 (node)) fputs (" decl_7", file); mode = DECL_MODE (node); fprintf (file, " %s", GET_MODE_NAME (mode)); } if ((code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL) && DECL_BY_REFERENCE (node)) fputs (" passed-by-reference", file); if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS) && DECL_DEFER_OUTPUT (node)) fputs (" defer-output", file); xloc = expand_location (DECL_SOURCE_LOCATION (node)); fprintf (file, " file %s line %d col %d", xloc.file, xloc.line, xloc.column); if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON)) { print_node (file, "size", DECL_SIZE (node), indent + 4); print_node (file, "unit size", DECL_SIZE_UNIT (node), indent + 4); if (code != FUNCTION_DECL || DECL_BUILT_IN (node)) indent_to (file, indent + 3); if (DECL_USER_ALIGN (node)) fprintf (file, " user"); fprintf (file, " align %d", DECL_ALIGN (node)); if (code == FIELD_DECL) fprintf (file, " offset_align " HOST_WIDE_INT_PRINT_UNSIGNED, DECL_OFFSET_ALIGN (node)); if (code == FUNCTION_DECL && DECL_BUILT_IN (node)) { if (DECL_BUILT_IN_CLASS (node) == BUILT_IN_MD) fprintf (file, " built-in BUILT_IN_MD %d", DECL_FUNCTION_CODE (node)); else fprintf (file, " built-in %s:%s", built_in_class_names[(int) DECL_BUILT_IN_CLASS (node)], built_in_names[(int) DECL_FUNCTION_CODE (node)]); } } if (code == FIELD_DECL) { print_node (file, "offset", DECL_FIELD_OFFSET (node), indent + 4); print_node (file, "bit offset", DECL_FIELD_BIT_OFFSET (node), indent + 4); if (DECL_BIT_FIELD_TYPE (node)) print_node (file, "bit_field_type", DECL_BIT_FIELD_TYPE (node), indent + 4); } print_node_brief (file, "context", DECL_CONTEXT (node), indent + 4); if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON)) { print_node_brief (file, "attributes", DECL_ATTRIBUTES (node), indent + 4); if (code != PARM_DECL) print_node_brief (file, "initial", DECL_INITIAL (node), indent + 4); } if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL)) { print_node_brief (file, "abstract_origin", DECL_ABSTRACT_ORIGIN (node), indent + 4); } if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON)) { print_node (file, "result", DECL_RESULT_FLD (node), indent + 4); } lang_hooks.print_decl (file, node, indent); if (DECL_RTL_SET_P (node)) { indent_to (file, indent + 4); print_rtl (file, DECL_RTL (node)); } if (code == PARM_DECL) { print_node (file, "arg-type", DECL_ARG_TYPE (node), indent + 4); if (DECL_INCOMING_RTL (node) != 0) { indent_to (file, indent + 4); fprintf (file, "incoming-rtl "); print_rtl (file, DECL_INCOMING_RTL (node)); } } else if (code == FUNCTION_DECL && DECL_STRUCT_FUNCTION (node) != 0) { print_node (file, "arguments", DECL_ARGUMENTS (node), indent + 4); indent_to (file, indent + 4); dump_addr (file, "struct-function ", DECL_STRUCT_FUNCTION (node)); } if ((code == VAR_DECL || code == PARM_DECL) && DECL_HAS_VALUE_EXPR_P (node)) print_node (file, "value-expr", DECL_VALUE_EXPR (node), indent + 4); /* Print the decl chain only if decl is at second level. 
*/ if (indent == 4) print_node (file, "chain", TREE_CHAIN (node), indent + 4); else print_node_brief (file, "chain", TREE_CHAIN (node), indent + 4); break; case tcc_type: if (TYPE_UNSIGNED (node)) fputs (" unsigned", file); if (TYPE_NO_FORCE_BLK (node)) fputs (" no-force-blk", file); if (TYPE_STRING_FLAG (node)) fputs (" string-flag", file); if (TYPE_NEEDS_CONSTRUCTING (node)) fputs (" needs-constructing", file); if ((code == RECORD_TYPE || code == UNION_TYPE || code == QUAL_UNION_TYPE || code == ARRAY_TYPE) && TYPE_REVERSE_STORAGE_ORDER (node)) fputs (" reverse-storage-order", file); /* The transparent-union flag is used for different things in different nodes. */ if ((code == UNION_TYPE || code == RECORD_TYPE) && TYPE_TRANSPARENT_AGGR (node)) fputs (" transparent-aggr", file); else if (code == ARRAY_TYPE && TYPE_NONALIASED_COMPONENT (node)) fputs (" nonaliased-component", file); if (TYPE_PACKED (node)) fputs (" packed", file); if (TYPE_RESTRICT (node)) fputs (" restrict", file); if (TYPE_LANG_FLAG_0 (node)) fputs (" type_0", file); if (TYPE_LANG_FLAG_1 (node)) fputs (" type_1", file); if (TYPE_LANG_FLAG_2 (node)) fputs (" type_2", file); if (TYPE_LANG_FLAG_3 (node)) fputs (" type_3", file); if (TYPE_LANG_FLAG_4 (node)) fputs (" type_4", file); if (TYPE_LANG_FLAG_5 (node)) fputs (" type_5", file); if (TYPE_LANG_FLAG_6 (node)) fputs (" type_6", file); if (TYPE_LANG_FLAG_7 (node)) fputs (" type_7", file); mode = TYPE_MODE (node); fprintf (file, " %s", GET_MODE_NAME (mode)); print_node (file, "size", TYPE_SIZE (node), indent + 4); print_node (file, "unit size", TYPE_SIZE_UNIT (node), indent + 4); indent_to (file, indent + 3); if (TYPE_USER_ALIGN (node)) fprintf (file, " user"); fprintf (file, " align %d symtab %d alias set " HOST_WIDE_INT_PRINT_DEC, TYPE_ALIGN (node), TYPE_SYMTAB_ADDRESS (node), (HOST_WIDE_INT) TYPE_ALIAS_SET (node)); if (TYPE_STRUCTURAL_EQUALITY_P (node)) fprintf (file, " structural equality"); else dump_addr (file, " canonical type ", TYPE_CANONICAL (node)); print_node (file, "attributes", TYPE_ATTRIBUTES (node), indent + 4); if (INTEGRAL_TYPE_P (node) || code == REAL_TYPE || code == FIXED_POINT_TYPE) { fprintf (file, " precision %d", TYPE_PRECISION (node)); print_node_brief (file, "min", TYPE_MIN_VALUE (node), indent + 4); print_node_brief (file, "max", TYPE_MAX_VALUE (node), indent + 4); } if (code == ENUMERAL_TYPE) print_node (file, "values", TYPE_VALUES (node), indent + 4); else if (code == ARRAY_TYPE) print_node (file, "domain", TYPE_DOMAIN (node), indent + 4); else if (code == VECTOR_TYPE) fprintf (file, " nunits %d", (int) TYPE_VECTOR_SUBPARTS (node)); else if (code == RECORD_TYPE || code == UNION_TYPE || code == QUAL_UNION_TYPE) print_node (file, "fields", TYPE_FIELDS (node), indent + 4); else if (code == FUNCTION_TYPE || code == METHOD_TYPE) { if (TYPE_METHOD_BASETYPE (node)) print_node_brief (file, "method basetype", TYPE_METHOD_BASETYPE (node), indent + 4); print_node (file, "arg-types", TYPE_ARG_TYPES (node), indent + 4); } else if (code == OFFSET_TYPE) print_node_brief (file, "basetype", TYPE_OFFSET_BASETYPE (node), indent + 4); if (TYPE_CONTEXT (node)) print_node_brief (file, "context", TYPE_CONTEXT (node), indent + 4); lang_hooks.print_type (file, node, indent); if (TYPE_POINTER_TO (node) || TREE_CHAIN (node)) indent_to (file, indent + 3); print_node_brief (file, "pointer_to_this", TYPE_POINTER_TO (node), indent + 4); print_node_brief (file, "reference_to_this", TYPE_REFERENCE_TO (node), indent + 4); print_node_brief (file, "chain", TREE_CHAIN (node), indent 
+ 4); break; case tcc_expression: case tcc_comparison: case tcc_unary: case tcc_binary: case tcc_reference: case tcc_statement: case tcc_vl_exp: if (code == BIND_EXPR) { print_node (file, "vars", TREE_OPERAND (node, 0), indent + 4); print_node (file, "body", TREE_OPERAND (node, 1), indent + 4); print_node (file, "block", TREE_OPERAND (node, 2), indent + 4); break; } if (code == CALL_EXPR) { call_expr_arg_iterator iter; tree arg; print_node (file, "fn", CALL_EXPR_FN (node), indent + 4); print_node (file, "static_chain", CALL_EXPR_STATIC_CHAIN (node), indent + 4); i = 0; FOR_EACH_CALL_EXPR_ARG (arg, iter, node) { char temp[10]; sprintf (temp, "arg %d", i); print_node (file, temp, arg, indent + 4); i++; } }
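The excerpt above breaks off inside the expression/statement case of the switch. For orientation, print_node is normally reached through the debug_tree entry point, which allocates the static bucket TABLE consulted at the top of the function so that shared subtrees are only expanded once per call. A sketch modelled on print-tree.c (details such as DEBUG_FUNCTION and the cleanup vary between releases; the per-bucket allocations are deliberately not reclaimed on this debug-only path):

DEBUG_FUNCTION void
debug_tree (tree node)
{
  /* Hash table of nodes already printed in full.  */
  table = XCNEWVEC (struct bucket *, HASH_SIZE);
  print_node (stderr, "", node, 0);
  free (table);
  table = NULL;
  putc ('\n', stderr);
}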
static bool ortho_init (void) { tree n; input_location = BUILTINS_LOCATION; /* Create a global binding. Don't use push_binding, as neither a BLOCK nor a BIND_EXPR are needed. */ push_binding (GLOBAL_BINDING); build_common_tree_nodes (0, 0); n = build_decl (input_location, TYPE_DECL, get_identifier ("int"), integer_type_node); pushdecl (n); n = build_decl (input_location, TYPE_DECL, get_identifier ("char"), char_type_node); pushdecl (n); /* Create alloca builtin. */ { tree args_type = tree_cons (NULL_TREE, size_type_node, void_list_node); tree func_type = build_function_type (ptr_type_node, args_type); define_builtin ("__builtin_alloca", func_type, BUILT_IN_ALLOCA, NULL, 0); stack_alloc_function_ptr = build1 (ADDR_EXPR, build_pointer_type (func_type), builtin_decl_implicit (BUILT_IN_ALLOCA)); } { tree ptr_ftype = build_function_type (ptr_type_node, NULL_TREE); define_builtin ("__builtin_stack_save", ptr_ftype, BUILT_IN_STACK_SAVE, NULL, 0); } { tree ftype_ptr = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE); define_builtin ("__builtin_stack_restore", ftype_ptr, BUILT_IN_STACK_RESTORE, NULL, 0); } { tree ftype_ptr = build_function_type_list (void_type_node, NULL_TREE); define_builtin ("__builtin_trap", ftype_ptr, BUILT_IN_TRAP, NULL, ECF_NOTHROW | ECF_LEAF); TREE_THIS_VOLATILE (builtin_decl_explicit (BUILT_IN_TRAP)) = 1; } { REAL_VALUE_TYPE v; REAL_VALUE_FROM_INT (v, 1, 0, DFmode); real_ldexp (&fp_const_p5, &v, -1); REAL_VALUE_FROM_INT (v, -1, -1, DFmode); real_ldexp (&fp_const_m_p5, &v, -1); REAL_VALUE_FROM_INT (fp_const_zero, 0, 0, DFmode); } build_common_builtin_nodes (); // FIXME: this MAY remove the need for creating the builtins above... // Evaluate tree.c / build_common_builtin_nodes (); for each in turn. return true; }
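define_builtin is GHDL's own helper and its body is not shown above. A plausible sketch of such a helper, built only on the generic hooks add_builtin_function and set_builtin_decl (the name, signature and exact flag handling are assumptions, not GHDL's actual implementation):

static void
define_builtin (const char *name, tree type, enum built_in_function code,
                const char *library_name, int flags)
{
  tree decl;

  /* Create the FUNCTION_DECL and register it with the front end.  */
  decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
                               library_name, NULL_TREE);

  /* Translate the ECF_* bits the callers above pass in.  */
  if (flags & ECF_NOTHROW)
    TREE_NOTHROW (decl) = 1;
  if (flags & ECF_LEAF)
    DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
                                        NULL_TREE, DECL_ATTRIBUTES (decl));

  /* Make the decl retrievable via builtin_decl_explicit/implicit, as the
     alloca and trap call sites above expect.  */
  set_builtin_decl (code, decl, true);
}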
static void build_field (segment_info *h, tree union_type, record_layout_info rli) { tree field; tree name; HOST_WIDE_INT offset = h->offset; unsigned HOST_WIDE_INT desired_align, known_align; name = get_identifier (h->sym->name); field = build_decl (h->sym->declared_at.lb->location, FIELD_DECL, name, h->field); known_align = (offset & -offset) * BITS_PER_UNIT; if (known_align == 0 || known_align > BIGGEST_ALIGNMENT) known_align = BIGGEST_ALIGNMENT; desired_align = update_alignment_for_field (rli, field, known_align); if (desired_align > known_align) DECL_PACKED (field) = 1; DECL_FIELD_CONTEXT (field) = union_type; DECL_FIELD_OFFSET (field) = size_int (offset); DECL_FIELD_BIT_OFFSET (field) = bitsize_zero_node; SET_DECL_OFFSET_ALIGN (field, known_align); rli->offset = size_binop (MAX_EXPR, rli->offset, size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (field), DECL_SIZE_UNIT (field))); /* If this field is assigned to a label, we create another two variables. One will hold the address of target label or format label. The other will hold the length of format label string. */ if (h->sym->attr.assign) { tree len; tree addr; gfc_allocate_lang_decl (field); GFC_DECL_ASSIGN (field) = 1; len = gfc_create_var_np (gfc_charlen_type_node,h->sym->name); addr = gfc_create_var_np (pvoid_type_node, h->sym->name); TREE_STATIC (len) = 1; TREE_STATIC (addr) = 1; DECL_INITIAL (len) = build_int_cst (gfc_charlen_type_node, -2); gfc_set_decl_location (len, &h->sym->declared_at); gfc_set_decl_location (addr, &h->sym->declared_at); GFC_DECL_STRING_LEN (field) = pushdecl_top_level (len); GFC_DECL_ASSIGN_ADDR (field) = pushdecl_top_level (addr); } /* If this field is volatile, mark it. */ if (h->sym->attr.volatile_) { tree new_type; TREE_THIS_VOLATILE (field) = 1; TREE_SIDE_EFFECTS (field) = 1; new_type = build_qualified_type (TREE_TYPE (field), TYPE_QUAL_VOLATILE); TREE_TYPE (field) = new_type; } h->field = field; }
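The known_align computation relies on the lowest-set-bit trick: offset & -offset isolates the largest power of two dividing the byte offset, which is the strongest alignment the field is guaranteed to have relative to the (maximally aligned) start of the common block; a zero offset yields 0 and is clamped to BIGGEST_ALIGNMENT. A self-contained illustration of the arithmetic, using plain C and stand-in constants rather than anything GCC-specific:

#include <stdio.h>

#define BITS_PER_UNIT 8
#define BIGGEST_ALIGNMENT 128   /* stand-in value for the illustration */

static unsigned long
known_align_bits (unsigned long offset)
{
  unsigned long align = (offset & -offset) * BITS_PER_UNIT;
  if (align == 0 || align > BIGGEST_ALIGNMENT)
    align = BIGGEST_ALIGNMENT;
  return align;
}

int
main (void)
{
  printf ("%lu\n", known_align_bits (12));  /* 12 = 4 * 3  -> 32  */
  printf ("%lu\n", known_align_bits (24));  /* 24 = 8 * 3  -> 64  */
  printf ("%lu\n", known_align_bits (0));   /* block start -> 128 */
  return 0;
}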
static unsigned int execute_trace () { gimple_seq body, body_bind_body, inner_cleanup, outer_cleanup; gimple inner_try, outer_try; tree record_type, func_start_decl, func_end_decl, var_decl, function_name_decl, constructor_clobber; gimple call_func_start; gimple_stmt_iterator gsi; // build record type record_type = build_type (); // build start & end function decl func_start_decl = build_function_decl ("__start_ctrace__", record_type); func_end_decl = build_function_decl ("__end_ctrace__", record_type); // init variables of current body body = gimple_body (current_function_decl); var_decl = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier ("__ctrace_var__"), record_type); DECL_CONTEXT (var_decl) = current_function_decl; TREE_ADDRESSABLE (var_decl) = 1; declare_vars (var_decl, body, false); TREE_USED (var_decl) = 1; // mimic __FUNCTION__ builtin. function_name_decl = make_fname_decl (); declare_vars (function_name_decl, body, false); // construct inner try // init calls call_func_start = gimple_build_call ( func_start_decl, 2, build1 (ADDR_EXPR, build_pointer_type (record_type), var_decl), build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (function_name_decl)), function_name_decl)); // make inner clean up inner_cleanup = gimple_build_call ( func_end_decl, 2, build1 (ADDR_EXPR, build_pointer_type (record_type), var_decl), build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (function_name_decl)), function_name_decl)); // update inner try body_bind_body = gimple_bind_body (body); inner_try = gimple_build_try (body_bind_body, inner_cleanup, GIMPLE_TRY_FINALLY); gsi = gsi_start (inner_try); gsi_insert_before (&gsi, call_func_start, GSI_NEW_STMT); // construct outer try constructor_clobber = make_node (CONSTRUCTOR); TREE_THIS_VOLATILE (constructor_clobber) = 1; TREE_TYPE (constructor_clobber) = TREE_TYPE (var_decl); outer_cleanup = gimple_build_assign (var_decl, constructor_clobber); // update outer try outer_try = gimple_build_try (call_func_start, outer_cleanup, GIMPLE_TRY_FINALLY); // update body bind body gimple_bind_set_body (body, outer_try); if (dump_file) { dump_function_to_file (current_function_decl, dump_file, TDF_TREE | TDF_BLOCKS | TDF_VERBOSE); } // exit (0); return 0; }
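execute_trace reads like the execute hook of an out-of-tree instrumentation pass, so it still has to be handed to the pass manager from the plugin's entry point. A hedged sketch of that boilerplate in the GCC 4.8-era C plugin API (pass_trace and the insertion point are assumptions, not taken from the original; newer releases use C++ pass classes instead):

#include "gcc-plugin.h"
#include "plugin-version.h"

int plugin_is_GPL_compatible;

int
plugin_init (struct plugin_name_args *plugin_info,
             struct plugin_gcc_version *version)
{
  struct register_pass_info pass_info;

  if (!plugin_default_version_check (version, &gcc_version))
    return 1;

  /* pass_trace is assumed to be a gimple_opt_pass whose execute hook is
     execute_trace above.  It has to run while the outer GIMPLE_BIND still
     exists, i.e. before pass_lower_cf ("lower") strips the binds away.  */
  pass_info.pass = &pass_trace.pass;
  pass_info.reference_pass_name = "lower";
  pass_info.ref_pass_instance_number = 1;
  pass_info.pos_op = PASS_POS_INSERT_BEFORE;

  register_callback (plugin_info->base_name, PLUGIN_PASS_MANAGER_SETUP,
                     NULL, &pass_info);
  return 0;
}

The plugin would then be loaded in the usual way, e.g. gcc -fplugin=./ctrace.so, where the ctrace.so name is only a placeholder.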