/* Adjust the predefined optimization macros when a #pragma GCC optimization
   switches the options from PREV_TREE to CUR_TREE, so the preprocessor
   macros reflect the optimization level now in effect.  Each macro is only
   touched when its controlling flag actually changes state.  */

void
c_cpp_builtins_optimize_pragma (cpp_reader *pfile, tree prev_tree,
				tree cur_tree)
{
  struct cl_optimization *old_opt = TREE_OPTIMIZATION (prev_tree);
  struct cl_optimization *new_opt = TREE_OPTIMIZATION (cur_tree);
  bool old_fast_math;
  bool new_fast_math;

  /* -undef turns off target-specific built-ins.  */
  if (flag_undef)
    return;

  /* Other target-independent built-ins determined by command-line
     options.  */
  if (old_opt->x_optimize_size && !new_opt->x_optimize_size)
    cpp_undef (pfile, "__OPTIMIZE_SIZE__");
  else if (!old_opt->x_optimize_size && new_opt->x_optimize_size)
    cpp_define (pfile, "__OPTIMIZE_SIZE__");

  if (old_opt->x_optimize && !new_opt->x_optimize)
    cpp_undef (pfile, "__OPTIMIZE__");
  else if (!old_opt->x_optimize && new_opt->x_optimize)
    cpp_define (pfile, "__OPTIMIZE__");

  /* __FAST_MATH__ tracks the aggregate fast-math flag set, as reported
     by fast_math_flags_struct_set_p.  */
  old_fast_math = fast_math_flags_struct_set_p (old_opt);
  new_fast_math = fast_math_flags_struct_set_p (new_opt);
  if (old_fast_math && !new_fast_math)
    cpp_undef (pfile, "__FAST_MATH__");
  else if (!old_fast_math && new_fast_math)
    cpp_define (pfile, "__FAST_MATH__");

  if (old_opt->x_flag_signaling_nans && !new_opt->x_flag_signaling_nans)
    cpp_undef (pfile, "__SUPPORT_SNAN__");
  else if (!old_opt->x_flag_signaling_nans && new_opt->x_flag_signaling_nans)
    cpp_define (pfile, "__SUPPORT_SNAN__");

  /* Unlike the macros above, __FINITE_MATH_ONLY__ is always defined to
     either 0 or 1, so it is redefined (undef + define) on a change rather
     than merely defined or undefined.  */
  if (!old_opt->x_flag_finite_math_only && new_opt->x_flag_finite_math_only)
    {
      cpp_undef (pfile, "__FINITE_MATH_ONLY__");
      cpp_define (pfile, "__FINITE_MATH_ONLY__=1");
    }
  else if (old_opt->x_flag_finite_math_only
	   && !new_opt->x_flag_finite_math_only)
    {
      cpp_undef (pfile, "__FINITE_MATH_ONLY__");
      cpp_define (pfile, "__FINITE_MATH_ONLY__=0");
    }
}
/* Unpack the TS_OPTIMIZATION structure of EXPR from bitpack BP.
   The cl_optimization structure is streamed as an opaque byte-for-byte
   copy, followed by a 32-bit sentinel used to detect a size mismatch
   between the LTO writer and this reader.  */

static void
unpack_ts_optimization (struct bitpack_d *bp, tree expr)
{
  unsigned char *raw = (unsigned char *) TREE_OPTIMIZATION (expr);
  unsigned idx;

  for (idx = 0; idx < sizeof (struct cl_optimization); idx++)
    raw[idx] = bp_unpack_value (bp, 8);

  /* The writer appended a sentinel word after the bytes; if it does not
     match, reader and writer disagree on sizeof (cl_optimization).  */
  if (bp_unpack_value (bp, 32) != 0x12345678)
    fatal_error ("cl_optimization size mismatch in LTO reader and writer");
}
static void lto_input_ts_optimization (struct lto_input_block *ib, tree expr) { unsigned i, len; struct bitpack_d bp; struct cl_optimization *t = TREE_OPTIMIZATION (expr); bp = streamer_read_bitpack (ib); len = sizeof (struct cl_optimization); for (i = 0; i < len; i++) ((unsigned char *)t)[i] = bp_unpack_value (&bp, 8); if (bp_unpack_value (&bp, 32) != 0x12345678) fatal_error ("cl_optimization size mismatch in LTO reader and writer"); }
/* Pack the TS_OPTIMIZATION structure of EXPR into bitpack BP.
   The cl_optimization structure is generated by the options awk script,
   so it is streamed as an opaque byte-by-byte copy rather than
   field-by-field.  */

static void
pack_ts_optimization (struct bitpack_d *bp, tree expr)
{
  const unsigned char *raw = (const unsigned char *) TREE_OPTIMIZATION (expr);
  unsigned idx;

  for (idx = 0; idx < sizeof (struct cl_optimization); idx++)
    bp_pack_value (bp, raw[idx], 8);

  /* Trailing sentinel so the reader can catch struct size mismatches
     between reader and writer.  */
  bp_pack_value (bp, 0x12345678, 32);
}
struct target_globals * save_target_globals_default_opts () { struct target_globals *globals; if (optimization_current_node != optimization_default_node) { tree opts = optimization_current_node; /* Temporarily switch to the default optimization node, so that *this_target_optabs is set to the default, not reflecting whatever a previous function used for the optimize attribute. */ optimization_current_node = optimization_default_node; cl_optimization_restore (&global_options, TREE_OPTIMIZATION (optimization_default_node)); globals = save_target_globals (); optimization_current_node = opts; cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts)); return globals; } return save_target_globals (); }
/* Read all the language-independent bitfield values for EXPR from
   input block IB.  The streamed tree code is checked against EXPR's
   actual code, then each field group that EXPR's code contains is
   unpacked.  NOTE(review): the unpack calls below must mirror, in
   exactly the same order and bit widths, the pack calls of the
   matching writer — any reordering silently corrupts the stream.  */

void
streamer_read_tree_bitfields (struct lto_input_block *ib,
			      struct data_in *data_in, tree expr)
{
  enum tree_code code;
  struct bitpack_d bp;

  /* Read the bitpack of non-pointer values from IB.  */
  bp = streamer_read_bitpack (ib);

  /* The first word in BP contains the code of the tree that we
     are about to read.  */
  code = (enum tree_code) bp_unpack_value (&bp, 16);
  lto_tag_check (lto_tree_code_to_tag (code),
		 lto_tree_code_to_tag (TREE_CODE (expr)));

  /* Note that all these functions are highly sensitive to changes in
     the types and sizes of each of the fields being packed.  */
  unpack_ts_base_value_fields (&bp, expr);
  if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
    unpack_ts_int_cst_value_fields (&bp, expr);
  if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
    unpack_ts_real_cst_value_fields (&bp, expr);
  if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
    unpack_ts_fixed_cst_value_fields (&bp, expr);
  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    stream_input_location (&DECL_SOURCE_LOCATION (expr), &bp, data_in);
  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    unpack_ts_decl_common_value_fields (&bp, expr);
  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
    unpack_ts_decl_wrtl_value_fields (&bp, expr);
  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    unpack_ts_decl_with_vis_value_fields (&bp, expr);
  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    unpack_ts_function_decl_value_fields (&bp, expr);
  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    unpack_ts_type_common_value_fields (&bp, expr);
  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      stream_input_location (&EXPR_CHECK (expr)->exp.locus, &bp, data_in);
      if (code == MEM_REF || code == TARGET_MEM_REF)
	{
	  /* Dependence info: the base is only present in the stream
	     when the clique is non-zero, so read it conditionally.  */
	  MR_DEPENDENCE_CLIQUE (expr)
	    = (unsigned)bp_unpack_value (&bp, sizeof (short) * 8);
	  if (MR_DEPENDENCE_CLIQUE (expr) != 0)
	    MR_DEPENDENCE_BASE (expr)
	      = (unsigned)bp_unpack_value (&bp, sizeof (short) * 8);
	}
    }
  if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
    unpack_ts_block_value_fields (data_in, &bp, expr);
  if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
    unpack_ts_translation_unit_decl_value_fields (data_in, &bp, expr);
  if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
    cl_optimization_stream_in (&bp, TREE_OPTIMIZATION (expr));
  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      /* Pre-size the base-access vector to the streamed length; the
	 elements themselves are filled in by the pointer-field reader.  */
      unsigned HOST_WIDE_INT length = bp_unpack_var_len_unsigned (&bp);
      if (length > 0)
	vec_safe_grow (BINFO_BASE_ACCESSES (expr), length);
    }
  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      /* Likewise pre-size the constructor-elements vector.  */
      unsigned HOST_WIDE_INT length = bp_unpack_var_len_unsigned (&bp);
      if (length > 0)
	vec_safe_grow (CONSTRUCTOR_ELTS (expr), length);
    }
#ifndef ACCEL_COMPILER
  /* Target options are not streamed when offloading to a different
     target, so only read them in the host compiler.  */
  if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
    {
      cl_target_option_stream_in (data_in, &bp, TREE_TARGET_OPTION (expr));
      if (targetm.target_option.post_stream_in)
	targetm.target_option.post_stream_in (TREE_TARGET_OPTION (expr));
    }
#endif
  if (code == OMP_CLAUSE)
    unpack_ts_omp_clause_value_fields (data_in, &bp, expr);
}
/* Write all the language-independent bitfield values for EXPR into a
   single bitpack on OB's main stream.  Each field group that EXPR's
   code contains is packed in a fixed order; the matching reader must
   unpack in exactly the same order and bit widths.
   NOTE(review): unlike streamer_read_tree_bitfields above, this writer
   does not emit a leading 16-bit tree code — these two functions appear
   to come from different revisions; verify they are not meant to pair.  */

void
streamer_write_tree_bitfields (struct output_block *ob, tree expr)
{
  bitpack_d bp = bitpack_create (ob->main_stream);
  enum tree_code code;

  code = TREE_CODE (expr);

  /* Note that all these functions are highly sensitive to changes in
     the types and sizes of each of the fields being packed.  */
  pack_ts_base_value_fields (&bp, expr);
  if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
    pack_ts_int_cst_value_fields (&bp, expr);
  if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
    pack_ts_real_cst_value_fields (&bp, expr);
  if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
    pack_ts_fixed_cst_value_fields (&bp, expr);
  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    stream_output_location (ob, &bp, DECL_SOURCE_LOCATION (expr));
  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    pack_ts_decl_common_value_fields (&bp, expr);
  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
    pack_ts_decl_wrtl_value_fields (&bp, expr);
  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    pack_ts_decl_with_vis_value_fields (&bp, expr);
  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    pack_ts_function_decl_value_fields (&bp, expr);
  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    pack_ts_type_common_value_fields (&bp, expr);
  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      stream_output_location (ob, &bp, EXPR_LOCATION (expr));
      if (code == MEM_REF || code == TARGET_MEM_REF)
	{
	  /* Dependence info: only stream the base when the clique is
	     non-zero; the reader mirrors this condition.  */
	  bp_pack_value (&bp, MR_DEPENDENCE_CLIQUE (expr),
			 sizeof (short) * 8);
	  if (MR_DEPENDENCE_CLIQUE (expr) != 0)
	    bp_pack_value (&bp, MR_DEPENDENCE_BASE (expr),
			   sizeof (short) * 8);
	}
    }
  if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
    pack_ts_block_value_fields (ob, &bp, expr);
  if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
    pack_ts_translation_unit_decl_value_fields (ob, &bp, expr);
  if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
    cl_optimization_stream_out (&bp, TREE_OPTIMIZATION (expr));
  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    /* Only the length is streamed here; the vector contents are
       handled by the pointer-field writer.  */
    bp_pack_var_len_unsigned (&bp,
			      vec_safe_length (BINFO_BASE_ACCESSES (expr)));
  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    bp_pack_var_len_unsigned (&bp, CONSTRUCTOR_NELTS (expr));
  if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION)
      /* Don't stream these when passing things to a different target.  */
      && !lto_stream_offload_p)
    cl_target_option_stream_out (ob, &bp, TREE_TARGET_OPTION (expr));
  if (code == OMP_CLAUSE)
    pack_ts_omp_clause_value_fields (ob, &bp, expr);

  /* Flush the accumulated bitpack to the stream.  */
  streamer_write_bitpack (&bp);
}
/* Unpack all the non-pointer bitfield values of EXPR from bitpack BP,
   which the caller has already opened on the input stream.  Each field
   group that EXPR's code contains is unpacked in a fixed order that
   must mirror the writer exactly.  NOTE(review): this variant takes an
   already-open BP and uses the location-returning stream_input_location
   API, unlike streamer_read_tree_bitfields above — they appear to come
   from different revisions of the streamer.  */

static void
unpack_value_fields (struct data_in *data_in, struct bitpack_d *bp, tree expr)
{
  enum tree_code code;

  code = TREE_CODE (expr);

  /* Note that all these functions are highly sensitive to changes in
     the types and sizes of each of the fields being packed.  */
  unpack_ts_base_value_fields (bp, expr);
  if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
    unpack_ts_int_cst_value_fields (bp, expr);
  if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
    unpack_ts_real_cst_value_fields (bp, expr);
  if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
    unpack_ts_fixed_cst_value_fields (bp, expr);
  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    DECL_SOURCE_LOCATION (expr) = stream_input_location (bp, data_in);
  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    unpack_ts_decl_common_value_fields (bp, expr);
  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
    unpack_ts_decl_wrtl_value_fields (bp, expr);
  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    unpack_ts_decl_with_vis_value_fields (bp, expr);
  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    unpack_ts_function_decl_value_fields (bp, expr);
  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    unpack_ts_type_common_value_fields (bp, expr);
  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      SET_EXPR_LOCATION (expr, stream_input_location (bp, data_in));
      if (code == MEM_REF || code == TARGET_MEM_REF)
	{
	  /* Dependence info: the base is only present in the stream
	     when the clique is non-zero, so read it conditionally.  */
	  MR_DEPENDENCE_CLIQUE (expr)
	    = (unsigned)bp_unpack_value (bp, sizeof (short) * 8);
	  if (MR_DEPENDENCE_CLIQUE (expr) != 0)
	    MR_DEPENDENCE_BASE (expr)
	      = (unsigned)bp_unpack_value (bp, sizeof (short) * 8);
	}
    }
  if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
    unpack_ts_block_value_fields (data_in, bp, expr);
  if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
    unpack_ts_translation_unit_decl_value_fields (data_in, bp, expr);
  if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
    cl_optimization_stream_in (bp, TREE_OPTIMIZATION (expr));
  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      /* Pre-size the base-access vector to the streamed length; the
	 elements themselves are filled in by the pointer-field reader.  */
      unsigned HOST_WIDE_INT length = bp_unpack_var_len_unsigned (bp);
      if (length > 0)
	vec_safe_grow (BINFO_BASE_ACCESSES (expr), length);
    }
  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      /* Likewise pre-size the constructor-elements vector.  */
      unsigned HOST_WIDE_INT length = bp_unpack_var_len_unsigned (bp);
      if (length > 0)
	vec_safe_grow (CONSTRUCTOR_ELTS (expr), length);
    }
#ifndef ACCEL_COMPILER
  /* Target options are not streamed when offloading to a different
     target, so only read them in the host compiler.  */
  if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION))
    cl_target_option_stream_in (data_in, bp, TREE_TARGET_OPTION (expr));
#endif
  if (code == OMP_CLAUSE)
    unpack_ts_omp_clause_value_fields (data_in, bp, expr);
}