void spu_cpu_cpp_builtins (struct cpp_reader *pfile) { builtin_define_std ("__SPU__"); cpp_assert (pfile, "cpu=spu"); cpp_assert (pfile, "machine=spu"); if (spu_arch == PROCESSOR_CELLEDP) builtin_define_std ("__SPU_EDP__"); builtin_define_std ("__vector=__attribute__((__spu_vector__))"); if (!flag_iso) { /* Define this when supporting context-sensitive keywords. */ cpp_define (pfile, "__VECTOR_KEYWORD_SUPPORTED__"); cpp_define (pfile, "vector=vector"); /* Initialize vector keywords. */ __vector_keyword = get_identifier ("__vector"); C_CPP_HASHNODE (__vector_keyword)->flags |= NODE_CONDITIONAL; vector_keyword = get_identifier ("vector"); C_CPP_HASHNODE (vector_keyword)->flags |= NODE_CONDITIONAL; /* Enable context-sensitive macros. */ cpp_get_callbacks (pfile)->macro_to_expand = spu_macro_to_expand; } }
/* libcpp macro_to_expand callback for the SPU conditional "vector"
   keyword.  TOK is the identifier token about to be expanded; decide
   whether it should expand as the __vector macro.  It expands only
   when the following token is a scalar type keyword (unsigned, long,
   short, signed, int, char, float, double), so ordinary uses of
   "vector" as an identifier are left untouched.

   Returns the hash node to expand (the __vector node when the keyword
   context matches, otherwise TOK's own node unchanged).  */
static cpp_hashnode *
spu_macro_to_expand (cpp_reader *pfile, const cpp_token *tok)
{
  cpp_hashnode *expand_this = tok->val.node.node;
  cpp_hashnode *ident;

  ident = spu_categorize_keyword (tok);
  if (ident == C_CPP_HASHNODE (__vector_keyword))
    {
      /* Peek (without consuming) at the token after "vector".  */
      tok = cpp_peek_token (pfile, 0);
      ident = spu_categorize_keyword (tok);

      if (ident)
	{
	  enum rid rid_code = (enum rid)(ident->rid_code);

	  if (ident->type == NT_MACRO)
	    {
	      /* The next token is itself a macro (e.g. a typedef-like
		 define); consume it and look one further ahead to
		 classify what it will expand in front of.  */
	      (void) cpp_get_token (pfile);
	      tok = cpp_peek_token (pfile, 0);
	      ident = spu_categorize_keyword (tok);
	      if (ident)
		rid_code = (enum rid)(ident->rid_code);
	    }

	  /* "vector" followed by a scalar type keyword: treat it as
	     the AltiVec-style vector keyword and expand the macro.  */
	  if (rid_code == RID_UNSIGNED || rid_code == RID_LONG
	      || rid_code == RID_SHORT || rid_code == RID_SIGNED
	      || rid_code == RID_INT || rid_code == RID_CHAR
	      || rid_code == RID_FLOAT || rid_code == RID_DOUBLE)
	    expand_this = C_CPP_HASHNODE (__vector_keyword);
	}
    }
  return expand_this;
}
/* Internal function to either define or undef the appropriate system
   macros.  Called both at startup and on every #pragma GCC target /
   target attribute change; OLD_OPTS/OPTS let s390_def_or_undef_macro
   toggle each macro only when the corresponding target flag actually
   changed.  */
static void
s390_cpu_cpp_builtins_internal (cpp_reader *pfile,
				struct cl_target_option *opts,
				const struct cl_target_option *old_opts)
{
  s390_def_or_undef_macro (pfile, MASK_OPT_HTM, old_opts, opts,
			   "__HTM__", "__HTM__");
  s390_def_or_undef_macro (pfile, MASK_OPT_VX, old_opts, opts,
			   "__VX__", "__VX__");
  s390_def_or_undef_macro (pfile, MASK_ZVECTOR, old_opts, opts,
			   "__VEC__=10302", "__VEC__");
  s390_def_or_undef_macro (pfile, MASK_ZVECTOR, old_opts, opts,
			   "__vector=__attribute__((vector_size(16)))",
			   "__vector__");
  s390_def_or_undef_macro (pfile, MASK_ZVECTOR, old_opts, opts,
			   "__bool=__attribute__((s390_vector_bool)) unsigned",
			   "__bool");
  {
    char macro_def[64];

    /* PROCESSOR_NATIVE must have been resolved to a real arch by the
       driver before we get here.  */
    gcc_assert (s390_arch != PROCESSOR_NATIVE);
    /* Use snprintf rather than sprintf so the write into the fixed
       buffer stays bounded even if arch_level's range ever grows.  */
    snprintf (macro_def, sizeof macro_def, "__ARCH__=%d",
	      processor_table[s390_arch].arch_level);
    cpp_undef (pfile, "__ARCH__");
    cpp_define (pfile, macro_def);
  }

  if (!flag_iso)
    {
      /* Context-sensitive "vector"/"bool" keywords are a GNU
	 extension; never enable them in strict ISO mode.  */
      s390_def_or_undef_macro (pfile, MASK_ZVECTOR, old_opts, opts,
			       "__VECTOR_KEYWORD_SUPPORTED__",
			       "__VECTOR_KEYWORD_SUPPORTED__");
      s390_def_or_undef_macro (pfile, MASK_ZVECTOR, old_opts, opts,
			       "vector=vector", "vector");
      s390_def_or_undef_macro (pfile, MASK_ZVECTOR, old_opts, opts,
			       "bool=bool", "bool");

      /* Initialize the keyword nodes exactly once, the first time
	 zvector is enabled.  */
      if (TARGET_ZVECTOR_P (opts->x_target_flags) && __vector_keyword == NULL)
	{
	  __vector_keyword = get_identifier ("__vector");
	  C_CPP_HASHNODE (__vector_keyword)->flags |= NODE_CONDITIONAL;

	  vector_keyword = get_identifier ("vector");
	  C_CPP_HASHNODE (vector_keyword)->flags |= NODE_CONDITIONAL;

	  __bool_keyword = get_identifier ("__bool");
	  C_CPP_HASHNODE (__bool_keyword)->flags |= NODE_CONDITIONAL;

	  bool_keyword = get_identifier ("bool");
	  C_CPP_HASHNODE (bool_keyword)->flags |= NODE_CONDITIONAL;

	  _Bool_keyword = get_identifier ("_Bool");
	  C_CPP_HASHNODE (_Bool_keyword)->flags |= NODE_CONDITIONAL;

	  /* Enable context-sensitive macros.  */
	  cpp_get_callbacks (pfile)->macro_to_expand = s390_macro_to_expand;
	}
    }
}
/* Classify TOK for the context-sensitive keyword machinery.  Returns
   the canonical __vector hash node for either spelling of the vector
   keyword, the token's own hash node for any other identifier, and 0
   for non-identifier tokens.  */
static cpp_hashnode *
spu_categorize_keyword (const cpp_token *tok)
{
  /* Only identifier tokens can possibly be keywords.  */
  if (tok->type != CPP_NAME)
    return 0;

  cpp_hashnode *node = tok->val.node.node;

  /* Fold both "vector" and "__vector" onto the canonical __vector
     node so callers compare against a single identity.  */
  if (node == C_CPP_HASHNODE (__vector_keyword)
      || node == C_CPP_HASHNODE (vector_keyword))
    return C_CPP_HASHNODE (__vector_keyword);

  return node;
}
/* Internal function to either define or undef the appropriate system
   macros, driven by which target flags changed between OLD_OPTS and
   OPTS.  */
static void
s390_cpu_cpp_builtins_internal (cpp_reader *pfile,
				struct cl_target_option *opts,
				const struct cl_target_option *old_opts)
{
  /* Macros that are always maintained, ISO mode or not.  */
  s390_def_or_undef_macro (pfile, MASK_OPT_HTM, old_opts, opts,
			   "__HTM__", "__HTM__");
  s390_def_or_undef_macro (pfile, MASK_ZVECTOR, old_opts, opts,
			   "__VEC__=10301", "__VEC__");
  s390_def_or_undef_macro (pfile, MASK_ZVECTOR, old_opts, opts,
			   "__vector=__attribute__((vector_size(16)))",
			   "__vector__");
  s390_def_or_undef_macro (pfile, MASK_ZVECTOR, old_opts, opts,
			   "__bool=__attribute__((s390_vector_bool)) unsigned",
			   "__bool");

  /* Everything below is the context-sensitive keyword extension,
     which is suppressed in strict ISO mode.  */
  if (flag_iso)
    return;

  s390_def_or_undef_macro (pfile, MASK_ZVECTOR, old_opts, opts,
			   "__VECTOR_KEYWORD_SUPPORTED__",
			   "__VECTOR_KEYWORD_SUPPORTED__");
  s390_def_or_undef_macro (pfile, MASK_ZVECTOR, old_opts, opts,
			   "vector=vector", "vector");
  s390_def_or_undef_macro (pfile, MASK_ZVECTOR, old_opts, opts,
			   "bool=bool", "bool");

  /* One-time setup of the conditional keyword nodes, performed the
     first time zvector support is switched on.  */
  if (TARGET_ZVECTOR_P (opts->x_target_flags) && __vector_keyword == NULL)
    {
      __vector_keyword = get_identifier ("__vector");
      C_CPP_HASHNODE (__vector_keyword)->flags |= NODE_CONDITIONAL;

      vector_keyword = get_identifier ("vector");
      C_CPP_HASHNODE (vector_keyword)->flags |= NODE_CONDITIONAL;

      __bool_keyword = get_identifier ("__bool");
      C_CPP_HASHNODE (__bool_keyword)->flags |= NODE_CONDITIONAL;

      bool_keyword = get_identifier ("bool");
      C_CPP_HASHNODE (bool_keyword)->flags |= NODE_CONDITIONAL;

      _Bool_keyword = get_identifier ("_Bool");
      C_CPP_HASHNODE (_Bool_keyword)->flags |= NODE_CONDITIONAL;

      /* Enable context-sensitive macros.  */
      cpp_get_callbacks (pfile)->macro_to_expand = s390_macro_to_expand;
    }
}
void spu_cpu_cpp_builtins (struct cpp_reader *pfile) { cpp_define (pfile, "__SPU__"); cpp_assert (pfile, "cpu=spu"); cpp_assert (pfile, "machine=spu"); if (spu_arch == PROCESSOR_CELLEDP) cpp_define (pfile, "__SPU_EDP__"); if (cpp_get_options (pfile)->lang != CLK_ASM) cpp_define (pfile, "__vector=__attribute__((__spu_vector__))"); switch (spu_ea_model) { case 32: cpp_define (pfile, "__EA32__"); break; case 64: cpp_define (pfile, "__EA64__"); break; default: gcc_unreachable (); } if (!flag_iso && cpp_get_options (pfile)->lang != CLK_ASM) { /* Define this when supporting context-sensitive keywords. */ cpp_define (pfile, "__VECTOR_KEYWORD_SUPPORTED__"); cpp_define (pfile, "vector=vector"); /* Initialize vector keywords. */ __vector_keyword = get_identifier ("__vector"); C_CPP_HASHNODE (__vector_keyword)->flags |= NODE_CONDITIONAL; vector_keyword = get_identifier ("vector"); C_CPP_HASHNODE (vector_keyword)->flags |= NODE_CONDITIONAL; /* Enable context-sensitive macros. */ cpp_get_callbacks (pfile)->macro_to_expand = spu_macro_to_expand; } }
/* Parse a #pragma GCC target (or the implicit restore when ARGS is
   NULL).  ARGS is the pragma's argument tree; POP_TARGET, when
   non-NULL, is the option node to restore on a pop.  Updates the
   global option state and redefines the target macros when the
   effective options change.  Returns false if the pragma's arguments
   were invalid (global options restored to the previous state),
   true otherwise.  */
static bool
arm_pragma_target_parse (tree args, tree pop_target)
{
  tree prev_tree = target_option_current_node;
  tree cur_tree;
  struct cl_target_option *prev_opt;
  struct cl_target_option *cur_opt;

  if (! args)
    {
      /* Pop/reset: go back to POP_TARGET, or the command-line default
	 when there is nothing to pop.  */
      cur_tree = ((pop_target) ? pop_target : target_option_default_node);
      cl_target_option_restore (&global_options,
				TREE_TARGET_OPTION (cur_tree));
    }
  else
    {
      cur_tree = arm_valid_target_attribute_tree (args, &global_options,
						  &global_options_set);
      if (cur_tree == NULL_TREE)
	{
	  /* Invalid arguments: undo any partial option changes before
	     reporting failure.  */
	  cl_target_option_restore (&global_options,
				    TREE_TARGET_OPTION (prev_tree));
	  return false;
	}

      /* handle_pragma_pop_options and handle_pragma_reset_options
	 will set target_option_current_node, but not
	 handle_pragma_target.  */
      target_option_current_node = cur_tree;
    }

  /* Update macros if target_node changes. The global state will be
     restored by arm_set_current_function.  */
  prev_opt = TREE_TARGET_OPTION (prev_tree);
  cur_opt = TREE_TARGET_OPTION (cur_tree);

  gcc_assert (prev_opt);
  gcc_assert (cur_opt);

  if (cur_opt != prev_opt)
    {
      /* For the definitions, ensure all newly defined macros are
	 considered as used for -Wunused-macros.  There is no point
	 warning about the compiler predefined macros.  */
      cpp_options *cpp_opts = cpp_get_options (parse_in);
      unsigned char saved_warn_unused_macros = cpp_opts->warn_unused_macros;
      cpp_opts->warn_unused_macros = 0;

      /* Update macros.  */
      gcc_assert (cur_opt->x_target_flags == target_flags);

      /* Don't warn for macros that have context sensitive values
	 depending on other attributes.  See warn_of_redefinition;
	 NODE_CONDITIONAL is reset after cpp_create_definition.  */
      tree acond_macro = get_identifier ("__ARM_NEON_FP");
      C_CPP_HASHNODE (acond_macro)->flags |= NODE_CONDITIONAL ;

      acond_macro = get_identifier ("__ARM_FP");
      C_CPP_HASHNODE (acond_macro)->flags |= NODE_CONDITIONAL;

      acond_macro = get_identifier ("__ARM_FEATURE_LDREX");
      C_CPP_HASHNODE (acond_macro)->flags |= NODE_CONDITIONAL;

      /* Re-emit every target macro for the new option set.  */
      arm_cpu_builtins (parse_in);

      cpp_opts->warn_unused_macros = saved_warn_unused_macros;

      /* Make sure that target_reinit is called for next function, since
	 TREE_TARGET_OPTION might change with the #pragma even if there
	 is no target attribute attached to the function.  */
      arm_reset_previous_fndecl ();

      /* If going to the default mode, we restore the initial states.
	 if cur_tree is a new target, states will be saved/restored on a
	 per function basis in arm_set_current_function.  */
      if (cur_tree == target_option_default_node)
	save_restore_target_globals (cur_tree);
    }

  return true;
}
/* libcpp macro_to_expand callback for the zvector context-sensitive
   keywords.  TOK is the identifier token about to be expanded.
   "vector" expands only when followed by a type keyword, and "bool"
   expands only when it was announced by a preceding vector keyword
   (tracked in the static EXPAND_BOOL_P across calls).  Returns the
   hash node to expand, or NULL to suppress expansion.

   NOTE(review): unlike the SPU variant, RID_FLOAT is deliberately
   absent from the type-keyword list below — presumably because
   zvector has no vector float type at this architecture level;
   confirm before "fixing".  */
static cpp_hashnode *
s390_macro_to_expand (cpp_reader *pfile, const cpp_token *tok)
{
  cpp_hashnode *expand_this = tok->val.node.node;
  cpp_hashnode *ident;
  /* Sticky across calls: set when a vector keyword announces that the
     next bool keyword must expand too.  */
  static bool expand_bool_p = false;
  int idx = 0;
  enum rid rid_code;

  /* The vector keyword is only expanded if the machine actually
     provides hardware support.  */
  if (!TARGET_ZVECTOR)
    return NULL;

  ident = s390_categorize_keyword (tok);

  /* Triggered when we picked a different variant in
     s390_categorize_keyword.  */
  if (ident != expand_this)
    expand_this = NULL;

  /* The vector keyword has been found already and we remembered to
     expand the next bool.  */
  if (expand_bool_p && ident == C_CPP_HASHNODE (__bool_keyword))
    {
      expand_bool_p = false;
      return ident;
    }

  if (ident != C_CPP_HASHNODE (__vector_keyword))
    return expand_this;

  /* Peek past any padding tokens at the token following "vector".  */
  do
    tok = cpp_peek_token (pfile, idx++);
  while (tok->type == CPP_PADDING);
  ident = s390_categorize_keyword (tok);

  if (!ident)
    return expand_this;

  /* vector bool - remember to expand the next bool.  */
  if (ident == C_CPP_HASHNODE (__bool_keyword))
    {
      expand_bool_p = true;
      return C_CPP_HASHNODE (__vector_keyword);
    }

  /* The boost libraries have code with Iterator::vector vector in it.
     If we allow the normal handling, this module will be called
     recursively, and the vector keyword will be skipped.  */
  if (ident == C_CPP_HASHNODE (__vector_keyword))
    return expand_this;

  rid_code = (enum rid)(ident->rid_code);

  if (ident->type == NT_MACRO)
    {
      /* Now actually fetch the tokens we "peeked" before and do a
	 lookahead for the next.  */
      do
	(void) cpp_get_token (pfile);
      while (--idx > 0);

      do
	tok = cpp_peek_token (pfile, idx++);
      while (tok->type == CPP_PADDING);
      ident = s390_categorize_keyword (tok);

      if (ident == C_CPP_HASHNODE (__bool_keyword))
	{
	  expand_bool_p = true;
	  return C_CPP_HASHNODE (__vector_keyword);
	}
      else if (ident)
	rid_code = (enum rid)(ident->rid_code);
    }

  /* vector keyword followed by type identifier: vector unsigned,
     vector long, ...  Types consisting of more than one identifier
     are not supported by zvector e.g. long long, long double,
     unsigned long int.  */
  if (rid_code == RID_UNSIGNED || rid_code == RID_LONG
      || rid_code == RID_SHORT || rid_code == RID_SIGNED
      || rid_code == RID_INT || rid_code == RID_CHAR
      || rid_code == RID_DOUBLE)
    {
      expand_this = C_CPP_HASHNODE (__vector_keyword);

      /* If the next keyword is bool, it will need to be expanded as
	 well.  */
      do
	tok = cpp_peek_token (pfile, idx++);
      while (tok->type == CPP_PADDING);
      ident = s390_categorize_keyword (tok);

      /* __vector long __bool a; */
      if (ident == C_CPP_HASHNODE (__bool_keyword))
	expand_bool_p = true;
      else
	{
	  /* Triggered with: __vector long long __bool a; */
	  do
	    tok = cpp_peek_token (pfile, idx++);
	  while (tok->type == CPP_PADDING);
	  ident = s390_categorize_keyword (tok);

	  if (ident == C_CPP_HASHNODE (__bool_keyword))
	    expand_bool_p = true;
	}
    }

  return expand_this;
}
/* Hook to validate the current #pragma GCC target and set the arch
   custom mode state.  If ARGS is NULL, then POP_TARGET is used to
   reset the options.  Returns false when ARGS fails to parse (global
   options restored to the pre-pragma state), true otherwise.  */
static bool
arm_pragma_target_parse (tree args, tree pop_target)
{
  /* Snapshot the current global options rather than trusting
     target_option_current_node, so the "did anything change" test
     below compares against the actual live state.  */
  tree prev_tree = build_target_option_node (&global_options);
  tree cur_tree;
  struct cl_target_option *prev_opt;
  struct cl_target_option *cur_opt;

  if (! args)
    {
      /* Pop/reset: restore POP_TARGET, or the command-line default
	 when there is nothing to pop.  */
      cur_tree = ((pop_target) ? pop_target : target_option_default_node);
      cl_target_option_restore (&global_options,
				TREE_TARGET_OPTION (cur_tree));
    }
  else
    {
      cur_tree = arm_valid_target_attribute_tree (args, &global_options,
						  &global_options_set);
      if (cur_tree == NULL_TREE)
	{
	  /* Invalid arguments: undo any partial option changes before
	     reporting failure.  */
	  cl_target_option_restore (&global_options,
				    TREE_TARGET_OPTION (prev_tree));
	  return false;
	}
    }

  target_option_current_node = cur_tree;
  /* Force target_reinit for the next function even when it carries no
     target attribute of its own.  */
  arm_reset_previous_fndecl ();

  /* Figure out the previous mode.  */
  prev_opt = TREE_TARGET_OPTION (prev_tree);
  cur_opt = TREE_TARGET_OPTION (cur_tree);

  gcc_assert (prev_opt);
  gcc_assert (cur_opt);

  if (cur_opt != prev_opt)
    {
      /* For the definitions, ensure all newly defined macros are
	 considered as used for -Wunused-macros.  There is no point
	 warning about the compiler predefined macros.  Likewise
	 suppress -Wbuiltin-macro-redefined while we redefine them.  */
      cpp_options *cpp_opts = cpp_get_options (parse_in);
      unsigned char saved_warn_unused_macros = cpp_opts->warn_unused_macros;
      unsigned char saved_warn_builtin_macro_redefined
	= cpp_opts->warn_builtin_macro_redefined;
      cpp_opts->warn_unused_macros = 0;
      cpp_opts->warn_builtin_macro_redefined = 0;

      /* Update macros.  */
      gcc_assert (cur_opt->x_target_flags == target_flags);

      /* Don't warn for macros that have context sensitive values
	 depending on other attributes.  See warn_of_redefinition;
	 NODE_CONDITIONAL is reset after cpp_create_definition.  */
      tree acond_macro = get_identifier ("__ARM_NEON_FP");
      C_CPP_HASHNODE (acond_macro)->flags |= NODE_CONDITIONAL ;

      acond_macro = get_identifier ("__ARM_FP");
      C_CPP_HASHNODE (acond_macro)->flags |= NODE_CONDITIONAL;

      acond_macro = get_identifier ("__ARM_FEATURE_LDREX");
      C_CPP_HASHNODE (acond_macro)->flags |= NODE_CONDITIONAL;

      /* Re-emit every target macro for the new option set.  */
      arm_cpu_builtins (parse_in);

      cpp_opts->warn_builtin_macro_redefined
	= saved_warn_builtin_macro_redefined;
      cpp_opts->warn_unused_macros = saved_warn_unused_macros;
    }

  return true;
}