static void
record_common_node (struct streamer_tree_cache_d *cache, tree node)
{
  /* We have to make sure to fill exactly the same number of
     elements for all frontends.  That can include NULL trees.
     As our hash table can't deal with zero entries we'll simply stream
     a random other tree.  A NULL tree never will be looked up so it
     doesn't matter which tree we replace it with, just to be sure
     use error_mark_node.  */
  if (!node)
    node = error_mark_node;

  streamer_tree_cache_append (cache, node);

  if (POINTER_TYPE_P (node)
      || TREE_CODE (node) == COMPLEX_TYPE
      || TREE_CODE (node) == ARRAY_TYPE)
    record_common_node (cache, TREE_TYPE (node));
  else if (TREE_CODE (node) == RECORD_TYPE)
    {
      /* The FIELD_DECLs of structures should be shared, so that every
         COMPONENT_REF uses the same tree node when referencing a field.
         Pointer equality between FIELD_DECLs is used by the alias
         machinery to compute overlapping memory references
         (see nonoverlapping_component_refs_p).  */
      tree f;
      for (f = TYPE_FIELDS (node); f; f = TREE_CHAIN (f))
        record_common_node (cache, f);
    }
}
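/* A hypothetical, simplified illustration (not actual GCC code) of the
   contract record_common_node relies on: the streamer cache assigns slot
   indices in append order, so as long as every frontend and every lto1
   invocation appends the exact same sequence of nodes, a given index names
   the same common node everywhere.  The error_mark_node placeholder above
   exists purely to keep that sequence the same length across frontends.  */

struct toy_tree_cache
{
  tree nodes[256];   /* Nodes pickled so far, in append order.  */
  unsigned length;   /* Number of slots used.  */
};

static unsigned
toy_cache_append (struct toy_tree_cache *cache, tree node)
{
  unsigned ix = cache->length++;
  cache->nodes[ix] = node;  /* Slot IX now identifies NODE on both sides.  */
  return ix;
}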
tree
streamer_get_builtin_tree (struct lto_input_block *ib, struct data_in *data_in)
{
  enum built_in_class fclass;
  enum built_in_function fcode;
  const char *asmname;
  tree result;

  fclass = streamer_read_enum (ib, built_in_class, BUILT_IN_LAST);
  gcc_assert (fclass == BUILT_IN_NORMAL || fclass == BUILT_IN_MD);

  fcode = (enum built_in_function) streamer_read_uhwi (ib);

  if (fclass == BUILT_IN_NORMAL)
    {
      if (fcode >= END_BUILTINS)
        fatal_error (input_location,
                     "machine independent builtin code out of range");
      result = builtin_decl_explicit (fcode);
      if (!result)
        {
          if (fcode > BEGIN_CHKP_BUILTINS && fcode < END_CHKP_BUILTINS)
            {
              fcode = (enum built_in_function)
                      (fcode - BEGIN_CHKP_BUILTINS - 1);
              result = builtin_decl_explicit (fcode);
              result = chkp_maybe_clone_builtin_fndecl (result);
            }
          else if (fcode > BEGIN_SANITIZER_BUILTINS
                   && fcode < END_SANITIZER_BUILTINS)
            {
              initialize_sanitizer_builtins ();
              result = builtin_decl_explicit (fcode);
            }
        }
      gcc_assert (result);
    }
  else if (fclass == BUILT_IN_MD)
    {
      result = targetm.builtin_decl (fcode, true);
      if (!result || result == error_mark_node)
        fatal_error (input_location, "target specific builtin not available");
    }
  else
    gcc_unreachable ();

  asmname = streamer_read_string (data_in, ib);
  if (asmname)
    set_builtin_user_assembler_name (result, asmname);

  streamer_tree_cache_append (data_in->reader_cache, result, 0);

  return result;
}
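/* For context, a sketch of the matching writer side, modeled from memory on
   the streamer_write_builtin of the same era in tree-streamer-out.c; exact
   details may differ between releases.  It emits the built-in class, the
   function code and a possibly-NULL user assembler name, which is precisely
   the record streamer_get_builtin_tree consumes above.  */

void
streamer_write_builtin (struct output_block *ob, tree expr)
{
  gcc_assert (streamer_handle_as_builtin_p (expr));

  if (DECL_BUILT_IN_CLASS (expr) == BUILT_IN_MD
      && !targetm.builtin_decl)
    sorry ("gimple bytecode streams do not support machine specific builtin "
           "functions on this target");

  streamer_write_record_start (ob, LTO_builtin_decl);
  streamer_write_enum (ob->main_stream, built_in_class, BUILT_IN_LAST,
                       DECL_BUILT_IN_CLASS (expr));
  streamer_write_uhwi (ob, DECL_FUNCTION_CODE (expr));

  if (DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* The user assembler name must survive the round trip so that
         set_builtin_user_assembler_name can restore it on the reader
         side.  */
      const char *str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (expr));
      streamer_write_string (ob, ob->main_stream, str, true);
    }
  else
    streamer_write_string (ob, ob->main_stream, NULL, true);
}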
static void
record_common_node (struct streamer_tree_cache_d *cache, tree node)
{
  /* If we recursively end up at nodes we do not want to preload simply don't.
     ???  We'd want to verify that this doesn't happen, or alternatively
     do not recurse at all.  */
  if (node == char_type_node)
    return;

  gcc_checking_assert (node != boolean_type_node
                       && node != boolean_true_node
                       && node != boolean_false_node);

  /* We have to make sure to fill exactly the same number of
     elements for all frontends.  That can include NULL trees.
     As our hash table can't deal with zero entries we'll simply stream
     a random other tree.  A NULL tree never will be looked up so it
     doesn't matter which tree we replace it with, just to be sure
     use error_mark_node.  */
  if (!node)
    node = error_mark_node;

  /* ???  FIXME, devise a better hash value.  But the hash needs to be equal
     for all frontend and lto1 invocations.  So just use the position
     in the cache as hash value.  */
  streamer_tree_cache_append (cache, node, cache->nodes.length ());

  if (POINTER_TYPE_P (node)
      || TREE_CODE (node) == COMPLEX_TYPE
      || TREE_CODE (node) == ARRAY_TYPE)
    record_common_node (cache, TREE_TYPE (node));
  else if (TREE_CODE (node) == RECORD_TYPE)
    {
      /* The FIELD_DECLs of structures should be shared, so that every
         COMPONENT_REF uses the same tree node when referencing a field.
         Pointer equality between FIELD_DECLs is used by the alias machinery
         to compute overlapping component references (see
         nonoverlapping_component_refs_p and
         nonoverlapping_component_refs_of_decl_p).  */
      for (tree f = TYPE_FIELDS (node); f; f = TREE_CHAIN (f))
        record_common_node (cache, f);
    }
}
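/* For context, a condensed sketch of the caller, modeled on
   preload_common_nodes from tree-streamer.c of roughly this vintage (the
   real skip list is longer and varies between releases).  It shows where
   the guards above come from: char_type_node depends on -f[un]signed-char
   and the boolean nodes are frontend dependent, so they are skipped here
   and must never be reached through recursion either.  */

static void
preload_common_nodes (struct streamer_tree_cache_d *cache)
{
  unsigned i;

  for (i = 0; i < itk_none; i++)
    /* Skip itk_char.  char_type_node is dependent on -f[un]signed-char.  */
    if (i != itk_char)
      record_common_node (cache, integer_types[i]);

  for (i = 0; i < stk_type_kind_last; i++)
    record_common_node (cache, sizetype_tab[i]);

  for (i = 0; i < TI_MAX; i++)
    /* Skip the boolean type and constants, they are frontend dependent.  */
    if (i != TI_BOOLEAN_TYPE
        && i != TI_BOOLEAN_FALSE
        && i != TI_BOOLEAN_TRUE)
      record_common_node (cache, global_trees[i]);
}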
static void
record_common_node (struct streamer_tree_cache_d *cache, tree node)
{
  /* If we recursively end up at nodes we do not want to preload simply don't.
     ???  We'd want to verify that this doesn't happen, or alternatively
     do not recurse at all.  */
  if (node == char_type_node)
    return;

  gcc_checking_assert (node != boolean_type_node
                       && node != boolean_true_node
                       && node != boolean_false_node);

  /* We have to make sure to fill exactly the same number of
     elements for all frontends.  That can include NULL trees.
     As our hash table can't deal with zero entries we'll simply stream
     a random other tree.  A NULL tree never will be looked up so it
     doesn't matter which tree we replace it with, just to be sure
     use error_mark_node.  */
  if (!node)
    node = error_mark_node;

  /* ???  FIXME, devise a better hash value.  But the hash needs to be equal
     for all frontend and lto1 invocations.  So just use the position
     in the cache as hash value.  */
  streamer_tree_cache_append (cache, node, cache->nodes.length ());

  switch (TREE_CODE (node))
    {
    case ERROR_MARK:
    case FIELD_DECL:
    case FIXED_POINT_TYPE:
    case IDENTIFIER_NODE:
    case INTEGER_CST:
    case INTEGER_TYPE:
    case POINTER_BOUNDS_TYPE:
    case REAL_TYPE:
    case TREE_LIST:
    case VOID_CST:
    case VOID_TYPE:
      /* No recursive trees.  */
      break;
    case ARRAY_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      record_common_node (cache, TREE_TYPE (node));
      break;
    case COMPLEX_TYPE:
      /* Verify that a complex type's component type (node_type) has been
         handled already (and we thus don't need to recurse here).  */
      verify_common_node_recorded (cache, TREE_TYPE (node));
      break;
    case RECORD_TYPE:
      /* The FIELD_DECLs of structures should be shared, so that every
         COMPONENT_REF uses the same tree node when referencing a field.
         Pointer equality between FIELD_DECLs is used by the alias machinery
         to compute overlapping component references (see
         nonoverlapping_component_refs_p and
         nonoverlapping_component_refs_of_decl_p).  */
      for (tree f = TYPE_FIELDS (node); f; f = TREE_CHAIN (f))
        record_common_node (cache, f);
      break;
    default:
      /* Unexpected tree code.  */
      gcc_unreachable ();
    }
}
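/* verify_common_node_recorded is called above but not shown.  A sketch,
   modeled on the version in tree-streamer.c (hedged: details may differ by
   release): it asserts that NODE already sits in CACHE, either through the
   node map or by a linear scan of the node array, and only under
   flag_checking since a violation is harmless in a release compiler.  */

static void
verify_common_node_recorded (struct streamer_tree_cache_d *cache, tree node)
{
  /* Restrict this to flag_checking only: in general a violation is
     harmless, this is purely a development-time sanity check.  */
  if (!flag_checking)
    return;

  if (cache->node_map)
    gcc_assert (streamer_tree_cache_lookup (cache, node, NULL));
  else
    {
      bool found = false;
      gcc_assert (cache->nodes.exists ());
      /* Linear search over the nodes appended so far.  */
      for (unsigned i = 0; !found && i < cache->nodes.length (); ++i)
        if (cache->nodes[i] == node)
          found = true;
      gcc_assert (found);
    }
}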