static void require_user_regions (int from_tty) { struct mem_region *m; int ix, length; /* If we're already using a user-provided list, nothing to do. */ if (!mem_use_target) return; /* Switch to a user-provided list (possibly a copy of the current one). */ mem_use_target = 0; /* If we don't have a target-provided region list yet, then no need to warn. */ if (mem_region_list == NULL) return; /* Otherwise, let the user know how to get back. */ if (from_tty) warning (_("Switching to manual control of memory regions; use " "\"mem auto\" to fetch regions from the target again.")); /* And create a new list for the user to modify. */ length = VEC_length (mem_region_s, target_mem_region_list); mem_region_list = VEC_alloc (mem_region_s, length); for (ix = 0; VEC_iterate (mem_region_s, target_mem_region_list, ix, m); ix++) VEC_quick_push (mem_region_s, mem_region_list, m); }
/* Record in the var map attached to edge E that PHI result RESULT is
   replaced by definition DEF when the edge is redirected.  The map is
   created lazily on first use.  */

void
redirect_edge_var_map_add (edge e, tree result, tree def)
{
  void **map_slot;
  edge_var_map_vector prev_vec, vec;
  edge_var_map entry;

  /* Lazily create the edge -> vector map.  */
  if (edge_var_maps == NULL)
    edge_var_maps = pointer_map_create ();

  map_slot = pointer_map_insert (edge_var_maps, e);
  vec = (edge_var_map_vector) *map_slot;
  prev_vec = vec;
  if (!vec)
    {
      vec = VEC_alloc (edge_var_map, heap, 5);
      *map_slot = vec;
    }

  entry.def = def;
  entry.result = result;
  VEC_safe_push (edge_var_map, heap, vec, &entry);

  /* The safe push may have reallocated the vector; keep the map slot
     pointing at the live storage.  */
  if (prev_vec != vec)
    *map_slot = vec;
}
/* Build the initializer for a VxWorks emulated-TLS control variable.
   VAR is the control variable, DECL the TLS variable it describes and
   TMPL_ADDR the address of its template.  Returns a CONSTRUCTOR that
   fills the three fields of VAR's type in declaration order.  */

static tree
vxworks_emutls_var_init (tree var, tree decl, tree tmpl_addr)
{
  VEC(constructor_elt,gc) *init = VEC_alloc (constructor_elt, gc, 3);
  tree type = TREE_TYPE (var);
  tree field = TYPE_FIELDS (type);
  constructor_elt *ce;

  /* First field: pointer to the template.  */
  ce = VEC_quick_push (constructor_elt, init, NULL);
  ce->index = field;
  ce->value = fold_convert (TREE_TYPE (field), tmpl_addr);

  /* Second field: starts out as zero.  */
  field = DECL_CHAIN (field);
  ce = VEC_quick_push (constructor_elt, init, NULL);
  ce->index = field;
  ce->value = build_int_cst (TREE_TYPE (field), 0);

  /* Third field: the size of the TLS variable.  */
  field = DECL_CHAIN (field);
  ce = VEC_quick_push (constructor_elt, init, NULL);
  ce->index = field;
  ce->value = fold_convert (TREE_TYPE (field), DECL_SIZE_UNIT (decl));

  return build_constructor (type, init);
}
/* Populate the global FAKES vector with N_FUNCS junk function decls,
   one per index 0 .. N_FUNCS-1, each built by build_junk_fn.  */
static void
gen_fake_funcs(int n_funcs)
{
  unsigned fn_index;

  fakes = VEC_alloc(tree, gc, 0);
  for (fn_index = 0; fn_index < n_funcs; ++fn_index)
    VEC_safe_push(tree, gc, fakes, build_junk_fn(fn_index));
}
/* Initialize the operand cache routines for the current function.
   The very first call (tracked by N_INITIALIZED) also sets up the
   global scratch vectors and the bitmap obstack shared across
   functions.  */

void
init_ssa_operands (void)
{
  if (!n_initialized++)
    {
      /* One-time global initialization: scratch vectors used while
	 building operand lists, and the shared bitmap obstack.  */
      build_defs = VEC_alloc (tree, heap, 5);
      build_uses = VEC_alloc (tree, heap, 10);
      build_vuse = NULL_TREE;
      build_vdef = NULL_TREE;
      bitmap_obstack_initialize (&operands_bitmap_obstack);
    }

  /* Per-function state: the operand memory must not already be set up.  */
  gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL);
  /* Setting the index to the current mem size forces a fresh chunk to
     be allocated on the first request.  */
  gimple_ssa_operands (cfun)->operand_memory_index
    = gimple_ssa_operands (cfun)->ssa_operand_mem_size;
  gimple_ssa_operands (cfun)->ops_active = true;
  memset (&clobber_stats, 0, sizeof (clobber_stats));
  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_INIT;
  create_vop_var ();
}
/* Allocate the temporary arrays used while converting the switch
   described by INFO: the default-value / inbound / outbound SSA name
   arrays (carved from one allocation) and one constructor vector per
   PHI.  */

static void
create_temp_arrays (struct switch_conv_info *info)
{
  int i;
  /* Each constructor needs one entry per value in the switch range,
     plus one.  This count is loop-invariant, so compute it once
     instead of re-evaluating tree_low_cst on every iteration.  */
  unsigned int nelts = tree_low_cst (info->range_size, 1) + 1;

  /* One zeroed block holds all three tree arrays.  */
  info->default_values = XCNEWVEC (tree, info->phi_count * 3);
  info->constructors = XCNEWVEC (VEC (constructor_elt, gc) *, info->phi_count);
  info->target_inbound_names = info->default_values + info->phi_count;
  info->target_outbound_names = info->target_inbound_names + info->phi_count;
  for (i = 0; i < info->phi_count; i++)
    info->constructors[i] = VEC_alloc (constructor_elt, gc, nelts);
}
/* Set up the SSA name table for the current function.  */

void
init_ssanames (void)
{
  free_ssanames = NULL;

  /* Start with room for 50 names.  */
  ssa_names = VEC_alloc (tree, gc, 50);

  /* Slot 0 is kept reserved: SSA version 0 is special.  Though
     currently unused, alias analysis may later use version 0 as part
     of the heuristics that group aliases when alias sets become too
     large.  The quick push is safe because at least 50 elements were
     reserved just above.  */
  VEC_quick_push (tree, ssa_names, NULL_TREE);
}
/* Allocate the temporary arrays used during switch conversion, sized
   by INFO.PHI_COUNT: default values, per-PHI constructor vectors and
   the inbound/outbound SSA name arrays.  */

static void
create_temp_arrays (void)
{
  int i;
  /* Each constructor needs one entry per value in the switch range,
     plus one.  Loop-invariant, so compute it a single time.  */
  unsigned int nelts = tree_low_cst (info.range_size, 1) + 1;

  info.default_values = (tree *) xcalloc (info.phi_count, sizeof (tree));
  /* Size CONSTRUCTORS by its actual element type.  The previous code
     used sizeof (tree) here, which was only correct by accident of
     both being pointer types.  */
  info.constructors = (VEC (constructor_elt, gc) **)
    xcalloc (info.phi_count, sizeof (VEC (constructor_elt, gc) *));
  info.target_inbound_names = (tree *) xcalloc (info.phi_count,
						sizeof (tree));
  info.target_outbound_names = (tree *) xcalloc (info.phi_count,
						 sizeof (tree));
  for (i = 0; i < info.phi_count; i++)
    info.constructors[i] = VEC_alloc (constructor_elt, gc, nelts);
}
/* Remove duplicated data references in PBB.  Two references are
   duplicates when can_collapse_pdrs says they can be collapsed into
   one.

   NOTE(review): the de-duplicated vector is built and then freed
   without ever being stored back into PBB_DRS; only the
   PBB_PDR_DUPLICATES_REMOVED flag survives.  Confirm whether the
   result was meant to replace PBB_DRS.  */

void
pbb_remove_duplicate_pdrs (poly_bb_p pbb)
{
  int i, j;
  poly_dr_p pdr1, pdr2;
  unsigned n = VEC_length (poly_dr_p, PBB_DRS (pbb));
  VEC (poly_dr_p, heap) *collapsed = VEC_alloc (poly_dr_p, heap, n);

  for (i = 0; VEC_iterate (poly_dr_p, PBB_DRS (pbb), i, pdr1); i++)
    {
      bool duplicate = false;

      /* PDR1 is a duplicate iff it collapses with some reference we
	 already kept.  The previous code pushed PDR1 once per
	 non-collapsible element — and never pushed into an empty
	 vector — which left COLLAPSED permanently empty.  */
      for (j = 0; VEC_iterate (poly_dr_p, collapsed, j, pdr2); j++)
	if (can_collapse_pdrs (pdr1, pdr2))
	  {
	    duplicate = true;
	    break;
	  }

      /* Quick push is safe: at most N elements are ever kept.  */
      if (!duplicate)
	VEC_quick_push (poly_dr_p, collapsed, pdr1);
    }

  VEC_free (poly_dr_p, heap, collapsed);
  PBB_PDR_DUPLICATES_REMOVED (pbb) = true;
}
void init_rtti_processing (void) { tree type_info_type; push_namespace (std_identifier); type_info_type = xref_tag (class_type, get_identifier ("type_info"), /*tag_scope=*/ts_current, false); pop_namespace (); const_type_info_type_node = build_qualified_type (type_info_type, TYPE_QUAL_CONST); type_info_ptr_type = build_pointer_type (const_type_info_type_node); unemitted_tinfo_decls = VEC_alloc (tree, 124); create_tinfo_types (); }
void init_ssanames (struct function *fn, int size) { if (size < 50) size = 50; SSANAMES (fn) = VEC_alloc (tree, gc, size); /* Version 0 is special, so reserve the first slot in the table. Though currently unused, we may use version 0 in alias analysis as part of the heuristics used to group aliases when the alias sets are too large. We use VEC_quick_push here because we know that SSA_NAMES has at least 50 elements reserved in it. */ VEC_quick_push (tree, SSANAMES (fn), NULL_TREE); FREE_SSANAMES (fn) = NULL; SYMS_TO_RENAME (fn) = BITMAP_GGC_ALLOC (); }
/* Duplicate the redirection var map of edge OLDE onto edge NEWE.  */

void
redirect_edge_var_map_dup (edge newe, edge olde)
{
  void **dst_slot, **src_slot;
  edge_var_map_vector src_vec;

  if (!edge_var_maps)
    return;

  /* NEWE is inserted before OLDE is looked up; keep this order, as
     inserting afterwards could invalidate the slot pointer obtained
     for OLDE.  */
  dst_slot = pointer_map_insert (edge_var_maps, newe);
  src_slot = pointer_map_contains (edge_var_maps, olde);
  if (!src_slot)
    return;

  src_vec = (edge_var_map_vector) *src_slot;
  if (src_vec)
    *dst_slot = VEC_copy (edge_var_map, heap, src_vec);
  else
    *dst_slot = VEC_alloc (edge_var_map, heap, 5);
}
/* Propagate the constant parameters found by ipcp_iterate_stage() to
   the function's code: clone the most profitable candidates (bounded
   by a unit-growth budget), substitute the constant arguments in the
   clones and redirect callers to them.  */
static void
ipcp_insert_stage (void)
{
  struct cgraph_node *node, *node1 = NULL;
  int i;
  VEC (cgraph_edge_p, heap) * redirect_callers;
  VEC (ipa_replace_map_p,gc)* replace_trees;
  int node_callers, count;
  tree parm_tree;
  struct ipa_replace_map *replace_param;
  fibheap_t heap;
  long overall_size = 0, new_size = 0;
  long max_new_size;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  if (dump_file)
    fprintf (dump_file, "\nIPA insert stage:\n\n");
  dead_nodes = BITMAP_ALLOC (NULL);

  /* Compute the unit's total self size and its hottest node count;
     these bound how much the cloning below may grow the code.  */
  for (node = cgraph_nodes; node; node = node->next)
    if (node->analyzed)
      {
	if (node->count > max_count)
	  max_count = node->count;
	overall_size += node->local.inline_summary.self_size;
      }

  max_new_size = overall_size;
  if (max_new_size < PARAM_VALUE (PARAM_LARGE_UNIT_INSNS))
    max_new_size = PARAM_VALUE (PARAM_LARGE_UNIT_INSNS);
  max_new_size = max_new_size * PARAM_VALUE (PARAM_IPCP_UNIT_GROWTH) / 100 + 1;

  /* First collect all functions we proved to have constant arguments
     to heap.  */
  heap = fibheap_new ();
  for (node = cgraph_nodes; node; node = node->next)
    {
      struct ipa_node_params *info;
      /* Propagation of the constant is forbidden in certain
	 conditions.  */
      if (!node->analyzed || !ipcp_node_modifiable_p (node))
	continue;
      info = IPA_NODE_REF (node);
      if (ipa_is_called_with_var_arguments (info))
	continue;
      if (ipcp_const_param_count (node))
	node->aux = fibheap_insert (heap, ipcp_estimate_cloning_cost (node),
				    node);
    }

  /* Now clone in priority order until code size growth limits are met
     or heap is emptied.  */
  while (!fibheap_empty (heap))
    {
      struct ipa_node_params *info;
      int growth = 0;
      bitmap args_to_skip;
      struct cgraph_edge *cs;

      node = (struct cgraph_node *)fibheap_extract_min (heap);
      node->aux = NULL;
      if (dump_file)
	fprintf (dump_file, "considering function %s\n",
		 cgraph_node_name (node));

      growth = ipcp_estimate_growth (node);

      /* The heap is ordered by cloning cost, so once the budget is
	 exhausted we can stop altogether.  */
      if (new_size + growth > max_new_size)
	break;
      if (growth
	  && optimize_function_for_size_p (DECL_STRUCT_FUNCTION (node->decl)))
	{
	  if (dump_file)
	    fprintf (dump_file, "Not versioning, cold code would grow");
	  continue;
	}

      info = IPA_NODE_REF (node);
      count = ipa_get_param_count (info);

      replace_trees = VEC_alloc (ipa_replace_map_p, gc, 1);

      if (node->local.can_change_signature)
	args_to_skip = BITMAP_GGC_ALLOC ();
      else
	args_to_skip = NULL;

      /* Build the replacement map for constant parameters, marking
	 unused and replaced parameters for removal when the signature
	 may change.  */
      for (i = 0; i < count; i++)
	{
	  struct ipcp_lattice *lat = ipcp_get_lattice (info, i);
	  parm_tree = ipa_get_param (info, i);

	  /* We can proactively remove obviously unused arguments.  */
	  if (!ipa_is_param_used (info, i))
	    {
	      if (args_to_skip)
		bitmap_set_bit (args_to_skip, i);
	      continue;
	    }

	  if (lat->type == IPA_CONST_VALUE)
	    {
	      replace_param = ipcp_create_replace_map (parm_tree, lat);
	      if (replace_param == NULL)
		break;
	      VEC_safe_push (ipa_replace_map_p, gc, replace_trees,
			     replace_param);
	      if (args_to_skip)
		bitmap_set_bit (args_to_skip, i);
	    }
	}

      /* I < COUNT means the loop above broke out because a replace
	 map could not be created; skip this node then.  */
      if (i < count)
	{
	  if (dump_file)
	    fprintf (dump_file,
		     "Not versioning, some parameters couldn't be replaced");
	  continue;
	}

      new_size += growth;

      /* Look if original function becomes dead after cloning.  */
      for (cs = node->callers; cs != NULL; cs = cs->next_caller)
	if (cs->caller == node || ipcp_need_redirect_p (cs))
	  break;
      if (!cs && cgraph_will_be_removed_from_program_if_no_direct_calls (node))
	bitmap_set_bit (dead_nodes, node->uid);

      /* Compute how many callers node has.  */
      node_callers = 0;
      for (cs = node->callers; cs != NULL; cs = cs->next_caller)
	node_callers++;
      redirect_callers = VEC_alloc (cgraph_edge_p, heap, node_callers);
      for (cs = node->callers; cs != NULL; cs = cs->next_caller)
	if (!cs->indirect_inlining_edge)
	  VEC_quick_push (cgraph_edge_p, redirect_callers, cs);

      /* Redirecting all the callers of the node to the new versioned
	 node.  */
      node1 = cgraph_create_virtual_clone (node, redirect_callers,
					   replace_trees, args_to_skip,
					   "constprop");
      args_to_skip = NULL;
      VEC_free (cgraph_edge_p, heap, redirect_callers);
      replace_trees = NULL;

      if (node1 == NULL)
	continue;
      ipcp_process_devirtualization_opportunities (node1);

      if (dump_file)
	fprintf (dump_file, "versioned function %s with growth %i, overall %i\n",
		 cgraph_node_name (node), (int)growth, (int)new_size);
      ipcp_init_cloned_node (node, node1);

      info = IPA_NODE_REF (node);
      for (i = 0; i < count; i++)
	{
	  struct ipcp_lattice *lat = ipcp_get_lattice (info, i);
	  if (lat->type == IPA_CONST_VALUE)
	    ipcp_discover_new_direct_edges (node1, i, lat->constant);
	}

      if (dump_file)
	dump_function_to_file (node1->decl, dump_file, dump_flags);

      /* Cloning may have changed the cloning cost of this node's
	 callees; re-insert any that are still queued with an updated
	 key.  */
      for (cs = node->callees; cs; cs = cs->next_callee)
	if (cs->callee->aux)
	  {
	    fibheap_delete_node (heap, (fibnode_t) cs->callee->aux);
	    cs->callee->aux = fibheap_insert (heap,
					      ipcp_estimate_cloning_cost (cs->callee),
					      cs->callee);
	  }
    }

  /* Drain the remaining candidates without cloning them.
     NOTE(review): the dump line is emitted before NODE is re-assigned,
     so each message names the previously extracted node rather than
     the one being skipped — confirm whether the extract was meant to
     come first.  */
  while (!fibheap_empty (heap))
    {
      if (dump_file)
	fprintf (dump_file, "skipping function %s\n",
		 cgraph_node_name (node));
      node = (struct cgraph_node *) fibheap_extract_min (heap);
      node->aux = NULL;
    }

  fibheap_delete (heap);
  BITMAP_FREE (dead_nodes);
  ipcp_update_callgraph ();
  ipcp_update_profiling ();
}
/* Initialize the per SSA_NAME value-handles array, sized for the
   current number of SSA names.  Any previous array must already have
   been released (set back to NULL) — the assert enforces that.  */
void threadedge_initialize_values (void) { gcc_assert (ssa_name_values == NULL); ssa_name_values = VEC_alloc(tree, heap, num_ssa_names); }
/* Find list of values for that we want to measure histograms.  Fills
   *VALUES with one histogram_value per interesting insn, then sets
   the number of profile counters each histogram type requires.  */
static void
rtl_find_values_to_profile (histogram_values *values)
{
  rtx insn;
  unsigned i, libcall_level;

  life_analysis (NULL, PROP_DEATH_NOTES);

  *values = VEC_alloc (histogram_value, 0);
  libcall_level = 0;
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      /* REG_LIBCALL / REG_RETVAL notes bracket a libcall sequence;
	 LIBCALL_LEVEL tracks the nesting depth.  */
      if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
	libcall_level++;

      /* Do not instrument values inside libcalls (we are going to
	 split block due to instrumentation, and libcall blocks should
	 be local to a single basic block).  */
      if (!libcall_level)
	insn_values_to_profile (insn, values);

      if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
	{
	  gcc_assert (libcall_level > 0);
	  libcall_level--;
	}
    }
  gcc_assert (libcall_level == 0);

  /* Decide how many counters each collected histogram needs.  */
  for (i = 0; i < VEC_length (histogram_value, *values); i++)
    {
      histogram_value hist = VEC_index (histogram_value, *values, i);
      switch (hist->type)
	{
	case HIST_TYPE_INTERVAL:
	  if (dump_file)
	    fprintf (dump_file,
		     "Interval counter for insn %d, range %d -- %d.\n",
		     INSN_UID ((rtx)hist->insn),
		     hist->hdata.intvl.int_start,
		     (hist->hdata.intvl.int_start
		      + hist->hdata.intvl.steps - 1));
	  /* One counter per step, plus optional buckets for values
	     below and above the range.  */
	  hist->n_counters = hist->hdata.intvl.steps
	      + (hist->hdata.intvl.may_be_less ? 1 : 0)
	      + (hist->hdata.intvl.may_be_more ? 1 : 0);
	  break;

	case HIST_TYPE_POW2:
	  if (dump_file)
	    fprintf (dump_file, "Pow2 counter for insn %d.\n",
		     INSN_UID ((rtx)hist->insn));
	  /* One counter per possible power of two, plus an optional
	     "other" bucket.  */
	  hist->n_counters = GET_MODE_BITSIZE (hist->mode)
	      + (hist->hdata.pow2.may_be_other ? 1 : 0);
	  break;

	case HIST_TYPE_SINGLE_VALUE:
	  if (dump_file)
	    fprintf (dump_file, "Single value counter for insn %d.\n",
		     INSN_UID ((rtx)hist->insn));
	  hist->n_counters = 3;
	  break;

	case HIST_TYPE_CONST_DELTA:
	  if (dump_file)
	    fprintf (dump_file, "Constant delta counter for insn %d.\n",
		     INSN_UID ((rtx)hist->insn));
	  hist->n_counters = 4;
	  break;

	default:
	  abort ();
	}
    }
  allocate_reg_info (max_reg_num (), FALSE, FALSE);
}
/* Handler for the "latent_entropy" attribute.  Accepts functions
   (no-op) and static, uninitialized variables of integer type,
   fixed-length integer array type, or structure type with only
   integer fields; any other use is rejected with an error.  Accepted
   variables are seeded with random constant initializers.  */
static tree handle_latent_entropy_attribute(tree *node, tree name, tree args, int flags, bool *no_add_attrs)
{
	tree type;
	unsigned long long mask;
#if BUILDING_GCC_VERSION <= 4007
	VEC(constructor_elt, gc) *vals;
#else
	vec<constructor_elt, va_gc> *vals;
#endif

	switch (TREE_CODE(*node)) {
	default:
		*no_add_attrs = true;
		error("%qE attribute only applies to functions and variables", name);
		break;

	case VAR_DECL:
		/* The attribute provides the initializer itself, so a
		   user-written one is an error.  */
		if (DECL_INITIAL(*node)) {
			*no_add_attrs = true;
			error("variable %qD with %qE attribute must not be initialized", *node, name);
			break;
		}

		if (!TREE_STATIC(*node)) {
			*no_add_attrs = true;
			error("variable %qD with %qE attribute must not be local", *node, name);
			break;
		}

		type = TREE_TYPE(*node);
		switch (TREE_CODE(type)) {
		default:
			*no_add_attrs = true;
			error("variable %qD with %qE attribute must be an integer or a fixed length integer array type or a fixed sized structure with integer fields", *node, name);
			break;

		case RECORD_TYPE: {
			tree field;
			unsigned int nelt = 0;

			/* Count the fields, verifying each is an integer.  */
			for (field = TYPE_FIELDS(type); field; nelt++, field = TREE_CHAIN(field)) {
				tree fieldtype;

				fieldtype = TREE_TYPE(field);
				if (TREE_CODE(fieldtype) != INTEGER_TYPE) {
					*no_add_attrs = true;
					error("structure variable %qD with %qE attribute has a non-integer field %qE", *node, name, field);
					break;
				}
			}

			/* FIELD is non-NULL iff the loop stopped on a
			   non-integer field above.  */
			if (field)
				break;

#if BUILDING_GCC_VERSION <= 4007
			vals = VEC_alloc(constructor_elt, gc, nelt);
#else
			vec_alloc(vals, nelt);
#endif

			/* Seed each field with a random constant masked to
			   the field's precision.  NOTE(review): the mask is
			   built in an unsigned long long, which assumes
			   field sizes of at most 64 bits — confirm.  */
			for (field = TYPE_FIELDS(type); field; field = TREE_CHAIN(field)) {
				tree fieldtype;

				fieldtype = TREE_TYPE(field);
				mask = 1ULL << (TREE_INT_CST_LOW(TYPE_SIZE(fieldtype)) - 1);
				mask = 2 * (mask - 1) + 1;

				if (TYPE_UNSIGNED(fieldtype))
					CONSTRUCTOR_APPEND_ELT(vals, field, build_int_cstu(fieldtype, mask & get_random_const()));
				else
					CONSTRUCTOR_APPEND_ELT(vals, field, build_int_cst(fieldtype, mask & get_random_const()));
			}

			DECL_INITIAL(*node) = build_constructor(type, vals);
//debug_tree(DECL_INITIAL(*node));
			break;
		}

		case INTEGER_TYPE:
			/* Mask a random constant to the variable's own
			   precision (same 64-bit assumption as above).  */
			mask = 1ULL << (TREE_INT_CST_LOW(TYPE_SIZE(type)) - 1);
			mask = 2 * (mask - 1) + 1;

			if (TYPE_UNSIGNED(type))
				DECL_INITIAL(*node) = build_int_cstu(type, mask & get_random_const());
			else
				DECL_INITIAL(*node) = build_int_cst(type, mask & get_random_const());
			break;

		case ARRAY_TYPE: {
			tree elt_type, array_size, elt_size;
			unsigned int i, nelt;

			elt_type = TREE_TYPE(type);
			elt_size = TYPE_SIZE_UNIT(TREE_TYPE(type));
			array_size = TYPE_SIZE_UNIT(type);

			/* Only fixed-length integer arrays qualify.  */
			if (TREE_CODE(elt_type) != INTEGER_TYPE || !array_size || TREE_CODE(array_size) != INTEGER_CST) {
				*no_add_attrs = true;
				error("array variable %qD with %qE attribute must be a fixed length integer array type", *node, name);
				break;
			}

			nelt = TREE_INT_CST_LOW(array_size) / TREE_INT_CST_LOW(elt_size);
#if BUILDING_GCC_VERSION <= 4007
			vals = VEC_alloc(constructor_elt, gc, nelt);
#else
			vec_alloc(vals, nelt);
#endif

			/* Mask matching the element precision (again
			   assuming at most 64-bit elements).  */
			mask = 1ULL << (TREE_INT_CST_LOW(TYPE_SIZE(elt_type)) - 1);
			mask = 2 * (mask - 1) + 1;

			for (i = 0; i < nelt; i++)
				if (TYPE_UNSIGNED(elt_type))
					CONSTRUCTOR_APPEND_ELT(vals, size_int(i), build_int_cstu(elt_type, mask & get_random_const()));
				else
					CONSTRUCTOR_APPEND_ELT(vals, size_int(i), build_int_cst(elt_type, mask & get_random_const()));

			DECL_INITIAL(*node) = build_constructor(type, vals);
//debug_tree(DECL_INITIAL(*node));
			break;
		}
		}
		break;

	case FUNCTION_DECL:
		break;
	}

	return NULL_TREE;
}
/* Handler for the "latent_entropy" attribute (newer variant that
   delegates random-constant creation to tree_get_random_const).
   Accepts functions (no-op) and static, uninitialized variables of
   integer type, fixed-length integer array type, or structure type
   whose fields are all integers; other uses are rejected with an
   error.  Accepted variables get random constant initializers.  */
static tree handle_latent_entropy_attribute(tree *node, tree name, tree args __unused, int flags __unused, bool *no_add_attrs)
{
	tree type;
#if BUILDING_GCC_VERSION <= 4007
	VEC(constructor_elt, gc) *vals;
#else
	vec<constructor_elt, va_gc> *vals;
#endif

	switch (TREE_CODE(*node)) {
	default:
		*no_add_attrs = true;
		error("%qE attribute only applies to functions and variables", name);
		break;

	case VAR_DECL:
		/* The attribute supplies the initializer; a user-written
		   one is an error.  */
		if (DECL_INITIAL(*node)) {
			*no_add_attrs = true;
			error("variable %qD with %qE attribute must not be initialized", *node, name);
			break;
		}

		if (!TREE_STATIC(*node)) {
			*no_add_attrs = true;
			error("variable %qD with %qE attribute must not be local", *node, name);
			break;
		}

		type = TREE_TYPE(*node);
		switch (TREE_CODE(type)) {
		default:
			*no_add_attrs = true;
			error("variable %qD with %qE attribute must be an integer or a fixed length integer array type or a fixed sized structure with integer fields", *node, name);
			break;

		case RECORD_TYPE: {
			tree fld, lst = TYPE_FIELDS(type);
			unsigned int nelt = 0;

			/* Count the fields, verifying each is an integer.  */
			for (fld = lst; fld; nelt++, fld = TREE_CHAIN(fld)) {
				tree fieldtype;

				fieldtype = TREE_TYPE(fld);
				if (TREE_CODE(fieldtype) == INTEGER_TYPE)
					continue;

				*no_add_attrs = true;
				error("structure variable %qD with %qE attribute has a non-integer field %qE", *node, name, fld);
				break;
			}

			/* FLD is non-NULL iff the loop stopped early on a
			   non-integer field.  */
			if (fld)
				break;

#if BUILDING_GCC_VERSION <= 4007
			vals = VEC_alloc(constructor_elt, gc, nelt);
#else
			vec_alloc(vals, nelt);
#endif

			for (fld = lst; fld; fld = TREE_CHAIN(fld)) {
				tree random_const, fld_t = TREE_TYPE(fld);

				random_const = tree_get_random_const(fld_t);
				CONSTRUCTOR_APPEND_ELT(vals, fld, random_const);
			}

			/* Initialize the fields with random constants */
			DECL_INITIAL(*node) = build_constructor(type, vals);
			break;
		}

		/* Initialize the variable with a random constant */
		case INTEGER_TYPE:
			DECL_INITIAL(*node) = tree_get_random_const(type);
			break;

		case ARRAY_TYPE: {
			tree elt_type, array_size, elt_size;
			unsigned int i, nelt;
			HOST_WIDE_INT array_size_int, elt_size_int;

			elt_type = TREE_TYPE(type);
			elt_size = TYPE_SIZE_UNIT(TREE_TYPE(type));
			array_size = TYPE_SIZE_UNIT(type);

			/* Only fixed-length integer arrays qualify.  */
			if (TREE_CODE(elt_type) != INTEGER_TYPE || !array_size || TREE_CODE(array_size) != INTEGER_CST) {
				*no_add_attrs = true;
				error("array variable %qD with %qE attribute must be a fixed length integer array type", *node, name);
				break;
			}

			array_size_int = TREE_INT_CST_LOW(array_size);
			elt_size_int = TREE_INT_CST_LOW(elt_size);
			nelt = array_size_int / elt_size_int;

#if BUILDING_GCC_VERSION <= 4007
			vals = VEC_alloc(constructor_elt, gc, nelt);
#else
			vec_alloc(vals, nelt);
#endif

			for (i = 0; i < nelt; i++) {
				tree cst = size_int(i);
				tree rand_cst = tree_get_random_const(elt_type);

				CONSTRUCTOR_APPEND_ELT(vals, cst, rand_cst);
			}

			/*
			 * Initialize the elements of the array with random
			 * constants
			 */
			DECL_INITIAL(*node) = build_constructor(type, vals);
			break;
		}
		}
		break;

	case FUNCTION_DECL:
		break;
	}

	return NULL_TREE;
}
}

/* Create a new polyhedral black box.  Allocates a poly_bb for SCOP
   wrapping BLACK_BOX, marks it as a reduction per REDUCTION, and
   registers it in SCOP's list of basic blocks.  */

void
new_poly_bb (scop_p scop, void *black_box, bool reduction)
{
  poly_bb_p pbb = XNEW (struct poly_bb);

  /* A fresh pbb has no domain and no transformed/saved/original
     schedules yet.  */
  PBB_DOMAIN (pbb) = NULL;
  PBB_SCOP (pbb) = scop;
  pbb_set_black_box (pbb, black_box);
  PBB_TRANSFORMED (pbb) = NULL;
  PBB_SAVED (pbb) = NULL;
  PBB_ORIGINAL (pbb) = NULL;
  PBB_DRS (pbb) = VEC_alloc (poly_dr_p, heap, 3);
  PBB_IS_REDUCTION (pbb) = reduction;
  PBB_PDR_DUPLICATES_REMOVED (pbb) = false;

  /* Register the new pbb with its scop.  */
  VEC_safe_push (poly_bb_p, heap, SCOP_BBS (scop), pbb);
}

/* Free polyhedral black box.  */

void
free_poly_bb (poly_bb_p pbb)
{
  int i;
  poly_dr_p pdr;

  ppl_delete_Pointset_Powerset_C_Polyhedron (PBB_DOMAIN (pbb));
/* Build and register a static VAR_DECL holding a ubsan data record
   named NAME.  When LOC is known it contributes a leading
   __ubsan_source_location field; the variadic arguments are up to
   three type-descriptor trees (terminated by NULL_TREE), each stored
   in a pointer-to-descriptor field.  Returns the VAR_DECL.
   NOTE(review): TYPE_FIELDS is taken from fields[0], so this assumes
   at least one field is present (known LOC or one vararg) — confirm
   callers never pass neither.  */
tree
ubsan_create_data (const char *name, location_t loc, ...)
{
  va_list args;
  tree ret, t;
  tree fields[3];
  VEC(tree, gc) *saved_args = NULL;
  size_t i = 0;

  /* Firstly, create a pointer to type descriptor type.  */
  tree td_type = ubsan_type_descriptor_type ();
  TYPE_READONLY (td_type) = 1;
  td_type = build_pointer_type (td_type);

  /* Create the structure type.  */
  ret = make_node (RECORD_TYPE);
  if (loc != UNKNOWN_LOCATION)
    {
      fields[i] = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
			      ubsan_source_location_type ());
      DECL_CONTEXT (fields[i]) = ret;
      i++;
    }

  va_start (args, loc);
  for (t = va_arg (args, tree); t != NULL_TREE; i++, t = va_arg (args, tree))
    {
      gcc_checking_assert (i < 3);
      /* Save the tree argument for later use.  */
      VEC_safe_push (tree, gc, saved_args, t);
      fields[i] = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
			      td_type);
      DECL_CONTEXT (fields[i]) = ret;
      /* Chain each field after the previous one.  */
      if (i)
	DECL_CHAIN (fields[i - 1]) = fields[i];
    }
  TYPE_FIELDS (ret) = fields[0];
  TYPE_NAME (ret) = get_identifier (name);
  layout_type (ret);
  va_end (args);

  /* Now, fill in the type.  */
  char tmp_name[32];
  static unsigned int ubsan_var_id_num;
  ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lubsan_data", ubsan_var_id_num++);
  /* The variable is static, file-local and compiler-generated.  */
  tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (tmp_name),
			 ret);
  TREE_STATIC (var) = 1;
  TREE_PUBLIC (var) = 0;
  DECL_ARTIFICIAL (var) = 1;
  DECL_IGNORED_P (var) = 1;
  DECL_EXTERNAL (var) = 0;

  /* I now equals the total number of fields, so reserving I slots
     means the appends below should never reallocate — the constructor
     built from V before the appends keeps seeing the live storage.  */
  VEC(constructor_elt, gc) *v;
  v = VEC_alloc (constructor_elt, gc, i);
  tree ctor = build_constructor (ret, v);

  /* If desirable, set the __ubsan_source_location element.  */
  if (loc != UNKNOWN_LOCATION)
    CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, ubsan_source_location (loc));

  size_t nelts = VEC_length (tree, saved_args);
  for (i = 0; i < nelts; i++)
    {
      t = VEC_index (tree, saved_args, i);
      CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, t);
    }

  TREE_CONSTANT (ctor) = 1;
  TREE_STATIC (ctor) = 1;
  DECL_INITIAL (var) = ctor;
  rest_of_decl_compilation (var, 1, 0);
  return var;
}