/* Generate code that calls the runtime error routine if COND is true at run
   time.  MSGID is the message and WHERE locates the check in the source.  */

void
gfc_trans_runtime_check (tree cond, const char * msgid, stmtblock_t * pblock,
                         locus * where)
{
  stmtblock_t block;
  tree body;
  tree tmp;
  tree args;
  char * message;
  int line;

  if (integer_zerop (cond))
    return;

  /* The code to generate the error.  */
  gfc_start_block (&block);

  if (where)
    {
#ifdef USE_MAPPED_LOCATION
      line = LOCATION_LINE (where->lb->location);
#else
      line = where->lb->linenum;
#endif
      asprintf (&message, "%s (in file '%s', at line %d)", _(msgid),
                where->lb->file->filename, line);
    }
  else
    asprintf (&message, "%s (in file '%s', around line %d)", _(msgid),
              gfc_source_file, input_line + 1);

  tmp = gfc_build_addr_expr (pchar_type_node,
                             gfc_build_cstring_const (message));
  gfc_free (message);

  args = gfc_chainon_list (NULL_TREE, tmp);
  tmp = build_function_call_expr (gfor_fndecl_runtime_error, args);
  gfc_add_expr_to_block (&block, tmp);

  body = gfc_finish_block (&block);

  if (integer_onep (cond))
    {
      gfc_add_expr_to_block (pblock, body);
    }
  else
    {
      /* Tell the compiler that this isn't likely.  */
      cond = fold_convert (long_integer_type_node, cond);
      tmp = gfc_chainon_list (NULL_TREE, cond);
      tmp = gfc_chainon_list (tmp, build_int_cst (long_integer_type_node, 0));
      cond = build_function_call_expr (built_in_decls[BUILT_IN_EXPECT], tmp);
      cond = fold_convert (boolean_type_node, cond);

      tmp = build3_v (COND_EXPR, cond, body, build_empty_stmt ());
      gfc_add_expr_to_block (pblock, tmp);
    }
}
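/* A hedged usage sketch (not part of the original sources): callers build a
   boolean condition tree that is true exactly when the check should fail at
   run time, then hand it to gfc_trans_runtime_check above.  The index and
   bound trees and the message text here are hypothetical.  */

static void
example_emit_bounds_check (tree index, tree ubound, stmtblock_t *pblock,
                           locus *where)
{
  /* Fail if index > ubound.  */
  tree cond = fold_build2 (GT_EXPR, boolean_type_node, index, ubound);
  gfc_trans_runtime_check (cond, "Array index out of bounds", pblock, where);
}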
/* Emit a memory fence on each exit of LOOP, so that the nontemporal (movnt)
   stores issued inside the loop are ordered before any following reads.  */

static void
emit_mfence_after_loop (struct loop *loop)
{
  VEC (edge, heap) *exits = get_loop_exit_edges (loop);
  edge exit;
  tree call;
  block_stmt_iterator bsi;
  unsigned i;

  for (i = 0; VEC_iterate (edge, exits, i, exit); i++)
    {
      call = build_function_call_expr (FENCE_FOLLOWING_MOVNT, NULL_TREE);

      if (!single_pred_p (exit->dest)
          /* If possible, we prefer not to insert the fence on other paths
             in cfg.  */
          && !(exit->flags & EDGE_ABNORMAL))
        split_loop_exit_edge (exit);
      bsi = bsi_after_labels (exit->dest);

      bsi_insert_before (&bsi, call, BSI_NEW_STMT);
      mark_virtual_ops_for_renaming (call);
    }

  VEC_free (edge, heap, exits);
  update_ssa (TODO_update_ssa_only_virtuals);
}
/* Complete a #pragma omp barrier construct: emit a call to GOMP_barrier.  */

void
c_finish_omp_barrier (void)
{
  tree x;

  x = built_in_decls[BUILT_IN_GOMP_BARRIER];
  x = build_function_call_expr (x, NULL);
  add_stmt (x);
}
/* Complete a #pragma omp flush construct: emit a full memory barrier.  */

void
c_finish_omp_flush (void)
{
  tree x;

  x = built_in_decls[BUILT_IN_SYNCHRONIZE];
  x = build_function_call_expr (x, NULL);
  add_stmt (x);
}
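/* A hedged illustration (not part of the original sources): the two helpers
   above emit calls to BUILT_IN_GOMP_BARRIER and BUILT_IN_SYNCHRONIZE, which
   correspond at run time to the libgomp entry point GOMP_barrier and to
   GCC's full memory-fence builtin __sync_synchronize.  A minimal sketch of
   the lowered form:  */

extern void GOMP_barrier (void);

static void
example_lowered_omp_sync (void)
{
  GOMP_barrier ();         /* #pragma omp barrier  */
  __sync_synchronize ();   /* #pragma omp flush    */
}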
/* Emit GIMPLE that updates the one-value profile counter for VALUE, stored
   in the coverage counter section TAG at index BASE, by calling the
   one-value profiler runtime routine before the measured statement.  */

static void
tree_gen_one_value_profiler (histogram_value value, unsigned tag, unsigned base)
{
  tree stmt = value->hvalue.stmt;
  block_stmt_iterator bsi = bsi_for_stmt (stmt);
  tree ref = tree_coverage_counter_ref (tag, base), ref_ptr;
  tree args, call, val;

  ref_ptr = force_gimple_operand_bsi (&bsi,
                                      build_addr (ref, current_function_decl),
                                      true, NULL_TREE);
  val = prepare_instrumented_value (&bsi, value);
  args = tree_cons (NULL_TREE, ref_ptr,
                    tree_cons (NULL_TREE, val, NULL_TREE));
  call = build_function_call_expr (tree_one_value_profiler_fn, args);
  bsi_insert_before (&bsi, call, BSI_SAME_STMT);
}
/* Build a call to __mf_register for the static object OBJ of OBJECT_SIZE
   bytes, named VARNAME, and queue it for emission in the file constructor.  */

static void
mudflap_register_call (tree obj, tree object_size, tree varname)
{
  tree arg, args, call_stmt;

  args = tree_cons (NULL_TREE, varname, NULL_TREE);

  arg = build_int_cst (NULL_TREE, 4); /* __MF_TYPE_STATIC */
  args = tree_cons (NULL_TREE, arg, args);

  arg = convert (size_type_node, object_size);
  args = tree_cons (NULL_TREE, arg, args);

  arg = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (obj)), obj);
  arg = convert (ptr_type_node, arg);
  args = tree_cons (NULL_TREE, arg, args);

  call_stmt = build_function_call_expr (mf_register_fndecl, args);
  append_to_statement_list (call_stmt, &enqueued_call_stmt_chain);
}
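/* A hedged illustration (not part of the original sources): the TREE_LIST
   assembled above, read in argument order, corresponds to a libmudflap
   registration call of the following shape.  The object name `buf' is
   hypothetical; the constant 4 matches the __MF_TYPE_STATIC value built
   above.  */

#include <stddef.h>

extern void __mf_register (void *ptr, size_t sz, int type, const char *name);

static char buf[64];

static void
example_static_registration (void)
{
  __mf_register ((void *) &buf, sizeof (buf), 4 /* __MF_TYPE_STATIC */, "buf");
}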
/* Emit a call to _Jv_RegisterResource for each compiled-in resource,
   appending the calls to *LIST_P in definition order.  */

void
write_resource_constructor (tree *list_p)
{
  tree iter, t, register_resource_fn;

  if (resources == NULL)
    return;

  t = build_function_type_list (void_type_node, ptr_type_node, NULL);
  t = build_decl (FUNCTION_DECL, get_identifier ("_Jv_RegisterResource"), t);
  TREE_PUBLIC (t) = 1;
  DECL_EXTERNAL (t) = 1;
  register_resource_fn = t;

  /* Write out entries in the same order in which they were defined.  */
  for (iter = nreverse (resources); iter ; iter = TREE_CHAIN (iter))
    {
      t = build_fold_addr_expr (TREE_VALUE (iter));
      t = tree_cons (NULL, t, NULL);
      t = build_function_call_expr (register_resource_fn, t);
      append_to_statement_list (t, list_p);
    }
}
/* Group the static constructors (if CTOR_P) or destructors in CDTORS, an
   array of LEN function decls sorted by priority, and emit one synthetic
   function per priority level that calls each of them in turn.  */

static void
build_cdtor (bool ctor_p, tree *cdtors, size_t len)
{
  size_t i;

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;

      /* Find the next batch of constructors/destructors with the same
         initialization priority.  */
      do
        {
          priority_type p;
          fn = cdtors[i];
          p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
          if (!body)
            priority = p;
          else if (p != priority)
            break;
          append_to_statement_list (build_function_call_expr (fn, 0), &body);
          ++i;
        }
      while (i < len);

      gcc_assert (body != NULL_TREE);

      /* Generate a function to call all the functions of like priority.  */
      cgraph_build_static_cdtor (ctor_p ? 'I' : 'D', body, priority);
    }
}
/* Issue prefetch instructions ahead of the memory reference REF.  The loop
   containing REF has been unrolled UNROLL_FACTOR times, and the data should
   be prefetched AHEAD iterations in advance.  */

static void
issue_prefetch_ref (struct mem_ref *ref, unsigned unroll_factor, unsigned ahead)
{
  HOST_WIDE_INT delta;
  tree addr, addr_base, prefetch, params, write_p;
  block_stmt_iterator bsi;
  unsigned n_prefetches, ap;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Issued prefetch for %p.\n", (void *) ref);

  bsi = bsi_for_stmt (ref->stmt);

  n_prefetches = ((unroll_factor + ref->prefetch_mod - 1)
                  / ref->prefetch_mod);
  addr_base = build_fold_addr_expr_with_type (ref->mem, ptr_type_node);
  addr_base = force_gimple_operand_bsi (&bsi, unshare_expr (addr_base),
                                        true, NULL);

  for (ap = 0; ap < n_prefetches; ap++)
    {
      /* Determine the address to prefetch.  */
      delta = (ahead + ap * ref->prefetch_mod) * ref->group->step;
      addr = fold_build2 (PLUS_EXPR, ptr_type_node,
                          addr_base, build_int_cst (ptr_type_node, delta));
      addr = force_gimple_operand_bsi (&bsi, unshare_expr (addr), true, NULL);

      /* Create the prefetch instruction.  */
      write_p = ref->write_p ? integer_one_node : integer_zero_node;
      params = tree_cons (NULL_TREE, addr,
                          tree_cons (NULL_TREE, write_p, NULL_TREE));

      prefetch = build_function_call_expr (built_in_decls[BUILT_IN_PREFETCH],
                                           params);
      bsi_insert_before (&bsi, prefetch, BSI_SAME_STMT);
    }
}
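/* A hedged illustration (not part of the original sources): each statement
   emitted by the loop above is the GIMPLE form of GCC's prefetch builtin,
   whose second argument distinguishes read (0) from write (1) prefetches.
   The names below are hypothetical.  */

static void
example_emitted_prefetch (char *base, long step, unsigned ahead)
{
  __builtin_prefetch (base + ahead * step, 1);   /* ref->write_p set     */
  __builtin_prefetch (base + ahead * step, 0);   /* read-only reference  */
}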
/* Translate !$OMP BARRIER into a call to GOMP_barrier.  */

static tree
gfc_trans_omp_barrier (void)
{
  tree decl = built_in_decls [BUILT_IN_GOMP_BARRIER];
  return build_function_call_expr (decl, NULL);
}
/* Translate !$OMP FLUSH into a full memory barrier.  */

static tree
gfc_trans_omp_flush (void)
{
  tree decl = built_in_decls [BUILT_IN_SYNCHRONIZE];
  return build_function_call_expr (decl, NULL);
}
/* Synthesize a CALL_EXPR and a TRY_FINALLY_EXPR, for this chain of _DECLs if
   appropriate.  Arrange to call the __mf_register function now, and the
   __mf_unregister function later for each.  */

static void
mx_register_decls (tree decl, tree *stmt_list)
{
  tree finally_stmts = NULL_TREE;
  tree_stmt_iterator initially_stmts = tsi_start (*stmt_list);

  while (decl != NULL_TREE)
    {
      if (mf_decl_eligible_p (decl)
          /* Not already processed.  */
          && ! mf_marked_p (decl)
          /* Automatic variable.  */
          && ! DECL_EXTERNAL (decl) && ! TREE_STATIC (decl))
        {
          tree size = NULL_TREE, variable_name;
          tree unregister_fncall, unregister_fncall_params;
          tree register_fncall, register_fncall_params;

          size = convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (decl)));

          /* (& VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK) */
          unregister_fncall_params =
            tree_cons (NULL_TREE,
                       convert (ptr_type_node,
                                mf_mark (build1 (ADDR_EXPR,
                                                 build_pointer_type (TREE_TYPE (decl)),
                                                 decl))),
                       tree_cons (NULL_TREE,
                                  size,
                                  tree_cons (NULL_TREE,
                                             /* __MF_TYPE_STACK */
                                             build_int_cst (NULL_TREE, 3),
                                             NULL_TREE)));

          /* __mf_unregister (...) */
          unregister_fncall = build_function_call_expr (mf_unregister_fndecl,
                                                        unregister_fncall_params);

          /* (& VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK, "name") */
          variable_name = mf_varname_tree (decl);
          register_fncall_params =
            tree_cons (NULL_TREE,
                       convert (ptr_type_node,
                                mf_mark (build1 (ADDR_EXPR,
                                                 build_pointer_type (TREE_TYPE (decl)),
                                                 decl))),
                       tree_cons (NULL_TREE,
                                  size,
                                  tree_cons (NULL_TREE,
                                             /* __MF_TYPE_STACK */
                                             build_int_cst (NULL_TREE, 3),
                                             tree_cons (NULL_TREE,
                                                        variable_name,
                                                        NULL_TREE))));

          /* __mf_register (...) */
          register_fncall = build_function_call_expr (mf_register_fndecl,
                                                      register_fncall_params);

          /* Accumulate the two calls.  */
          /* ??? Set EXPR_LOCATION.  */
          gimplify_stmt (&register_fncall);
          gimplify_stmt (&unregister_fncall);

          /* Add the __mf_register call at the current appending point.  */
          if (tsi_end_p (initially_stmts))
            warning (0, "mudflap cannot track %qs in stub function",
                     IDENTIFIER_POINTER (DECL_NAME (decl)));
          else
            {
              tsi_link_before (&initially_stmts, register_fncall, TSI_SAME_STMT);

              /* Accumulate the FINALLY piece.  */
              append_to_statement_list (unregister_fncall, &finally_stmts);
            }
          mf_mark (decl);
        }

      decl = TREE_CHAIN (decl);
    }

  /* Actually, (initially_stmts!=NULL) <=> (finally_stmts!=NULL) */
  if (finally_stmts != NULL_TREE)
    {
      tree t = build2 (TRY_FINALLY_EXPR, void_type_node,
                       *stmt_list, finally_stmts);
      *stmt_list = NULL;
      append_to_statement_list (t, stmt_list);
    }
}
/* Build and insert the mudflap lookup-cache check for the access covering
   [BASE .. LIMIT], splitting the CFG so that the (unlikely) cache-miss path
   calls __mf_check.  DIRFLAG distinguishes reads from writes, and LOCUS
   locates the access for diagnostics.  */

static void
mf_build_check_statement_for (tree base, tree limit,
                              block_stmt_iterator *instr_bsi,
                              location_t *locus, tree dirflag)
{
  tree_stmt_iterator head, tsi;
  block_stmt_iterator bsi;
  basic_block cond_bb, then_bb, join_bb;
  edge e;
  tree cond, t, u, v;
  tree mf_base;
  tree mf_elem;
  tree mf_limit;

  /* We first need to split the current basic block, and start altering
     the CFG.  This allows us to insert the statements we're about to
     construct into the right basic blocks.  */

  cond_bb = bb_for_stmt (bsi_stmt (*instr_bsi));
  bsi = *instr_bsi;
  bsi_prev (&bsi);
  if (! bsi_end_p (bsi))
    e = split_block (cond_bb, bsi_stmt (bsi));
  else
    e = split_block_after_labels (cond_bb);
  cond_bb = e->src;
  join_bb = e->dest;

  /* A recap at this point: join_bb is the basic block at whose head is the
     gimple statement for which this check expression is being built.
     cond_bb is the (possibly new, synthetic) basic block the end of which
     will contain the cache-lookup code, and a conditional that jumps to the
     cache-miss code or, much more likely, over to join_bb.  */

  /* Create the bb that contains the cache-miss fallback block (mf_check).  */
  then_bb = create_empty_bb (cond_bb);
  make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  make_single_succ_edge (then_bb, join_bb, EDGE_FALLTHRU);

  /* Mark the pseudo-fallthrough edge from cond_bb to join_bb.  */
  e = find_edge (cond_bb, join_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->count = cond_bb->count;
  e->probability = REG_BR_PROB_BASE;

  /* Update dominance info.  Note that bb_join's data was updated by
     split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
      set_immediate_dominator (CDI_DOMINATORS, join_bb, cond_bb);
    }

  /* Build our local variables.  */
  mf_elem = create_tmp_var (mf_cache_structptr_type, "__mf_elem");
  mf_base = create_tmp_var (mf_uintptr_type, "__mf_base");
  mf_limit = create_tmp_var (mf_uintptr_type, "__mf_limit");

  /* Build: __mf_base = (uintptr_t) <base address expression>.  */
  t = build2 (MODIFY_EXPR, void_type_node, mf_base,
              convert (mf_uintptr_type, unshare_expr (base)));
  SET_EXPR_LOCUS (t, locus);
  gimplify_to_stmt_list (&t);
  head = tsi_start (t);
  tsi = tsi_last (t);

  /* Build: __mf_limit = (uintptr_t) <limit address expression>.  */
  t = build2 (MODIFY_EXPR, void_type_node, mf_limit,
              convert (mf_uintptr_type, unshare_expr (limit)));
  SET_EXPR_LOCUS (t, locus);
  gimplify_to_stmt_list (&t);
  tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING);

  /* Build: __mf_elem = &__mf_lookup_cache [(__mf_base >> __mf_shift)
                                            & __mf_mask].  */
  t = build2 (RSHIFT_EXPR, mf_uintptr_type, mf_base,
              (flag_mudflap_threads ? mf_cache_shift_decl
                                    : mf_cache_shift_decl_l));
  t = build2 (BIT_AND_EXPR, mf_uintptr_type, t,
              (flag_mudflap_threads ? mf_cache_mask_decl
                                    : mf_cache_mask_decl_l));
  t = build4 (ARRAY_REF,
              TREE_TYPE (TREE_TYPE (mf_cache_array_decl)),
              mf_cache_array_decl, t, NULL_TREE, NULL_TREE);
  t = build1 (ADDR_EXPR, mf_cache_structptr_type, t);
  t = build2 (MODIFY_EXPR, void_type_node, mf_elem, t);
  SET_EXPR_LOCUS (t, locus);
  gimplify_to_stmt_list (&t);
  tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING);

  /* Quick validity check.

     if (__mf_elem->low > __mf_base
         || (__mf_elem->high < __mf_limit))
        {
          __mf_check ();
          ... and only if single-threaded:
          __mf_lookup_shift_l = ...;
          __mf_lookup_mask_l = ...;
        }

     It is expected that this body of code is rarely executed so we mark
     the edge to the THEN clause of the conditional jump as unlikely.  */

  /* Construct t <-- '__mf_elem->low > __mf_base'.  */
  t = build3 (COMPONENT_REF, mf_uintptr_type,
              build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
              TYPE_FIELDS (mf_cache_struct_type), NULL_TREE);
  t = build2 (GT_EXPR, boolean_type_node, t, mf_base);

  /* Construct '__mf_elem->high < __mf_limit'.

     First build:
        1) u <-- '__mf_elem->high'
        2) v <-- '__mf_limit'.

     Then build 'u <-- (u < v).  */

  u = build3 (COMPONENT_REF, mf_uintptr_type,
              build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
              TREE_CHAIN (TYPE_FIELDS (mf_cache_struct_type)), NULL_TREE);

  v = mf_limit;

  u = build2 (LT_EXPR, boolean_type_node, u, v);

  /* Build the composed conditional: t <-- 't || u'.  Then store the result
     of the evaluation of 't' in a temporary variable which we can use as the
     condition for the conditional jump.  */
  t = build2 (TRUTH_OR_EXPR, boolean_type_node, t, u);
  cond = create_tmp_var (boolean_type_node, "__mf_unlikely_cond");
  t = build2 (MODIFY_EXPR, boolean_type_node, cond, t);
  gimplify_to_stmt_list (&t);
  tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING);

  /* Build the conditional jump.  'cond' is just a temporary so we can simply
     build a void COND_EXPR.  We do need labels in both arms though.  */
  t = build3 (COND_EXPR, void_type_node, cond,
              build1 (GOTO_EXPR, void_type_node, tree_block_label (then_bb)),
              build1 (GOTO_EXPR, void_type_node, tree_block_label (join_bb)));
  SET_EXPR_LOCUS (t, locus);
  tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING);

  /* At this point, after so much hard work, we have only constructed the
     conditional jump,

     if (__mf_elem->low > __mf_base
         || (__mf_elem->high < __mf_limit))

     The lowered GIMPLE tree representing this code is in the statement list
     starting at 'head'.

     We can insert this now in the current basic block, i.e. the one that the
     statement we're instrumenting was originally in.  */
  bsi = bsi_last (cond_bb);
  for (tsi = head; ! tsi_end_p (tsi); tsi_next (&tsi))
    bsi_insert_after (&bsi, tsi_stmt (tsi), BSI_CONTINUE_LINKING);

  /* Now build up the body of the cache-miss handling:

     __mf_check();
     refresh *_l vars.

     This is the body of the conditional.  */

  u = tree_cons (NULL_TREE,
                 mf_file_function_line_tree (locus == NULL ? UNKNOWN_LOCATION
                                                           : *locus),
                 NULL_TREE);
  u = tree_cons (NULL_TREE, dirflag, u);
  /* NB: we pass the overall [base..limit] range to mf_check.  */
  u = tree_cons (NULL_TREE,
                 fold_build2 (PLUS_EXPR, integer_type_node,
                              fold_build2 (MINUS_EXPR, mf_uintptr_type,
                                           mf_limit, mf_base),
                              integer_one_node),
                 u);
  u = tree_cons (NULL_TREE, mf_base, u);
  t = build_function_call_expr (mf_check_fndecl, u);
  gimplify_to_stmt_list (&t);
  head = tsi_start (t);
  tsi = tsi_last (t);

  if (! flag_mudflap_threads)
    {
      t = build2 (MODIFY_EXPR, void_type_node,
                  mf_cache_shift_decl_l, mf_cache_shift_decl);
      tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING);

      t = build2 (MODIFY_EXPR, void_type_node,
                  mf_cache_mask_decl_l, mf_cache_mask_decl);
      tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING);
    }

  /* Insert the check code in the THEN block.  */
  bsi = bsi_start (then_bb);
  for (tsi = head; ! tsi_end_p (tsi); tsi_next (&tsi))
    bsi_insert_after (&bsi, tsi_stmt (tsi), BSI_CONTINUE_LINKING);

  *instr_bsi = bsi_start (join_bb);
  bsi_next (instr_bsi);
}
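/* A hedged, source-level sketch (not part of the original sources) of the
   check that mf_build_check_statement_for inserts before each instrumented
   access.  The struct layout, variable names, and the __mf_check signature
   below are hypothetical simplifications of the libmudflap runtime.  */

#include <stddef.h>
#include <stdint.h>

struct mf_cache_entry { uintptr_t low, high; };
extern struct mf_cache_entry __mf_lookup_cache[];
extern uintptr_t __mf_shift, __mf_mask;
extern void __mf_check (void *ptr, size_t sz, int dirflag, const char *where);

static void
example_inline_check (void *base, void *limit, int dirflag)
{
  uintptr_t mf_base = (uintptr_t) base;
  uintptr_t mf_limit = (uintptr_t) limit;
  struct mf_cache_entry *elem
    = &__mf_lookup_cache[(mf_base >> __mf_shift) & __mf_mask];

  /* Cache miss: fall back to the full check in the THEN block.  */
  if (elem->low > mf_base || elem->high < mf_limit)
    __mf_check (base, mf_limit - mf_base + 1, dirflag, "file:line (fn)");
}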
/* Emit any file-wide instrumentation.  */
void
mudflap_finish_file (void)
{
  tree ctor_statements = NULL_TREE;

  /* No need to continue when there were errors.  */
  if (errorcount != 0 || sorrycount != 0)
    return;

  /* Insert a call to __mf_init.  */
  {
    tree call2_stmt = build_function_call_expr (mf_init_fndecl, NULL_TREE);
    append_to_statement_list (call2_stmt, &ctor_statements);
  }

  /* If appropriate, call __mf_set_options to pass along read-ignore mode.  */
  if (flag_mudflap_ignore_reads)
    {
      tree arg = tree_cons (NULL_TREE,
                            mf_build_string ("-ignore-reads"), NULL_TREE);
      tree call_stmt = build_function_call_expr (mf_set_options_fndecl, arg);
      append_to_statement_list (call_stmt, &ctor_statements);
    }

  /* Process all enqueued object decls.  */
  if (deferred_static_decls)
    {
      size_t i;
      tree obj;
      for (i = 0; VEC_iterate (tree, deferred_static_decls, i, obj); i++)
        {
          gcc_assert (DECL_P (obj));

          if (mf_marked_p (obj))
            continue;

          /* Omit registration for static unaddressed objects.  NB: Perform
             registration for non-static objects regardless of TREE_USED or
             TREE_ADDRESSABLE, because they may be used from other
             compilation units.  */
          if (! TREE_PUBLIC (obj) && ! TREE_ADDRESSABLE (obj))
            continue;

          if (! COMPLETE_TYPE_P (TREE_TYPE (obj)))
            {
              warning (0, "mudflap cannot track unknown size extern %qs",
                       IDENTIFIER_POINTER (DECL_NAME (obj)));
              continue;
            }

          mudflap_register_call (obj,
                                 size_in_bytes (TREE_TYPE (obj)),
                                 mf_varname_tree (obj));
        }

      VEC_truncate (tree, deferred_static_decls, 0);
    }

  /* Append all the enqueued registration calls.  */
  if (enqueued_call_stmt_chain)
    {
      append_to_statement_list (enqueued_call_stmt_chain, &ctor_statements);
      enqueued_call_stmt_chain = NULL_TREE;
    }

  cgraph_build_static_cdtor ('I', ctor_statements,
                             MAX_RESERVED_INIT_PRIORITY - 1);
}