tree
tree_code_create_variable (unsigned int storage_class, unsigned char* chars,
                           unsigned int length, unsigned int expression_type,
                           tree init, unsigned char* filename, int lineno)
{
  tree var_type;
  tree var_id;
  tree var_decl;

  /* 1. Build the type.  */
  var_type = get_type_for_numeric_type (expression_type);

  /* 2. Build the name.  */
  if (chars[length] != 0)
    abort (); /* Should be null terminated.  */

  var_id = get_identifier ((const char*)chars);

  /* 3. Build the decl and set up init.  */
  var_decl = build_decl (VAR_DECL, var_id, var_type);

  /* 3a. Initialization.  */
  if (init)
    DECL_INITIAL (var_decl) = build1 (CONVERT_EXPR, var_type, init);
  else
    DECL_INITIAL (var_decl) = NULL_TREE;

  /* 4. Compute size etc.  */
  layout_decl (var_decl, 0);

  if (TYPE_SIZE (var_type) == 0)
    abort (); /* Did not calculate size.  */

  DECL_CONTEXT (var_decl) = current_function_decl;

  DECL_SOURCE_FILE (var_decl) = (const char *)filename;
  DECL_SOURCE_LINE (var_decl) = lineno;

  /* Set the storage mode and whether only visible in the same file.  */
  switch (storage_class)
    {
    case STATIC_STORAGE:
      TREE_STATIC (var_decl) = 1;
      TREE_PUBLIC (var_decl) = 0;
      break;

    case AUTOMATIC_STORAGE:
      TREE_STATIC (var_decl) = 0;
      TREE_PUBLIC (var_decl) = 0;
      break;

    case EXTERNAL_DEFINITION_STORAGE:
      TREE_STATIC (var_decl) = 0;
      TREE_PUBLIC (var_decl) = 1;
      break;

    case EXTERNAL_REFERENCE_STORAGE:
      DECL_EXTERNAL (var_decl) = 1;
      TREE_PUBLIC (var_decl) = 0;
      break;

    default:
      abort ();
    }

  /* This should really only be set if the variable is used.  */
  TREE_USED (var_decl) = 1;

  /* Expand declaration and initial value if any.  */
  if (TREE_STATIC (var_decl))
    rest_of_decl_compilation (var_decl, 0, 0, 0);
  else
    {
      expand_decl (var_decl);
      if (DECL_INITIAL (var_decl))
        expand_decl_init (var_decl);
    }

  return pushdecl (copy_node (var_decl));
}
static tree
cp_convert_to_pointer (tree type, tree expr)
{
  tree intype = TREE_TYPE (expr);
  enum tree_code form;
  tree rval;

  if (intype == error_mark_node)
    return error_mark_node;

  if (MAYBE_CLASS_TYPE_P (intype))
    {
      intype = complete_type (intype);
      if (!COMPLETE_TYPE_P (intype))
        {
          error ("can't convert from incomplete type %qT to %qT",
                 intype, type);
          return error_mark_node;
        }

      rval = build_type_conversion (type, expr);
      if (rval)
        {
          if (rval == error_mark_node)
            error ("conversion of %qE from %qT to %qT is ambiguous",
                   expr, intype, type);
          return rval;
        }
    }

  /* Handle anachronistic conversions from (::*)() to cv void* or (*)().  */
  if (TREE_CODE (type) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE
          || VOID_TYPE_P (TREE_TYPE (type))))
    {
      if (TYPE_PTRMEMFUNC_P (intype)
          || TREE_CODE (intype) == METHOD_TYPE)
        return convert_member_func_to_ptr (type, expr);
      if (TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
        return build_nop (type, expr);
      intype = TREE_TYPE (expr);
    }

  if (expr == error_mark_node)
    return error_mark_node;

  form = TREE_CODE (intype);

  if (POINTER_TYPE_P (intype))
    {
      intype = TYPE_MAIN_VARIANT (intype);

      if (TYPE_MAIN_VARIANT (type) != intype
          && TREE_CODE (type) == POINTER_TYPE
          && TREE_CODE (TREE_TYPE (type)) == RECORD_TYPE
          && MAYBE_CLASS_TYPE_P (TREE_TYPE (type))
          && MAYBE_CLASS_TYPE_P (TREE_TYPE (intype))
          && TREE_CODE (TREE_TYPE (intype)) == RECORD_TYPE)
        {
          enum tree_code code = PLUS_EXPR;
          tree binfo;
          tree intype_class;
          tree type_class;
          bool same_p;

          intype_class = TREE_TYPE (intype);
          type_class = TREE_TYPE (type);

          same_p = same_type_p (TYPE_MAIN_VARIANT (intype_class),
                                TYPE_MAIN_VARIANT (type_class));
          binfo = NULL_TREE;
          /* Try derived to base conversion.  */
          if (!same_p)
            binfo = lookup_base (intype_class, type_class, ba_check, NULL);
          if (!same_p && !binfo)
            {
              /* Try base to derived conversion.  */
              binfo = lookup_base (type_class, intype_class, ba_check, NULL);
              code = MINUS_EXPR;
            }
          if (binfo == error_mark_node)
            return error_mark_node;
          if (binfo || same_p)
            {
              if (binfo)
                expr = build_base_path (code, expr, binfo, 0);
              /* Add any qualifier conversions.  */
              return build_nop (type, expr);
            }
        }

      if (TYPE_PTRMEMFUNC_P (type))
        {
          error ("cannot convert %qE from type %qT to type %qT",
                 expr, intype, type);
          return error_mark_node;
        }

      return build_nop (type, expr);
    }
  else if ((TYPE_PTRMEM_P (type) && TYPE_PTRMEM_P (intype))
           || (TYPE_PTRMEMFUNC_P (type) && TYPE_PTRMEMFUNC_P (intype)))
    return convert_ptrmem (type, expr, /*allow_inverse_p=*/false,
                           /*c_cast_p=*/false);
  else if (TYPE_PTRMEMFUNC_P (intype))
    {
      if (!warn_pmf2ptr)
        {
          if (TREE_CODE (expr) == PTRMEM_CST)
            return cp_convert_to_pointer (type, PTRMEM_CST_MEMBER (expr));
          else if (TREE_CODE (expr) == OFFSET_REF)
            {
              tree object = TREE_OPERAND (expr, 0);
              return get_member_function_from_ptrfunc (&object,
                                                       TREE_OPERAND (expr, 1));
            }
        }
      error ("cannot convert %qE from type %qT to type %qT",
             expr, intype, type);
      return error_mark_node;
    }

  if (integer_zerop (expr))
    {
      if (TYPE_PTRMEMFUNC_P (type))
        return build_ptrmemfunc (TYPE_PTRMEMFUNC_FN_TYPE (type), expr, 0,
                                 /*c_cast_p=*/false);

      if (TYPE_PTRMEM_P (type))
        {
          /* A NULL pointer-to-member is represented by -1, not by
             zero.  */
          expr = build_int_cst_type (type, -1);
        }
      else
        expr = build_int_cst (type, 0);

      return expr;
    }
  else if (TYPE_PTR_TO_MEMBER_P (type) && INTEGRAL_CODE_P (form))
    {
      error ("invalid conversion from %qT to %qT", intype, type);
      return error_mark_node;
    }

  if (INTEGRAL_CODE_P (form))
    {
      if (TYPE_PRECISION (intype) == POINTER_SIZE)
        return build1 (CONVERT_EXPR, type, expr);
      expr = cp_convert (c_common_type_for_size (POINTER_SIZE, 0), expr);
      /* Modes may be different but sizes should be the same.  There
         is supposed to be some integral type that is the same width
         as a pointer.  */
      gcc_assert (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (expr)))
                  == GET_MODE_SIZE (TYPE_MODE (type)));

      return convert_to_pointer (type, expr);
    }

  if (type_unknown_p (expr))
    return instantiate_type (type, expr, tf_warning_or_error);

  error ("cannot convert %qE from type %qT to type %qT",
         expr, intype, type);
  return error_mark_node;
}
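/* Illustrative sketch (not part of GCC) of the source-level conversions
   cp_convert_to_pointer implements; the class names below are
   hypothetical.  */
#if 0
struct Base1 { int a; };
struct Base2 { int b; };
struct Derived : Base1, Base2 { };

void
example (Derived *d)
{
  Base2 *p = d;          /* Derived-to-base: build_base_path adjusts the
                            pointer by the offset of Base2 in Derived.  */
  int Base2::*pm = 0;    /* Null pointer-to-data-member: stored as -1,
                            not 0, so a member at offset 0 stays
                            distinguishable from null.  */
}
#endif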
tree
expand_start_catch_block (tree decl)
{
  tree exp;
  tree type;

  if (! doing_eh (1))
    return NULL_TREE;

  /* Make sure this declaration is reasonable.  */
  if (decl && !complete_ptr_ref_or_void_ptr_p (TREE_TYPE (decl), NULL_TREE))
    decl = NULL_TREE;

  if (decl)
    type = prepare_eh_type (TREE_TYPE (decl));
  else
    type = NULL_TREE;

  if (decl && decl_is_java_type (type, 1))
    {
      /* Java only passes object via pointer and doesn't require
         adjusting.  The java object is immediately before the
         generic exception header.  */
      exp = build_exc_ptr ();
      exp = build1 (NOP_EXPR, build_pointer_type (type), exp);
      exp = build2 (MINUS_EXPR, TREE_TYPE (exp), exp,
                    TYPE_SIZE_UNIT (TREE_TYPE (exp)));
      exp = build_indirect_ref (exp, NULL);
      initialize_handler_parm (decl, exp);
      return type;
    }

  /* Call __cxa_end_catch at the end of processing the exception.  */
  push_eh_cleanup (type);

  /* If there's no decl at all, then all we need to do is make sure
     to tell the runtime that we've begun handling the exception.  */
  if (decl == NULL)
    /* APPLE LOCAL radar 2848255 */
    finish_expr_stmt (do_begin_catch (NULL_TREE));

  /* If the C++ object needs constructing, we need to do that before
     calling __cxa_begin_catch, so that std::uncaught_exception gets
     the right value during the copy constructor.  */
  /* APPLE LOCAL begin mainline 2006-02-24 4086777 */
  else if (flag_use_cxa_get_exception_ptr
           && TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
  /* APPLE LOCAL end mainline 2006-02-24 4086777 */
    {
      exp = do_get_exception_ptr ();
      initialize_handler_parm (decl, exp);
      /* APPLE LOCAL radar 2848255 */
      finish_expr_stmt (do_begin_catch (type));
    }

  /* Otherwise the type uses a bitwise copy, and we don't have to worry
     about the value of std::uncaught_exception and therefore can do the
     copy with the return value of __cxa_begin_catch instead.  */
  else
    {
      /* APPLE LOCAL radar 2848255 */
      tree init = do_begin_catch (type);
      exp = create_temporary_var (ptr_type_node);
      DECL_REGISTER (exp) = 1;
      cp_finish_decl (exp, init, NULL_TREE, LOOKUP_ONLYCONVERTING);
      finish_expr_stmt (build_modify_expr (exp, INIT_EXPR, init));
      initialize_handler_parm (decl, exp);
    }

  return type;
}
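/* Illustrative sketch (not from GCC) of the handler shapes the function
   above lowers; "E", "f" and "g" are hypothetical.  */
#if 0
try { f (); }
catch (...)   /* No decl: just do_begin_catch / push_eh_cleanup.  */
  { }
try { g (); }
catch (E e)   /* By value, E needs constructing: with
                 flag_use_cxa_get_exception_ptr the copy runs on the
                 result of do_get_exception_ptr before __cxa_begin_catch,
                 so std::uncaught_exception is correct in the copy
                 constructor.  */
  { }
#endif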
static tree
convert_real (tree method_return_type, tree orig_call)
{
  return build1 (VIEW_CONVERT_EXPR, method_return_type,
                 CALL_EXPR_ARG (orig_call, 0));
}
tree
c_finish_omp_atomic (location_t loc, enum tree_code code,
                     enum tree_code opcode, tree lhs, tree rhs,
                     tree v, tree lhs1, tree rhs1, bool swapped, bool seq_cst)
{
  tree x, type, addr, pre = NULL_TREE;

  if (lhs == error_mark_node || rhs == error_mark_node
      || v == error_mark_node || lhs1 == error_mark_node
      || rhs1 == error_mark_node)
    return error_mark_node;

  /* ??? According to one reading of the OpenMP spec, complex types are
     supported, but there are no atomic stores for any architecture.
     But at least icc 9.0 doesn't support complex types here either.
     And let's not even talk about vector types...  */
  type = TREE_TYPE (lhs);
  if (!INTEGRAL_TYPE_P (type)
      && !POINTER_TYPE_P (type)
      && !SCALAR_FLOAT_TYPE_P (type))
    {
      error_at (loc, "invalid expression type for %<#pragma omp atomic%>");
      return error_mark_node;
    }

  if (opcode == RDIV_EXPR)
    opcode = TRUNC_DIV_EXPR;

  /* ??? Validate that rhs does not overlap lhs.  */

  /* Take and save the address of the lhs.  From then on we'll reference
     it via indirection.  */
  addr = build_unary_op (loc, ADDR_EXPR, lhs, 0);
  if (addr == error_mark_node)
    return error_mark_node;
  addr = save_expr (addr);
  if (TREE_CODE (addr) != SAVE_EXPR
      && (TREE_CODE (addr) != ADDR_EXPR
          || !VAR_P (TREE_OPERAND (addr, 0))))
    {
      /* Make sure LHS is simple enough so that goa_lhs_expr_p can
         recognize it even after unsharing function body.  */
      tree var = create_tmp_var_raw (TREE_TYPE (addr));
      DECL_CONTEXT (var) = current_function_decl;
      addr = build4 (TARGET_EXPR, TREE_TYPE (addr), var, addr, NULL, NULL);
    }
  lhs = build_indirect_ref (loc, addr, RO_NULL);

  if (code == OMP_ATOMIC_READ)
    {
      x = build1 (OMP_ATOMIC_READ, type, addr);
      SET_EXPR_LOCATION (x, loc);
      OMP_ATOMIC_SEQ_CST (x) = seq_cst;
      return build_modify_expr (loc, v, NULL_TREE, NOP_EXPR,
                                loc, x, NULL_TREE);
    }

  /* There are lots of warnings, errors, and conversions that need to happen
     in the course of interpreting a statement.  Use the normal mechanisms
     to do this, and then take it apart again.  */
  if (swapped)
    {
      rhs = build_binary_op (loc, opcode, rhs, lhs, 1);
      opcode = NOP_EXPR;
    }
  bool save = in_late_binary_op;
  in_late_binary_op = true;
  x = build_modify_expr (loc, lhs, NULL_TREE, opcode, loc, rhs, NULL_TREE);
  in_late_binary_op = save;
  if (x == error_mark_node)
    return error_mark_node;
  if (TREE_CODE (x) == COMPOUND_EXPR)
    {
      pre = TREE_OPERAND (x, 0);
      gcc_assert (TREE_CODE (pre) == SAVE_EXPR);
      x = TREE_OPERAND (x, 1);
    }
  gcc_assert (TREE_CODE (x) == MODIFY_EXPR);
  rhs = TREE_OPERAND (x, 1);

  /* Punt the actual generation of atomic operations to common code.  */
  if (code == OMP_ATOMIC)
    type = void_type_node;
  x = build2 (code, type, addr, rhs);
  SET_EXPR_LOCATION (x, loc);
  OMP_ATOMIC_SEQ_CST (x) = seq_cst;

  /* Generally it is hard to prove lhs1 and lhs are the same memory
     location, just diagnose different variables.  */
  if (rhs1 && VAR_P (rhs1) && VAR_P (lhs) && rhs1 != lhs)
    {
      if (code == OMP_ATOMIC)
        error_at (loc, "%<#pragma omp atomic update%> uses two different variables for memory");
      else
        error_at (loc, "%<#pragma omp atomic capture%> uses two different variables for memory");
      return error_mark_node;
    }

  if (code != OMP_ATOMIC)
    {
      /* Generally it is hard to prove lhs1 and lhs are the same memory
         location, just diagnose different variables.  */
      if (lhs1 && VAR_P (lhs1) && VAR_P (lhs))
        {
          if (lhs1 != lhs)
            {
              error_at (loc, "%<#pragma omp atomic capture%> uses two different variables for memory");
              return error_mark_node;
            }
        }
      x = build_modify_expr (loc, v, NULL_TREE, NOP_EXPR,
                             loc, x, NULL_TREE);
      if (rhs1 && rhs1 != lhs)
        {
          tree rhs1addr = build_unary_op (loc, ADDR_EXPR, rhs1, 0);
          if (rhs1addr == error_mark_node)
            return error_mark_node;
          x = omit_one_operand_loc (loc, type, x, rhs1addr);
        }
      if (lhs1 && lhs1 != lhs)
        {
          tree lhs1addr = build_unary_op (loc, ADDR_EXPR, lhs1, 0);
          if (lhs1addr == error_mark_node)
            return error_mark_node;
          if (code == OMP_ATOMIC_CAPTURE_OLD)
            x = omit_one_operand_loc (loc, type, x, lhs1addr);
          else
            {
              x = save_expr (x);
              x = omit_two_operands_loc (loc, type, x, x, lhs1addr);
            }
        }
    }
  else if (rhs1 && rhs1 != lhs)
    {
      tree rhs1addr = build_unary_op (loc, ADDR_EXPR, rhs1, 0);
      if (rhs1addr == error_mark_node)
        return error_mark_node;
      x = omit_one_operand_loc (loc, type, x, rhs1addr);
    }

  if (pre)
    x = omit_one_operand_loc (loc, type, x, pre);
  return x;
}
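/* Illustrative sketch (not from GCC) of the atomic forms mapped to the
   tree codes handled above.  */
#if 0
int x, v;
#pragma omp atomic update      /* code == OMP_ATOMIC            */
x += 1;
#pragma omp atomic read        /* code == OMP_ATOMIC_READ       */
v = x;
#pragma omp atomic capture     /* code == OMP_ATOMIC_CAPTURE_*  */
v = x++;
#endif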
/* The method walks the node hierarchy to the topmost node.  This is
   exactly how it's done in mudflap and has been borrowed.  */
static tree
mf_walk_comp_ref (tree *tp, tree type, location_t location,
                  tree *addr_store, tree *base_store)
{
  tree var, t, addr, base, size;

  t = *tp;

  int component_ref_only = (TREE_CODE (t) == COMPONENT_REF);
  /* If we have a bitfield component reference, we must note the
     innermost addressable object in ELT, from which we will construct
     the byte-addressable bounds of the bitfield.  */
  tree elt = NULL_TREE;
  int bitfield_ref_p = (TREE_CODE (t) == COMPONENT_REF
                        && DECL_BIT_FIELD_TYPE (TREE_OPERAND (t, 1)));

  /* Iterate to the top of the ARRAY_REF/COMPONENT_REF containment
     hierarchy to find the outermost VAR_DECL.  */
  var = TREE_OPERAND (t, 0);
  while (1)
    {
      if (bitfield_ref_p && elt == NULL_TREE
          && (TREE_CODE (var) == ARRAY_REF
              || TREE_CODE (var) == COMPONENT_REF))
        elt = var;

      if (TREE_CODE (var) == ARRAY_REF)
        {
          component_ref_only = 0;
          var = TREE_OPERAND (var, 0);
        }
      else if (TREE_CODE (var) == COMPONENT_REF)
        var = TREE_OPERAND (var, 0);
      else if (INDIRECT_REF_P (var)
               || TREE_CODE (var) == MEM_REF)
        {
          base = TREE_OPERAND (var, 0);
          break;
        }
      else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
        {
          var = TREE_OPERAND (var, 0);
          if (CONSTANT_CLASS_P (var)
              && TREE_CODE (var) != STRING_CST)
            return NULL_TREE;
        }
      else
        {
          DEBUGLOG ("TREE_CODE(temp) : %s comp_ref_only = %d eligible = %d\n",
                    tree_code_name[(int) TREE_CODE (var)], component_ref_only,
                    mf_decl_eligible_p (var));
          gcc_assert (TREE_CODE (var) == VAR_DECL
                      || TREE_CODE (var) == SSA_NAME /* TODO: Check this.  */
                      || TREE_CODE (var) == PARM_DECL
                      || TREE_CODE (var) == RESULT_DECL
                      || TREE_CODE (var) == STRING_CST);

          /* Don't instrument this access if the underlying variable is
             not "eligible".  This test matches those arrays that have
             only known-valid indexes, and thus are not labeled
             TREE_ADDRESSABLE.  */
          if (! mf_decl_eligible_p (var)) // TODO: is this needed? || component_ref_only)
            return NULL_TREE;
          else
            {
              base = build1 (ADDR_EXPR,
                             build_pointer_type (TREE_TYPE (var)), var);
              break;
            }
        }
    }

  /* Handle the case of ordinary non-indirection structure accesses.  These
     have only nested COMPONENT_REF nodes (no INDIRECT_REF), but pass through
     the above filter loop.  Note that it's possible for such a struct
     variable to match the eligible_p test because someone else might take
     its address sometime.  */

  /* We need special processing for bitfield components, because their
     addresses cannot be taken.  */
  if (bitfield_ref_p)
    {
      tree field = TREE_OPERAND (t, 1);

      if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
        size = DECL_SIZE_UNIT (field);

      if (elt)
        elt = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (elt)), elt);
      addr = fold_convert_loc (location, ptr_type_node, elt ? elt : base);
      addr = fold_build2_loc (location, POINTER_PLUS_EXPR, ptr_type_node,
                              addr, fold_convert_loc (location, sizetype,
                                                      byte_position (field)));
    }
  else
    addr = build1 (ADDR_EXPR, build_pointer_type (type), t);

  if (addr_store)
    *addr_store = addr;
  if (base_store)
    *base_store = base;  /* Was "*base_store = addr", which left BASE dead
                            and stored the address twice; presumably a
                            copy-paste bug.  */

  return var;
}
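/* Illustrative sketch (not from GCC) of the reference chains the loop
   above walks; "s" and "p" are hypothetical.  */
#if 0
struct S { int a[4]; struct { int b : 3; } c; };
struct S s, *p;
int x = p->a[2]; /* ARRAY_REF over COMPONENT_REF over INDIRECT_REF:
                    the walk stops at the INDIRECT_REF, so base = p.  */
int y = s.c.b;   /* Bitfield: its address cannot be taken, so ADDR is
                    rebuilt from the enclosing object plus
                    byte_position (field).  */
#endif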
static tree
gfc_trans_omp_atomic (gfc_code *code)
{
  gfc_se lse;
  gfc_se rse;
  gfc_expr *expr2, *e;
  gfc_symbol *var;
  stmtblock_t block;
  tree lhsaddr, type, rhs, x;
  enum tree_code op = ERROR_MARK;
  bool var_on_left = false;

  code = code->block->next;
  gcc_assert (code->op == EXEC_ASSIGN);
  gcc_assert (code->next == NULL);
  var = code->expr->symtree->n.sym;

  gfc_init_se (&lse, NULL);
  gfc_init_se (&rse, NULL);
  gfc_start_block (&block);

  gfc_conv_expr (&lse, code->expr);
  gfc_add_block_to_block (&block, &lse.pre);
  type = TREE_TYPE (lse.expr);
  lhsaddr = gfc_build_addr_expr (NULL, lse.expr);

  expr2 = code->expr2;
  if (expr2->expr_type == EXPR_FUNCTION
      && expr2->value.function.isym->generic_id == GFC_ISYM_CONVERSION)
    expr2 = expr2->value.function.actual->expr;

  if (expr2->expr_type == EXPR_OP)
    {
      gfc_expr *e;
      switch (expr2->value.op.operator)
        {
        case INTRINSIC_PLUS:
          op = PLUS_EXPR;
          break;
        case INTRINSIC_TIMES:
          op = MULT_EXPR;
          break;
        case INTRINSIC_MINUS:
          op = MINUS_EXPR;
          break;
        case INTRINSIC_DIVIDE:
          if (expr2->ts.type == BT_INTEGER)
            op = TRUNC_DIV_EXPR;
          else
            op = RDIV_EXPR;
          break;
        case INTRINSIC_AND:
          op = TRUTH_ANDIF_EXPR;
          break;
        case INTRINSIC_OR:
          op = TRUTH_ORIF_EXPR;
          break;
        case INTRINSIC_EQV:
          op = EQ_EXPR;
          break;
        case INTRINSIC_NEQV:
          op = NE_EXPR;
          break;
        default:
          gcc_unreachable ();
        }
      e = expr2->value.op.op1;
      if (e->expr_type == EXPR_FUNCTION
          && e->value.function.isym->generic_id == GFC_ISYM_CONVERSION)
        e = e->value.function.actual->expr;
      if (e->expr_type == EXPR_VARIABLE
          && e->symtree != NULL
          && e->symtree->n.sym == var)
        {
          expr2 = expr2->value.op.op2;
          var_on_left = true;
        }
      else
        {
          e = expr2->value.op.op2;
          if (e->expr_type == EXPR_FUNCTION
              && e->value.function.isym->generic_id == GFC_ISYM_CONVERSION)
            e = e->value.function.actual->expr;
          gcc_assert (e->expr_type == EXPR_VARIABLE
                      && e->symtree != NULL
                      && e->symtree->n.sym == var);
          expr2 = expr2->value.op.op1;
          var_on_left = false;
        }
      gfc_conv_expr (&rse, expr2);
      gfc_add_block_to_block (&block, &rse.pre);
    }
  else
    {
      gcc_assert (expr2->expr_type == EXPR_FUNCTION);
      switch (expr2->value.function.isym->generic_id)
        {
        case GFC_ISYM_MIN:
          op = MIN_EXPR;
          break;
        case GFC_ISYM_MAX:
          op = MAX_EXPR;
          break;
        case GFC_ISYM_IAND:
          op = BIT_AND_EXPR;
          break;
        case GFC_ISYM_IOR:
          op = BIT_IOR_EXPR;
          break;
        case GFC_ISYM_IEOR:
          op = BIT_XOR_EXPR;
          break;
        default:
          gcc_unreachable ();
        }
      e = expr2->value.function.actual->expr;
      gcc_assert (e->expr_type == EXPR_VARIABLE
                  && e->symtree != NULL
                  && e->symtree->n.sym == var);

      gfc_conv_expr (&rse, expr2->value.function.actual->next->expr);
      gfc_add_block_to_block (&block, &rse.pre);
      if (expr2->value.function.actual->next->next != NULL)
        {
          tree accum = gfc_create_var (TREE_TYPE (rse.expr), NULL);
          gfc_actual_arglist *arg;

          gfc_add_modify_expr (&block, accum, rse.expr);
          for (arg = expr2->value.function.actual->next->next; arg;
               arg = arg->next)
            {
              gfc_init_block (&rse.pre);
              gfc_conv_expr (&rse, arg->expr);
              gfc_add_block_to_block (&block, &rse.pre);
              x = fold_build2 (op, TREE_TYPE (accum), accum, rse.expr);
              gfc_add_modify_expr (&block, accum, x);
            }

          rse.expr = accum;
        }

      expr2 = expr2->value.function.actual->next->expr;
    }

  lhsaddr = save_expr (lhsaddr);
  rhs = gfc_evaluate_now (rse.expr, &block);
  x = convert (TREE_TYPE (rhs), build_fold_indirect_ref (lhsaddr));

  if (var_on_left)
    x = fold_build2 (op, TREE_TYPE (rhs), x, rhs);
  else
    x = fold_build2 (op, TREE_TYPE (rhs), rhs, x);

  if (TREE_CODE (TREE_TYPE (rhs)) == COMPLEX_TYPE
      && TREE_CODE (type) != COMPLEX_TYPE)
    x = build1 (REALPART_EXPR, TREE_TYPE (TREE_TYPE (rhs)), x);

  x = build2_v (OMP_ATOMIC, lhsaddr, convert (type, x));
  gfc_add_expr_to_block (&block, x);

  gfc_add_block_to_block (&block, &lse.pre);
  gfc_add_block_to_block (&block, &rse.pre);

  return gfc_finish_block (&block);
}
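/* Illustrative sketch (not from GCC) of what the function above accepts
   and emits for Fortran "!$omp atomic" over "x = x op expr" (or an
   intrinsic such as min/max, whose extra arguments are first folded
   left-to-right into a temporary ACCUM).  Conceptually:

     rhs = <expr evaluated once, outside the atomic operation>;
     OMP_ATOMIC (&x, var_on_left ? (x op rhs) : (rhs op x));  */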
static gimple
input_gimple_stmt (struct lto_input_block *ib, struct data_in *data_in,
                   struct function *fn, enum LTO_tags tag)
{
  gimple stmt;
  enum gimple_code code;
  unsigned HOST_WIDE_INT num_ops;
  size_t i;
  struct bitpack_d bp;
  bool has_hist;

  code = lto_tag_to_gimple_code (tag);

  /* Read the tuple header.  */
  bp = streamer_read_bitpack (ib);
  num_ops = bp_unpack_var_len_unsigned (&bp);
  stmt = gimple_alloc (code, num_ops);
  stmt->gsbase.no_warning = bp_unpack_value (&bp, 1);
  if (is_gimple_assign (stmt))
    stmt->gsbase.nontemporal_move = bp_unpack_value (&bp, 1);
  stmt->gsbase.has_volatile_ops = bp_unpack_value (&bp, 1);
  has_hist = bp_unpack_value (&bp, 1);
  stmt->gsbase.subcode = bp_unpack_var_len_unsigned (&bp);

  /* Read location information.  */
  gimple_set_location (stmt, stream_input_location (&bp, data_in));

  /* Read lexical block reference.  */
  gimple_set_block (stmt, stream_read_tree (ib, data_in));

  /* Read in all the operands.  */
  switch (code)
    {
    case GIMPLE_RESX:
      gimple_resx_set_region (stmt, streamer_read_hwi (ib));
      break;

    case GIMPLE_EH_MUST_NOT_THROW:
      gimple_eh_must_not_throw_set_fndecl (stmt,
                                           stream_read_tree (ib, data_in));
      break;

    case GIMPLE_EH_DISPATCH:
      gimple_eh_dispatch_set_region (stmt, streamer_read_hwi (ib));
      break;

    case GIMPLE_ASM:
      {
        /* FIXME lto.  Move most of this into a new gimple_asm_set_string().  */
        tree str;
        stmt->gimple_asm.ni = streamer_read_uhwi (ib);
        stmt->gimple_asm.no = streamer_read_uhwi (ib);
        stmt->gimple_asm.nc = streamer_read_uhwi (ib);
        stmt->gimple_asm.nl = streamer_read_uhwi (ib);
        str = streamer_read_string_cst (data_in, ib);
        stmt->gimple_asm.string = TREE_STRING_POINTER (str);
      }
      /* Fallthru  */

    case GIMPLE_ASSIGN:
    case GIMPLE_CALL:
    case GIMPLE_RETURN:
    case GIMPLE_SWITCH:
    case GIMPLE_LABEL:
    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_DEBUG:
      for (i = 0; i < num_ops; i++)
        {
          tree *opp, op = stream_read_tree (ib, data_in);
          gimple_set_op (stmt, i, op);
          if (!op)
            continue;

          opp = gimple_op_ptr (stmt, i);
          if (TREE_CODE (*opp) == ADDR_EXPR)
            opp = &TREE_OPERAND (*opp, 0);
          while (handled_component_p (*opp))
            {
              if (TREE_CODE (*opp) == COMPONENT_REF)
                {
                  /* Fixup FIELD_DECLs in COMPONENT_REFs, they are not
                     handled by decl merging.  */
                  tree field, type, tem;
                  tree closest_match = NULL_TREE;
                  field = TREE_OPERAND (*opp, 1);
                  type = DECL_CONTEXT (field);
                  for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
                    {
                      if (TREE_CODE (tem) != FIELD_DECL)
                        continue;
                      if (tem == field)
                        break;
                      if (DECL_NONADDRESSABLE_P (tem)
                          == DECL_NONADDRESSABLE_P (field)
                          && gimple_compare_field_offset (tem, field))
                        {
                          if (types_compatible_p (TREE_TYPE (tem),
                                                  TREE_TYPE (field)))
                            break;
                          else
                            closest_match = tem;
                        }
                    }
                  /* In case of type mismatches across units we can fail to
                     unify some types and thus not find a proper field-decl
                     here.  */
                  if (tem == NULL_TREE)
                    {
                      /* Thus, emit an ODR violation warning.  */
                      if (warning_at (gimple_location (stmt), 0,
                                      "use of type %<%E%> with two mismatching "
                                      "declarations at field %<%E%>",
                                      type, TREE_OPERAND (*opp, 1)))
                        {
                          if (TYPE_FIELDS (type))
                            inform (DECL_SOURCE_LOCATION (TYPE_FIELDS (type)),
                                    "original type declared here");
                          inform (DECL_SOURCE_LOCATION (TREE_OPERAND (*opp, 1)),
                                  "field in mismatching type declared here");
                          if (TYPE_NAME (TREE_TYPE (field))
                              && (TREE_CODE (TYPE_NAME (TREE_TYPE (field)))
                                  == TYPE_DECL))
                            inform (DECL_SOURCE_LOCATION
                                      (TYPE_NAME (TREE_TYPE (field))),
                                    "type of field declared here");
                          if (closest_match
                              && TYPE_NAME (TREE_TYPE (closest_match))
                              && (TREE_CODE (TYPE_NAME
                                   (TREE_TYPE (closest_match))) == TYPE_DECL))
                            inform (DECL_SOURCE_LOCATION
                                      (TYPE_NAME (TREE_TYPE (closest_match))),
                                    "type of mismatching field declared here");
                        }
                      /* And finally fixup the types.  */
                      TREE_OPERAND (*opp, 0)
                        = build1 (VIEW_CONVERT_EXPR, type,
                                  TREE_OPERAND (*opp, 0));
                    }
                  else
                    TREE_OPERAND (*opp, 1) = tem;
                }
              else if ((TREE_CODE (*opp) == ARRAY_REF
                        || TREE_CODE (*opp) == ARRAY_RANGE_REF)
                       && (TREE_CODE (TREE_TYPE (TREE_OPERAND (*opp, 0)))
                           != ARRAY_TYPE))
                {
                  /* And fix up ARRAY_REFs to objects that had mismatched
                     types during symbol merging to avoid ICEs.  */
                  TREE_OPERAND (*opp, 0)
                    = build1 (VIEW_CONVERT_EXPR,
                              build_array_type (TREE_TYPE (*opp), NULL_TREE),
                              TREE_OPERAND (*opp, 0));
                }
              opp = &TREE_OPERAND (*opp, 0);
            }
          /* At LTO output time we wrap all global decls in MEM_REFs to
             allow seamless replacement with prevailing decls.  Undo this
             here if the prevailing decl allows for this.
             ??? Maybe we should simply fold all stmts.  */
          if (TREE_CODE (*opp) == MEM_REF
              && TREE_CODE (TREE_OPERAND (*opp, 0)) == ADDR_EXPR
              && integer_zerop (TREE_OPERAND (*opp, 1))
              && (TREE_THIS_VOLATILE (*opp)
                  == TREE_THIS_VOLATILE
                       (TREE_OPERAND (TREE_OPERAND (*opp, 0), 0)))
              && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*opp, 1)))
              && (TREE_TYPE (*opp)
                  == TREE_TYPE (TREE_TYPE (TREE_OPERAND (*opp, 1))))
              && (TREE_TYPE (*opp)
                  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*opp, 0), 0))))
            *opp = TREE_OPERAND (TREE_OPERAND (*opp, 0), 0);
        }
      if (is_gimple_call (stmt))
        {
          if (gimple_call_internal_p (stmt))
            gimple_call_set_internal_fn
              (stmt, streamer_read_enum (ib, internal_fn, IFN_LAST));
          else
            gimple_call_set_fntype (stmt, stream_read_tree (ib, data_in));
        }
      break;

    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
      break;

    case GIMPLE_TRANSACTION:
      gimple_transaction_set_label (stmt, stream_read_tree (ib, data_in));
      break;

    default:
      internal_error ("bytecode stream: unknown GIMPLE statement tag %s",
                      lto_tag_name (tag));
    }

  /* Update the properties of symbols, SSA names and labels associated
     with STMT.  */
  if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && TREE_CODE (lhs) == SSA_NAME)
        SSA_NAME_DEF_STMT (lhs) = stmt;
    }
  else if (code == GIMPLE_LABEL)
    gcc_assert (emit_label_in_global_context_p (gimple_label_label (stmt))
                || DECL_CONTEXT (gimple_label_label (stmt)) == fn->decl);
  else if (code == GIMPLE_ASM)
    {
      unsigned i;

      for (i = 0; i < gimple_asm_noutputs (stmt); i++)
        {
          tree op = TREE_VALUE (gimple_asm_output_op (stmt, i));
          if (TREE_CODE (op) == SSA_NAME)
            SSA_NAME_DEF_STMT (op) = stmt;
        }
    }

  /* Reset alias information.  */
  if (code == GIMPLE_CALL)
    gimple_call_reset_alias_info (stmt);

  /* Mark the statement modified so its operand vectors can be filled in.  */
  gimple_set_modified (stmt, true);

  if (has_hist)
    stream_in_histogram_value (ib, stmt);

  return stmt;
}
void
maybe_add_lambda_conv_op (tree type)
{
  bool nested = (cfun != NULL);
  bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
  tree callop = lambda_function (type);

  if (LAMBDA_EXPR_CAPTURE_LIST (CLASSTYPE_LAMBDA_EXPR (type)) != NULL_TREE)
    return;

  if (processing_template_decl)
    return;

  bool const generic_lambda_p = generic_lambda_fn_p (callop);

  if (!generic_lambda_p && DECL_INITIAL (callop) == NULL_TREE)
    {
      /* If the op() wasn't instantiated due to errors, give up.  */
      gcc_assert (errorcount || sorrycount);
      return;
    }

  /* Non-template conversion operators are defined directly with
     build_call_a and using DIRECT_ARGVEC for arguments (including
     'this').  Templates are deferred and the CALL is built in-place.
     In the case of a deduced return call op, the decltype expression,
     DECLTYPE_CALL, used as a substitute for the return type is also
     built in-place.  The arguments of DECLTYPE_CALL in the return
     expression may differ in flags from those in the body CALL.  In
     particular, parameter pack expansions are marked
     PACK_EXPANSION_LOCAL_P in the body CALL, but not in
     DECLTYPE_CALL.  */

  vec<tree, va_gc> *direct_argvec = 0;
  tree decltype_call = 0, call = 0;
  tree optype = TREE_TYPE (callop);
  tree fn_result = TREE_TYPE (optype);

  if (generic_lambda_p)
    {
      /* Prepare the dependent member call for the static member function
         '_FUN' and, potentially, prepare another call to be used in a
         decltype return expression for a deduced return call op to allow
         for simple implementation of the conversion operator.  */

      tree instance = build_nop (type, null_pointer_node);
      tree objfn = build_min (COMPONENT_REF, NULL_TREE,
                              instance, DECL_NAME (callop), NULL_TREE);
      int nargs = list_length (DECL_ARGUMENTS (callop)) - 1;

      call = prepare_op_call (objfn, nargs);
      if (type_uses_auto (fn_result))
        decltype_call = prepare_op_call (objfn, nargs);
    }
  else
    {
      direct_argvec = make_tree_vector ();
      direct_argvec->quick_push (build1 (NOP_EXPR,
                                         TREE_TYPE (DECL_ARGUMENTS (callop)),
                                         null_pointer_node));
    }

  /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order
     to declare the static member function "_FUN" below.  For each arg
     append to DIRECT_ARGVEC (for the non-template case) or populate the
     pre-allocated call args (for the template case).  If a parameter
     pack is found, expand it, flagging it as PACK_EXPANSION_LOCAL_P for
     the body call.  */

  tree fn_args = NULL_TREE;
  {
    int ix = 0;
    tree src = DECL_CHAIN (DECL_ARGUMENTS (callop));
    tree tgt;

    while (src)
      {
        tree new_node = copy_node (src);

        if (!fn_args)
          fn_args = tgt = new_node;
        else
          {
            TREE_CHAIN (tgt) = new_node;
            tgt = new_node;
          }

        mark_exp_read (tgt);

        if (generic_lambda_p)
          {
            if (DECL_PACK_P (tgt))
              {
                tree a = make_pack_expansion (tgt);
                if (decltype_call)
                  CALL_EXPR_ARG (decltype_call, ix) = copy_node (a);
                PACK_EXPANSION_LOCAL_P (a) = true;
                CALL_EXPR_ARG (call, ix) = a;
              }
            else
              {
                tree a = convert_from_reference (tgt);
                CALL_EXPR_ARG (call, ix) = a;
                if (decltype_call)
                  CALL_EXPR_ARG (decltype_call, ix) = copy_node (a);
              }
            ++ix;
          }
        else
          vec_safe_push (direct_argvec, tgt);

        src = TREE_CHAIN (src);
      }
  }

  if (generic_lambda_p)
    {
      if (decltype_call)
        {
          ++processing_template_decl;
          fn_result = finish_decltype_type
            (decltype_call, /*id_expression_or_member_access_p=*/false,
             tf_warning_or_error);
          --processing_template_decl;
        }
    }
  else
    call = build_call_a (callop, direct_argvec->length (),
                         direct_argvec->address ());

  CALL_FROM_THUNK_P (call) = 1;

  tree stattype = build_function_type (fn_result,
                                       FUNCTION_ARG_CHAIN (callop));
  stattype = (cp_build_type_attribute_variant
              (stattype, TYPE_ATTRIBUTES (optype)));

  /* First build up the conversion op.  */

  tree rettype = build_pointer_type (stattype);
  tree name = mangle_conv_op_name_for_type (rettype);
  tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
  tree fntype = build_method_type_directly (thistype, rettype,
                                            void_list_node);
  tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
  tree fn = convfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  DECL_ALIGN (fn) = MINIMUM_METHOD_BOUNDARY;
  SET_OVERLOADED_OPERATOR_CODE (fn, TYPE_EXPR);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_ARGUMENTS (fn) = build_this_parm (fntype, TYPE_QUAL_CONST);

  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  add_method (type, fn, NULL_TREE);

  /* Generic thunk code fails for varargs; we'll complain in mark_used if
     the conversion op is used.  */
  if (varargs_function_p (callop))
    {
      DECL_DELETED_FN (fn) = 1;
      return;
    }

  /* Now build up the thunk to be returned.  */

  name = get_identifier ("_FUN");
  tree statfn = build_lang_decl (FUNCTION_DECL, name, stattype);
  fn = statfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_STATIC_FUNCTION_P (fn) = 1;
  DECL_ARGUMENTS (fn) = fn_args;
  for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
    {
      /* Avoid duplicate -Wshadow warnings.  */
      DECL_NAME (arg) = NULL_TREE;
      DECL_CONTEXT (arg) = fn;
    }
  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  if (flag_sanitize & SANITIZE_NULL)
    {
      /* Don't UBsan this function; we're deliberately calling op() with
         a null object argument.  */
      tree attrs = build_tree_list (get_identifier ("no_sanitize_undefined"),
                                    NULL_TREE);
      cplus_decl_attributes (&fn, attrs, 0);
    }

  add_method (type, fn, NULL_TREE);

  if (nested)
    push_function_context ();
  else
    /* Still increment function_depth so that we don't GC in the middle
       of an expression.  */
    ++function_depth;

  /* Generate the body of the thunk.  */

  start_preparsed_function (statfn, NULL_TREE,
                            SF_PRE_PARSED | SF_INCLASS_INLINE);
  if (DECL_ONE_ONLY (statfn))
    {
      /* Put the thunk in the same comdat group as the call op.  */
      cgraph_node::get_create (statfn)->add_to_same_comdat_group
        (cgraph_node::get_create (callop));
    }
  tree body = begin_function_body ();
  tree compound_stmt = begin_compound_stmt (0);
  if (!generic_lambda_p)
    {
      set_flags_from_callee (call);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
        call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
    }
  call = convert_from_reference (call);
  finish_return_stmt (call);

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline*/2);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  /* Generate the body of the conversion op.  */

  start_preparsed_function (convfn, NULL_TREE,
                            SF_PRE_PARSED | SF_INCLASS_INLINE);
  body = begin_function_body ();
  compound_stmt = begin_compound_stmt (0);

  /* decl_needed_p needs to see that it's used.  */
  TREE_USED (statfn) = 1;
  finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline*/2);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  if (nested)
    pop_function_context ();
  else
    --function_depth;
}
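/* Illustrative sketch (not from GCC) of the feature implemented above:
   a captureless lambda converts to a plain function pointer via the
   synthesized conversion operator, which returns the static thunk
   _FUN.  */
#if 0
auto lam = [] (int x) { return x + 1; };
int (*fp) (int) = lam;   /* Calls the synthesized conversion operator.  */
int r = fp (41);         /* Invokes _FUN, which forwards to operator().  */
#endif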
static tree
cp_convert_to_pointer (tree type, tree expr, bool force)
{
  tree intype = TREE_TYPE (expr);
  enum tree_code form;
  tree rval;

  if (intype == error_mark_node)
    return error_mark_node;

  if (IS_AGGR_TYPE (intype))
    {
      intype = complete_type (intype);
      if (!COMPLETE_TYPE_P (intype))
        {
          error ("can't convert from incomplete type `%T' to `%T'",
                 intype, type);
          return error_mark_node;
        }

      rval = build_type_conversion (type, expr);
      if (rval)
        {
          if (rval == error_mark_node)
            error ("conversion of `%E' from `%T' to `%T' is ambiguous",
                   expr, intype, type);
          return rval;
        }
    }

  /* Handle anachronistic conversions from (::*)() to cv void* or (*)().  */
  if (TREE_CODE (type) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE
          || VOID_TYPE_P (TREE_TYPE (type))))
    {
      /* Allow an implicit this pointer for pointer to member
         functions.  */
      if (TYPE_PTRMEMFUNC_P (intype))
        {
          if (pedantic || warn_pmf2ptr)
            pedwarn ("converting from `%T' to `%T'", intype, type);
          if (TREE_CODE (expr) == PTRMEM_CST)
            expr = build_address (PTRMEM_CST_MEMBER (expr));
          else
            {
              tree decl = maybe_dummy_object (TYPE_PTRMEM_CLASS_TYPE (intype),
                                              0);
              decl = build_address (decl);
              expr = get_member_function_from_ptrfunc (&decl, expr);
            }
        }
      else if (TREE_CODE (TREE_TYPE (expr)) == METHOD_TYPE)
        {
          if (pedantic || warn_pmf2ptr)
            pedwarn ("converting from `%T' to `%T'", intype, type);
          expr = build_addr_func (expr);
        }
      if (TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
        return build_nop (type, expr);
      intype = TREE_TYPE (expr);
    }

  if (expr == error_mark_node)
    return error_mark_node;

  form = TREE_CODE (intype);

  if (POINTER_TYPE_P (intype))
    {
      intype = TYPE_MAIN_VARIANT (intype);

      if (TYPE_MAIN_VARIANT (type) != intype
          && TREE_CODE (type) == POINTER_TYPE
          && TREE_CODE (TREE_TYPE (type)) == RECORD_TYPE
          && IS_AGGR_TYPE (TREE_TYPE (type))
          && IS_AGGR_TYPE (TREE_TYPE (intype))
          && TREE_CODE (TREE_TYPE (intype)) == RECORD_TYPE)
        {
          enum tree_code code = PLUS_EXPR;
          tree binfo;
          tree intype_class;
          tree type_class;
          bool same_p;

          intype_class = TREE_TYPE (intype);
          type_class = TREE_TYPE (type);

          same_p = same_type_p (TYPE_MAIN_VARIANT (intype_class),
                                TYPE_MAIN_VARIANT (type_class));
          binfo = NULL_TREE;
          /* Try derived to base conversion.  */
          if (!same_p)
            binfo = lookup_base (intype_class, type_class, ba_check, NULL);
          if (!same_p && !binfo)
            {
              /* Try base to derived conversion.  */
              binfo = lookup_base (type_class, intype_class, ba_check, NULL);
              code = MINUS_EXPR;
            }
          if (binfo == error_mark_node)
            return error_mark_node;
          if (binfo || same_p)
            {
              if (binfo)
                expr = build_base_path (code, expr, binfo, 0);
              /* Add any qualifier conversions.  */
              return build_nop (type, expr);
            }
        }

      if (TYPE_PTRMEMFUNC_P (type))
        {
          error ("cannot convert `%E' from type `%T' to type `%T'",
                 expr, intype, type);
          return error_mark_node;
        }

      return build_nop (type, expr);
    }
  else if (TYPE_PTRMEM_P (type) && TYPE_PTRMEM_P (intype))
    {
      tree b1;
      tree b2;
      tree binfo;
      enum tree_code code = PLUS_EXPR;
      base_kind bk;

      b1 = TYPE_PTRMEM_CLASS_TYPE (type);
      b2 = TYPE_PTRMEM_CLASS_TYPE (intype);
      binfo = lookup_base (b1, b2, ba_check, &bk);
      if (!binfo)
        {
          binfo = lookup_base (b2, b1, ba_check, &bk);
          code = MINUS_EXPR;
        }
      if (binfo == error_mark_node)
        return error_mark_node;

      if (bk == bk_via_virtual)
        {
          if (force)
            warning ("pointer to member cast from `%T' to `%T' is via virtual base",
                     intype, type);
          else
            {
              error ("pointer to member cast from `%T' to `%T' is via virtual base",
                     intype, type);
              return error_mark_node;
            }
          /* This is a reinterpret cast, whose result is unspecified.
             We choose to do nothing.  */
          return build1 (NOP_EXPR, type, expr);
        }

      if (TREE_CODE (expr) == PTRMEM_CST)
        expr = cplus_expand_constant (expr);

      if (binfo && !integer_zerop (BINFO_OFFSET (binfo)))
        expr = size_binop (code, build_nop (sizetype, expr),
                           BINFO_OFFSET (binfo));
      return build_nop (type, expr);
    }
  else if (TYPE_PTRMEMFUNC_P (type) && TYPE_PTRMEMFUNC_P (intype))
    return build_ptrmemfunc (TYPE_PTRMEMFUNC_FN_TYPE (type), expr, 0);
  else if (TYPE_PTRMEMFUNC_P (intype))
    {
      if (!warn_pmf2ptr)
        {
          if (TREE_CODE (expr) == PTRMEM_CST)
            return cp_convert_to_pointer (type, PTRMEM_CST_MEMBER (expr),
                                          force);
          else if (TREE_CODE (expr) == OFFSET_REF)
            {
              tree object = TREE_OPERAND (expr, 0);
              return get_member_function_from_ptrfunc (&object,
                                                       TREE_OPERAND (expr, 1));
            }
        }
      error ("cannot convert `%E' from type `%T' to type `%T'",
             expr, intype, type);
      return error_mark_node;
    }

  if (integer_zerop (expr))
    {
      if (TYPE_PTRMEMFUNC_P (type))
        return build_ptrmemfunc (TYPE_PTRMEMFUNC_FN_TYPE (type), expr, 0);

      if (TYPE_PTRMEM_P (type))
        /* A NULL pointer-to-member is represented by -1, not by
           zero.  */
        expr = build_int_2 (-1, -1);
      else
        expr = build_int_2 (0, 0);
      TREE_TYPE (expr) = type;
      /* Fix up the representation of -1 if appropriate.  */
      force_fit_type (expr, 0);
      return expr;
    }
  else if (TYPE_PTR_TO_MEMBER_P (type) && INTEGRAL_CODE_P (form))
    {
      error ("invalid conversion from `%T' to `%T'", intype, type);
      return error_mark_node;
    }

  if (INTEGRAL_CODE_P (form))
    {
      if (TYPE_PRECISION (intype) == POINTER_SIZE)
        return build1 (CONVERT_EXPR, type, expr);
      expr = cp_convert (c_common_type_for_size (POINTER_SIZE, 0), expr);
      /* Modes may be different but sizes should be the same.  */
      if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (expr)))
          != GET_MODE_SIZE (TYPE_MODE (type)))
        /* There is supposed to be some integral type that is the same
           width as a pointer.  */
        abort ();

      return convert_to_pointer (type, expr);
    }

  if (type_unknown_p (expr))
    return instantiate_type (type, expr, tf_error | tf_warning);

  error ("cannot convert `%E' from type `%T' to type `%T'",
         expr, intype, type);
  return error_mark_node;
}
tree
convert_to_void (tree expr, const char *implicit)
{
  if (expr == error_mark_node
      || TREE_TYPE (expr) == error_mark_node)
    return error_mark_node;
  if (!TREE_TYPE (expr))
    return expr;
  if (invalid_nonstatic_memfn_p (expr))
    return error_mark_node;
  if (TREE_CODE (expr) == PSEUDO_DTOR_EXPR)
    {
      error ("pseudo-destructor is not called");
      return error_mark_node;
    }
  if (VOID_TYPE_P (TREE_TYPE (expr)))
    return expr;
  switch (TREE_CODE (expr))
    {
    case COND_EXPR:
      {
        /* The two parts of a cond expr might be separate lvalues.  */
        tree op1 = TREE_OPERAND (expr, 1);
        tree op2 = TREE_OPERAND (expr, 2);
        tree new_op1 = convert_to_void
          (op1, (implicit && !TREE_SIDE_EFFECTS (op2)
                 ? "second operand of conditional" : NULL));
        tree new_op2 = convert_to_void
          (op2, (implicit && !TREE_SIDE_EFFECTS (op1)
                 ? "third operand of conditional" : NULL));

        expr = build (COND_EXPR, TREE_TYPE (new_op1),
                      TREE_OPERAND (expr, 0), new_op1, new_op2);
        break;
      }

    case COMPOUND_EXPR:
      {
        /* The second part of a compound expr contains the value.  */
        tree op1 = TREE_OPERAND (expr, 1);
        tree new_op1 = convert_to_void
          (op1, (implicit && !TREE_NO_UNUSED_WARNING (expr)
                 ? "right-hand operand of comma" : NULL));

        if (new_op1 != op1)
          {
            tree t = build (COMPOUND_EXPR, TREE_TYPE (new_op1),
                            TREE_OPERAND (expr, 0), new_op1);
            expr = t;
          }

        break;
      }

    case NON_LVALUE_EXPR:
    case NOP_EXPR:
      /* These have already decayed to rvalue.  */
      break;

    case CALL_EXPR:
      /* We have a special meaning for volatile void fn().  */
      break;

    case INDIRECT_REF:
      {
        tree type = TREE_TYPE (expr);
        int is_reference = TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0)))
                           == REFERENCE_TYPE;
        int is_volatile = TYPE_VOLATILE (type);
        int is_complete = COMPLETE_TYPE_P (complete_type (type));

        if (is_volatile && !is_complete)
          warning ("object of incomplete type `%T' will not be accessed in %s",
                   type, implicit ? implicit : "void context");
        else if (is_reference && is_volatile)
          warning ("object of type `%T' will not be accessed in %s",
                   TREE_TYPE (TREE_OPERAND (expr, 0)),
                   implicit ? implicit : "void context");
        if (is_reference || !is_volatile || !is_complete)
          expr = TREE_OPERAND (expr, 0);

        break;
      }

    case VAR_DECL:
      {
        /* External variables might be incomplete.  */
        tree type = TREE_TYPE (expr);
        int is_complete = COMPLETE_TYPE_P (complete_type (type));

        if (TYPE_VOLATILE (type) && !is_complete)
          warning ("object `%E' of incomplete type `%T' will not be accessed in %s",
                   expr, type, implicit ? implicit : "void context");
        break;
      }

    default:;
    }
  {
    tree probe = expr;

    if (TREE_CODE (probe) == ADDR_EXPR)
      probe = TREE_OPERAND (expr, 0);
    if (type_unknown_p (probe))
      {
        /* [over.over] enumerates the places where we can take the address
           of an overloaded function, and this is not one of them.  */
        pedwarn ("%s cannot resolve address of overloaded function",
                 implicit ? implicit : "void cast");
        expr = void_zero_node;
      }
    else if (implicit && probe == expr && is_overloaded_fn (probe))
      /* Only warn when there is no &.  */
      warning ("%s is a reference, not call, to function `%E'",
               implicit, expr);
  }

  if (expr != error_mark_node && !VOID_TYPE_P (TREE_TYPE (expr)))
    {
      if (implicit && !TREE_SIDE_EFFECTS (expr) && warn_unused_value)
        warning ("%s has no effect", implicit);
      expr = build1 (CONVERT_EXPR, void_type_node, expr);
    }
  return expr;
}
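/* Illustrative sketch (not from GCC) of expressions that reach
   convert_to_void and the diagnostics above; "f" and "b" are
   hypothetical.  */
#if 0
int f (void);
int b;
void
example (void)
{
  (void) (f (), b);  /* Right-hand operand of comma converted to void.  */
  b ? f () : 0;      /* Each arm of the conditional converted separately.  */
  f;                 /* "is a reference, not call, to function" warning.  */
}
#endif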
tree
ocp_convert (tree type, tree expr, int convtype, int flags)
{
  tree e = expr;
  enum tree_code code = TREE_CODE (type);

  if (error_operand_p (e) || type == error_mark_node)
    return error_mark_node;

  complete_type (type);
  complete_type (TREE_TYPE (expr));

  e = decl_constant_value (e);

  if (IS_AGGR_TYPE (type) && (convtype & CONV_FORCE_TEMP)
      /* Some internal structures (vtable_entry_type, sigtbl_ptr_type)
         don't go through finish_struct, so they don't have the
         synthesized constructors.  So don't force a temporary.  */
      && TYPE_HAS_CONSTRUCTOR (type))
    /* We need a new temporary; don't take this shortcut.  */;
  else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (e)))
    {
      if (same_type_p (type, TREE_TYPE (e)))
        /* The call to fold will not always remove the NOP_EXPR as
           might be expected, since one of the types may be a typedef;
           the comparison in fold is just equality of pointers, not a
           call to comptypes.  We don't call fold in this case because
           that can result in infinite recursion; fold will call
           convert, which will call ocp_convert, etc.  */
        return e;
      /* For complex data types, we need to perform componentwise
         conversion.  */
      else if (TREE_CODE (type) == COMPLEX_TYPE)
        return fold (convert_to_complex (type, e));
      else if (TREE_CODE (e) == TARGET_EXPR)
        {
          /* Don't build a NOP_EXPR of class type.  Instead, change the
             type of the temporary.  Only allow this for cv-qual changes,
             though.  */
          if (!same_type_p (TYPE_MAIN_VARIANT (TREE_TYPE (e)),
                            TYPE_MAIN_VARIANT (type)))
            abort ();
          TREE_TYPE (e) = TREE_TYPE (TARGET_EXPR_SLOT (e)) = type;
          return e;
        }
      else if (TREE_ADDRESSABLE (type))
        /* We shouldn't be treating objects of ADDRESSABLE type as
           rvalues.  */
        abort ();
      else
        return fold (build1 (NOP_EXPR, type, e));
    }

  if (code == VOID_TYPE && (convtype & CONV_STATIC))
    {
      e = convert_to_void (e, /*implicit=*/NULL);
      return e;
    }

  if (INTEGRAL_CODE_P (code))
    {
      tree intype = TREE_TYPE (e);
      /* enum = enum, enum = int, enum = float, (enum)pointer are all
         errors.  */
      if (TREE_CODE (type) == ENUMERAL_TYPE
          && ((ARITHMETIC_TYPE_P (intype) && ! (convtype & CONV_STATIC))
              || (TREE_CODE (intype) == POINTER_TYPE)))
        {
          pedwarn ("conversion from `%#T' to `%#T'", intype, type);

          if (flag_pedantic_errors)
            return error_mark_node;
        }
      if (IS_AGGR_TYPE (intype))
        {
          tree rval;
          rval = build_type_conversion (type, e);
          if (rval)
            return rval;
          if (flags & LOOKUP_COMPLAIN)
            error ("`%#T' used where a `%T' was expected", intype, type);
          if (flags & LOOKUP_SPECULATIVELY)
            return NULL_TREE;
          return error_mark_node;
        }
      if (code == BOOLEAN_TYPE)
        return cp_truthvalue_conversion (e);

      return fold (convert_to_integer (type, e));
    }
  if (POINTER_TYPE_P (type) || TYPE_PTR_TO_MEMBER_P (type))
    return fold (cp_convert_to_pointer (type, e, false));
  if (code == VECTOR_TYPE)
    return fold (convert_to_vector (type, e));
  if (code == REAL_TYPE || code == COMPLEX_TYPE)
    {
      if (IS_AGGR_TYPE (TREE_TYPE (e)))
        {
          tree rval;
          rval = build_type_conversion (type, e);
          if (rval)
            return rval;
          else if (flags & LOOKUP_COMPLAIN)
            error ("`%#T' used where a floating point value was expected",
                   TREE_TYPE (e));
        }
      if (code == REAL_TYPE)
        return fold (convert_to_real (type, e));
      else if (code == COMPLEX_TYPE)
        return fold (convert_to_complex (type, e));
    }

  /* New C++ semantics: since assignment is now based on memberwise
     copying, if the rhs type is derived from the lhs type, then we may
     still do a conversion.  */
  if (IS_AGGR_TYPE_CODE (code))
    {
      tree dtype = TREE_TYPE (e);
      tree ctor = NULL_TREE;

      dtype = TYPE_MAIN_VARIANT (dtype);

      /* Conversion between aggregate types.  New C++ semantics allow
         objects of derived type to be cast to objects of base type.
         Old semantics only allowed this between pointers.

         There may be some ambiguity between using a constructor
         vs. using a type conversion operator when both apply.  */

      ctor = e;

      if (abstract_virtuals_error (NULL_TREE, type))
        return error_mark_node;

      if ((flags & LOOKUP_ONLYCONVERTING)
          && ! (IS_AGGR_TYPE (dtype) && DERIVED_FROM_P (type, dtype)))
        /* For copy-initialization, first we create a temp of the proper
           type with a user-defined conversion sequence, then we
           direct-initialize the target with the temp (see [dcl.init]).  */
        ctor = build_user_type_conversion (type, ctor, flags);
      else
        ctor = build_special_member_call (NULL_TREE,
                                          complete_ctor_identifier,
                                          build_tree_list (NULL_TREE, ctor),
                                          TYPE_BINFO (type),
                                          flags);
      if (ctor)
        return build_cplus_new (type, ctor);
    }

  if (flags & LOOKUP_COMPLAIN)
    error ("conversion from `%T' to non-scalar type `%T' requested",
           TREE_TYPE (expr), type);
  if (flags & LOOKUP_SPECULATIVELY)
    return NULL_TREE;
  return error_mark_node;
}
static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = (tree *) alloca (nargs * sizeof (tree));

  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      do
        {
          inner_type = TREE_TYPE (inner_type);
          start1 = build4 (ARRAY_REF, inner_type, start1,
                           size_zero_node, NULL, NULL);
          if (arg2)
            start2 = build4 (ARRAY_REF, inner_type, start2,
                             size_zero_node, NULL, NULL);
        }
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr (start1);
      if (arg2)
        start2 = build_fold_addr_expr (start2);

      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = build2 (POINTER_PLUS_EXPR, TREE_TYPE (start1), start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1), NULL);
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          p2 = create_tmp_var (TREE_TYPE (start2), NULL);
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
          append_to_statement_list (t, &ret);
        }

      lab = create_artificial_label ();
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
        argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm), fn, i);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      t = TYPE_SIZE_UNIT (inner_type);
      t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (p1), p1, t);
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          t = TYPE_SIZE_UNIT (inner_type);
          t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (p2), p2, t);
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
          append_to_statement_list (t, &ret);
        }

      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      argarray[i++] = build_fold_addr_expr (arg1);
      if (arg2)
        argarray[i++] = build_fold_addr_expr (arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm), fn, i);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
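/* Illustrative sketch (not from GCC) of the element-wise loop the array
   branch above assembles, in source terms:

     p1 = &arr1[0]...[0];  end1 = p1 + sizeof (arr1);
     if (arg2) p2 = &arr2[0]...[0];
   lab:
     fn (p1, p2, <default args>);
     p1 += sizeof (element);  if (arg2) p2 += sizeof (element);
     if (p1 != end1) goto lab;  */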
/* Return the tree for an expression, type EXP_TYPE (see treetree.h)
   with tree type TYPE and with operands OP1, OP2 (maybe), OP3 (maybe).  */
tree
tree_code_get_expression (unsigned int exp_type,
                          tree type, tree op1, tree op2,
                          tree op3 ATTRIBUTE_UNUSED)
{
  tree ret1;
  int operator;

  switch (exp_type)
    {
    case EXP_ASSIGN:
      if (!op1 || !op2)
        abort ();
      operator = MODIFY_EXPR;
      ret1 = build (operator, type, op1,
                    build1 (CONVERT_EXPR, type, op2));
      break;

    case EXP_PLUS:
      operator = PLUS_EXPR;
      goto binary_expression;

    case EXP_MINUS:
      operator = MINUS_EXPR;
      goto binary_expression;

    case EXP_EQUALS:
      operator = EQ_EXPR;
      goto binary_expression;

      /* Expand a binary expression.  Ensure the operands are the right
         type.  */
    binary_expression:
      if (!op1 || !op2)
        abort ();
      ret1 = build (operator, type,
                    build1 (CONVERT_EXPR, type, op1),
                    build1 (CONVERT_EXPR, type, op2));
      break;

      /* Reference to a variable.  This is dead easy, just return the
         decl for the variable.  If the TYPE is different than the
         variable type, convert it.  */
    case EXP_REFERENCE:
      if (!op1)
        abort ();
      if (type == TREE_TYPE (op1))
        ret1 = op1;
      else
        ret1 = build1 (CONVERT_EXPR, type, op1);
      break;

    case EXP_FUNCTION_INVOCATION:
      if (!op1 || !op2)
        abort ();
      {
        tree fun_ptr;
        fun_ptr = build1 (ADDR_EXPR, build_pointer_type (type), op1);
        ret1 = build (CALL_EXPR, type, fun_ptr, nreverse (op2));
      }
      break;

    default:
      abort ();
    }

  return ret1;
}
tree
build_throw (tree exp)
{
  tree fn;

  if (exp == error_mark_node)
    return exp;

  if (processing_template_decl)
    {
      if (cfun)
        current_function_returns_abnormally = 1;
      exp = build_min (THROW_EXPR, void_type_node, exp);
      SET_EXPR_LOCATION (exp, input_location);
      return exp;
    }

  if (exp == null_node)
    warning (0, "throwing NULL, which has integral, not pointer type");

  if (exp != NULL_TREE)
    {
      if (!is_admissible_throw_operand_or_catch_parameter (exp, true))
        return error_mark_node;
    }

  if (! doing_eh ())
    return error_mark_node;

  if (exp && decl_is_java_type (TREE_TYPE (exp), 1))
    {
      tree fn = get_identifier ("_Jv_Throw");
      if (!get_global_value_if_present (fn, &fn))
        {
          /* Declare void _Jv_Throw (void *).  */
          tree tmp;
          tmp = build_function_type_list (ptr_type_node,
                                          ptr_type_node, NULL_TREE);
          fn = push_throw_library_fn (fn, tmp);
        }
      else if (really_overloaded_fn (fn))
        {
          error ("%qD should never be overloaded", fn);
          return error_mark_node;
        }
      fn = OVL_CURRENT (fn);
      exp = cp_build_function_call_nary (fn, tf_warning_or_error,
                                         exp, NULL_TREE);
    }
  else if (exp)
    {
      tree throw_type;
      tree temp_type;
      tree cleanup;
      tree object, ptr;
      tree tmp;
      tree allocate_expr;

      /* The CLEANUP_TYPE is the internal type of a destructor.  */
      if (!cleanup_type)
        {
          tmp = build_function_type_list (void_type_node,
                                          ptr_type_node, NULL_TREE);
          cleanup_type = build_pointer_type (tmp);
        }

      fn = get_identifier ("__cxa_throw");
      if (!get_global_value_if_present (fn, &fn))
        {
          /* Declare void __cxa_throw (void*, void*, void (*)(void*)).  */
          /* ??? Second argument is supposed to be "std::type_info*".  */
          tmp = build_function_type_list (void_type_node,
                                          ptr_type_node, ptr_type_node,
                                          cleanup_type, NULL_TREE);
          fn = push_throw_library_fn (fn, tmp);

          if (flag_tm)
            {
              tree fn2 = get_identifier ("_ITM_cxa_throw");
              if (!get_global_value_if_present (fn2, &fn2))
                fn2 = push_throw_library_fn (fn2, tmp);
              apply_tm_attr (fn2, get_identifier ("transaction_pure"));
              record_tm_replacement (fn, fn2);
            }
        }

      /* [except.throw]

         A throw-expression initializes a temporary object, the type
         of which is determined by removing any top-level cv-qualifiers
         from the static type of the operand of throw and adjusting the
         type from "array of T" or "function return T" to "pointer to T"
         or "pointer to function returning T" respectively.  */
      temp_type = is_bitfield_expr_with_lowered_type (exp);
      if (!temp_type)
        temp_type = cv_unqualified (type_decays_to (TREE_TYPE (exp)));

      /* OK, this is kind of wacky.  The standard says that we call
         terminate when the exception handling mechanism, after
         completing evaluation of the expression to be thrown but
         before the exception is caught (_except.throw_), calls a user
         function that exits via an uncaught exception.

         So we have to protect the actual initialization of the
         exception object with terminate(), but evaluate the expression
         first.  Since there could be temps in the expression, we need
         to handle that, too.  We also expand the call to
         __cxa_allocate_exception first (which doesn't matter, since it
         can't throw).  */

      /* Allocate the space for the exception.  */
      allocate_expr = do_allocate_exception (temp_type);
      allocate_expr = get_target_expr (allocate_expr);
      ptr = TARGET_EXPR_SLOT (allocate_expr);
      TARGET_EXPR_CLEANUP (allocate_expr) = do_free_exception (ptr);
      CLEANUP_EH_ONLY (allocate_expr) = 1;

      object = build_nop (build_pointer_type (temp_type), ptr);
      object = cp_build_indirect_ref (object, RO_NULL, tf_warning_or_error);

      /* And initialize the exception object.  */
      if (CLASS_TYPE_P (temp_type))
        {
          int flags = LOOKUP_NORMAL | LOOKUP_ONLYCONVERTING;
          vec<tree, va_gc> *exp_vec;

          /* Under C++0x [12.8/16 class.copy], a thrown lvalue is
             sometimes treated as an rvalue for the purposes of overload
             resolution to favor move constructors over copy
             constructors.  */
          if (/* Must be a local, automatic variable.  */
              VAR_P (exp)
              && DECL_CONTEXT (exp) == current_function_decl
              && ! TREE_STATIC (exp)
              /* The variable must not have the `volatile' qualifier.  */
              && !(cp_type_quals (TREE_TYPE (exp)) & TYPE_QUAL_VOLATILE))
            flags = flags | LOOKUP_PREFER_RVALUE;

          /* Call the copy constructor.  */
          exp_vec = make_tree_vector_single (exp);
          exp = (build_special_member_call
                 (object, complete_ctor_identifier, &exp_vec,
                  TREE_TYPE (object), flags, tf_warning_or_error));
          release_tree_vector (exp_vec);
          if (exp == error_mark_node)
            {
              error (" in thrown expression");
              return error_mark_node;
            }
        }
      else
        {
          tmp = decay_conversion (exp, tf_warning_or_error);
          if (tmp == error_mark_node)
            return error_mark_node;
          exp = build2 (INIT_EXPR, temp_type, object, tmp);
        }

      /* Mark any cleanups from the initialization as MUST_NOT_THROW,
         since they are run after the exception object is initialized.  */
      cp_walk_tree_without_duplicates (&exp, wrap_cleanups_r, 0);

      /* Prepend the allocation.  */
      exp = build2 (COMPOUND_EXPR, TREE_TYPE (exp), allocate_expr, exp);

      /* Force all the cleanups to be evaluated here so that we don't
         have to do them during unwinding.  */
      exp = build1 (CLEANUP_POINT_EXPR, void_type_node, exp);

      throw_type = build_eh_type_type (prepare_eh_type (TREE_TYPE (object)));

      cleanup = NULL_TREE;
      if (type_build_dtor_call (TREE_TYPE (object)))
        {
          tree fn = lookup_fnfields (TYPE_BINFO (TREE_TYPE (object)),
                                     complete_dtor_identifier, 0);
          fn = BASELINK_FUNCTIONS (fn);
          mark_used (fn);
          if (TYPE_HAS_NONTRIVIAL_DESTRUCTOR (TREE_TYPE (object)))
            {
              cxx_mark_addressable (fn);
              /* Pretend it's a normal function.  */
              cleanup = build1 (ADDR_EXPR, cleanup_type, fn);
            }
        }
      if (cleanup == NULL_TREE)
        cleanup = build_int_cst (cleanup_type, 0);

      /* ??? Indicate that this function call throws throw_type.  */
      tmp = cp_build_function_call_nary (fn, tf_warning_or_error,
                                         ptr, throw_type, cleanup, NULL_TREE);

      /* Tack on the initialization stuff.  */
      exp = build2 (COMPOUND_EXPR, TREE_TYPE (tmp), exp, tmp);
    }
  else
    {
      /* Rethrow current exception.  */

      tree fn = get_identifier ("__cxa_rethrow");
      if (!get_global_value_if_present (fn, &fn))
        {
          /* Declare void __cxa_rethrow (void).  */
          fn = push_throw_library_fn
            (fn, build_function_type_list (void_type_node, NULL_TREE));
        }

      if (flag_tm)
        apply_tm_attr (fn, get_identifier ("transaction_pure"));

      /* ??? Indicate that this function call allows exceptions of the
         type of the enclosing catch block (if known).  */
      exp = cp_build_function_call_vec (fn, NULL, tf_warning_or_error);
    }

  exp = build1 (THROW_EXPR, void_type_node, exp);
  SET_EXPR_LOCATION (exp, input_location);

  return exp;
}
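/* Illustrative sketch (not from GCC) of the conceptual lowering of
   "throw E (...)" under the Itanium C++ ABI, matching the calls built
   above; "E", "ti" and "cleanup" are hypothetical placeholders.  */
#if 0
void *ptr = __cxa_allocate_exception (sizeof (E)); /* Freed again only if
                                                      initialization throws.  */
new (ptr) E (...);                                 /* Copy/move-construct; its
                                                      cleanups are wrapped
                                                      MUST_NOT_THROW.  */
__cxa_throw (ptr, &ti, cleanup);                   /* type_info and destructor;
                                                      never returns.  */
#endif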
static tree
gimplify_cp_loop (tree cond, tree body, tree incr, bool cond_is_first)
{
  tree top, entry, exit, cont_block, break_block, stmt_list, t;
  location_t stmt_locus;

  stmt_locus = input_location;
  stmt_list = NULL_TREE;
  entry = NULL_TREE;

  break_block = begin_bc_block (bc_break);
  cont_block = begin_bc_block (bc_continue);

  /* If condition is zero don't generate a loop construct.  */
  if (cond && integer_zerop (cond))
    {
      top = NULL_TREE;
      exit = NULL_TREE;
      if (cond_is_first)
        {
          t = build_bc_goto (bc_break);
          append_to_statement_list (t, &stmt_list);
        }
    }
  else
    {
      /* If we use a LOOP_EXPR here, we have to feed the whole thing
         back through the main gimplifier to lower it.  Given that we
         have to gimplify the loop body NOW so that we can resolve
         break/continue stmts, seems easier to just expand to gotos.  */
      top = build1 (LABEL_EXPR, void_type_node, NULL_TREE);

      /* If we have an exit condition, then we build an IF with gotos
         either out of the loop, or to the top of it.  If there's no
         exit condition, then we just build a jump back to the top.  */
      exit = build_and_jump (&LABEL_EXPR_LABEL (top));
      if (cond && !integer_nonzerop (cond))
        {
          t = build_bc_goto (bc_break);
          exit = build3 (COND_EXPR, void_type_node, cond, exit, t);
          exit = fold (exit);
          gimplify_stmt (&exit);

          if (cond_is_first)
            {
              if (incr)
                {
                  entry = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
                  t = build_and_jump (&LABEL_EXPR_LABEL (entry));
                }
              else
                t = build_bc_goto (bc_continue);
              append_to_statement_list (t, &stmt_list);
            }
        }
    }

  gimplify_stmt (&body);
  gimplify_stmt (&incr);

  body = finish_bc_block (bc_continue, cont_block, body);

  append_to_statement_list (top, &stmt_list);
  append_to_statement_list (body, &stmt_list);
  append_to_statement_list (incr, &stmt_list);
  append_to_statement_list (entry, &stmt_list);
  append_to_statement_list (exit, &stmt_list);

  annotate_all_with_locus (&stmt_list, stmt_locus);

  return finish_bc_block (bc_break, break_block, stmt_list);
}
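/* Illustrative sketch (not from GCC) of the goto form a
   "while (cond) { body; }" loop takes after the function above runs
   (cond_is_first, with an increment expression):

           goto entry;
     top:  <body>
           <incr>
     entry:
           if (<cond>) goto top; else goto break_label;  */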
/* Synthesize a CALL_EXPR and a TRY_FINALLY_EXPR for this chain of _DECLs if appropriate. Arrange to call the red-zone initialization functions now, and the matching uninitialization functions later for each. Return the gimple sequence after synthesis. */ gimple_seq mx_register_decls (tree decl, gimple_seq seq, gimple stmt, location_t location, bool func_args) { gimple_seq finally_stmts = NULL; gimple_stmt_iterator initially_stmts = gsi_start (seq); bool sframe_inserted = false; size_t front_rz_size, rear_rz_size; tree fsize, rsize, size; gimple uninit_fncall_front, uninit_fncall_rear, init_fncall_front, init_fncall_rear, init_assign_stmt; tree fncall_param_front, fncall_param_rear; while (decl != NULL_TREE) { if ((mf_decl_eligible_p (decl) || TREE_CODE(TREE_TYPE(decl)) == ARRAY_TYPE) /* Not already processed. */ && ! mf_marked_p (decl) /* Automatic variable. */ && ! DECL_EXTERNAL (decl) && ! TREE_STATIC (decl) && get_name(decl)) { DEBUGLOG("DEBUG Instrumenting %s is_complete_type %d\n", IDENTIFIER_POINTER(DECL_NAME(decl)), COMPLETE_TYPE_P(TREE_TYPE(decl))); /* Construct a tree corresponding to the type struct { unsigned int rz_front[6U]; <original variable>; unsigned int rz_rear[6U]; }; */ if (!sframe_inserted){ gimple ensure_fn_call = gimple_build_call (lbc_ensure_sframe_bitmap_fndecl, 0); gimple_set_location (ensure_fn_call, location); gsi_insert_before (&initially_stmts, ensure_fn_call, GSI_SAME_STMT); sframe_inserted = true; } // Calculate the zone sizes. size_t element_size = 0, request_size = 0; if (COMPLETE_TYPE_P(TREE_TYPE(decl))){ request_size = TREE_INT_CST_LOW(TYPE_SIZE_UNIT(TREE_TYPE(decl))); if (TREE_CODE(TREE_TYPE(decl)) == ARRAY_TYPE) element_size = TREE_INT_CST_LOW(TYPE_SIZE_UNIT(TREE_TYPE(TREE_TYPE(decl)))); else element_size = request_size; } calculate_zone_sizes(element_size, request_size, /*global*/ false, COMPLETE_TYPE_P(TREE_TYPE(decl)), &front_rz_size, &rear_rz_size); DEBUGLOG("DEBUG *SIZES* req_size %zu, ele_size %zu, fsize %zu, rsize %zu\n", request_size, element_size, front_rz_size, rear_rz_size); tree struct_type = create_struct_type(decl, front_rz_size, rear_rz_size); tree struct_var = create_struct_var(struct_type, decl, location); declare_vars(struct_var, stmt, 0); /* Insert into the decl -> struct_var hashtable. */ PWord_t PV; JSLI(PV, decl_map, mf_varname_tree(decl)); gcc_assert(PV); *PV = (PWord_t) struct_var; fsize = convert (unsigned_type_node, size_int(front_rz_size)); gcc_assert (is_gimple_val (fsize)); tree rz_front = TYPE_FIELDS(struct_type); fncall_param_front = mf_mark (build1 (ADDR_EXPR, ptr_type_node, build3 (COMPONENT_REF, TREE_TYPE(rz_front), struct_var, rz_front, NULL_TREE))); uninit_fncall_front = gimple_build_call (lbc_uninit_front_rz_fndecl, 2, fncall_param_front, fsize); init_fncall_front = gimple_build_call (lbc_init_front_rz_fndecl, 2, fncall_param_front, fsize); gimple_set_location (init_fncall_front, location); gimple_set_location (uninit_fncall_front, location); // Incomplete types have only a front red zone. if (COMPLETE_TYPE_P(TREE_TYPE(decl))){ rsize = convert (unsigned_type_node, size_int(rear_rz_size)); gcc_assert (is_gimple_val (rsize)); tree rz_rear = DECL_CHAIN(DECL_CHAIN(TYPE_FIELDS (struct_type))); fncall_param_rear = mf_mark (build1 (ADDR_EXPR, ptr_type_node, build3 (COMPONENT_REF, TREE_TYPE(rz_rear), struct_var, rz_rear, NULL_TREE))); init_fncall_rear = gimple_build_call (lbc_init_rear_rz_fndecl, 2, fncall_param_rear, rsize); uninit_fncall_rear = gimple_build_call (lbc_uninit_rear_rz_fndecl, 2, fncall_param_rear, rsize); gimple_set_location (init_fncall_rear, location); gimple_set_location
(uninit_fncall_rear, location); } // TODO Do I need this? #if 0 if (DECL_INITIAL(decl) != NULL_TREE){ // This code never seems to be executed for something like int i = 10; // It is not clear why, but the tree dump suggests that by the time we get here // such statements have already been split in two, as int i; and i = 10; // respectively. Leaving it in just in case. tree orig_var_type = DECL_CHAIN(TYPE_FIELDS (struct_type)); tree orig_var_lval = mf_mark (build3 (COMPONENT_REF, TREE_TYPE(orig_var_type), struct_var, orig_var_type, NULL_TREE)); init_assign_stmt = gimple_build_assign(orig_var_lval, DECL_INITIAL(decl)); gimple_set_location (init_assign_stmt, location); } #endif if (gsi_end_p (initially_stmts)) { // TODO handle this if (!DECL_ARTIFICIAL (decl)) warning (OPT_Wmudflap, "mudflap cannot track %qE in stub function", DECL_NAME (decl)); } else { #if 0 // Insert the declaration initializer. if (DECL_INITIAL(decl) != NULL_TREE) gsi_insert_before (&initially_stmts, init_assign_stmt, GSI_SAME_STMT); #endif //gsi_insert_before (&initially_stmts, register_fncall, GSI_SAME_STMT); gsi_insert_before (&initially_stmts, init_fncall_front, GSI_SAME_STMT); if (COMPLETE_TYPE_P(TREE_TYPE(decl))) gsi_insert_before (&initially_stmts, init_fncall_rear, GSI_SAME_STMT); /* Accumulate the FINALLY piece. */ //gimple_seq_add_stmt (&finally_stmts, unregister_fncall); gimple_seq_add_stmt (&finally_stmts, uninit_fncall_front); if (COMPLETE_TYPE_P(TREE_TYPE(decl))) gimple_seq_add_stmt (&finally_stmts, uninit_fncall_rear); } mf_mark (decl); } decl = DECL_CHAIN (decl); } /* Actually, (initially_stmts != NULL) <=> (finally_stmts != NULL). */ if (finally_stmts != NULL) { gimple try_stmt = gimple_build_try (seq, finally_stmts, GIMPLE_TRY_FINALLY); gimple_seq new_seq = gimple_seq_alloc (); gimple_seq_add_stmt (&new_seq, try_stmt); return new_seq; } else return seq; }
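/* The shape of the transformation above, in source terms.  The struct layout and the 6-word
   zones follow the comment in mx_register_decls; rz_poison and rz_unpoison are illustrative
   stand-ins for the red-zone init/uninit runtime hooks (their real semantics live in the LBC
   runtime), and the 0xAB fill pattern is arbitrary.  */
#include <string.h>
struct wrapped_int
{
  unsigned int rz_front[6];   /* front red zone */
  int orig;                   /* the original variable */
  unsigned int rz_rear[6];    /* rear red zone (complete types only) */
};

static void rz_poison (void *p, size_t n)   { memset (p, 0xAB, n); }
static void rz_unpoison (void *p, size_t n) { memset (p, 0, n); }

int
use_wrapped (void)
{
  struct wrapped_int w;
  rz_poison (w.rz_front, sizeof w.rz_front);     /* init_fncall_front */
  rz_poison (w.rz_rear, sizeof w.rz_rear);       /* init_fncall_rear */
  w.orig = 42;          /* accesses to the original decl now go through
                           the struct field (see mf_walk_n_instrument) */
  rz_unpoison (w.rz_rear, sizeof w.rz_rear);     /* the uninit calls run in */
  rz_unpoison (w.rz_front, sizeof w.rz_front);   /* the TRY_FINALLY cleanup */
  return w.orig;
}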
tree c_finish_omp_master (tree stmt) { return add_stmt (build1 (OMP_MASTER, void_type_node, stmt)); }
static void mf_xform_derefs_1 (gimple_stmt_iterator *iter, tree *tp, location_t location, tree dirflag) { tree type, base=NULL_TREE, limit, addr, size, t, elt=NULL_TREE; tree temp; bool check_red_flag = false, instrumented = false; tree fncall_param_val; gimple is_char_red_call; tree type_node; // TODO fix this to use our flag /* Don't instrument read operations. */ if (dirflag == integer_zero_node && flag_mudflap_ignore_reads) return; DEBUGLOG("TREE_CODE(t) = %s, mf_decl_eligible_p : %d\n", tree_code_name[(int)TREE_CODE(*tp)], mf_decl_eligible_p(*tp)); t = *tp; type = TREE_TYPE (t); if (type == error_mark_node) return; size = TYPE_SIZE_UNIT (type); /* Don't instrument marked nodes. */ if (mf_marked_p (t) && !mf_decl_eligible_p(t)) { DEBUGLOG("Returning Here - 1\n"); return; } if (TREE_CODE(t) == ADDR_EXPR || TREE_CODE(t) == COMPONENT_REF || TREE_CODE(t) == ARRAY_REF || (TREE_CODE(t) == VAR_DECL && mf_decl_eligible_p(t))) { DEBUGLOG("------ INSTRUMENTING NODES ---------\n"); /* Decls carry no operands, so only look at operand 0 for references. */ temp = (TREE_CODE(t) == VAR_DECL) ? NULL_TREE : TREE_OPERAND(t, 0); if (temp && (TREE_CODE(temp) == STRING_CST || TREE_CODE(temp) == FUNCTION_DECL)) // TODO Check this out? What do you do in this case? return; if (temp) DEBUGLOG("TREE_CODE(temp) : %s\n", tree_code_name[(int)TREE_CODE(temp)]); if (TREE_CODE(t) == VAR_DECL) *tp = mf_walk_n_instrument(tp, &instrumented); else TREE_OPERAND(t,0) = mf_walk_n_instrument(&(TREE_OPERAND(t,0)), &instrumented); if (TREE_CODE(t) == ADDR_EXPR) return; } DEBUGLOG("Pass2 derefs: entering deref section\n"); type_node = NULL_TREE; //TODO move this to appropriate cases t = *tp; switch (TREE_CODE (t)) { case ARRAY_REF: case COMPONENT_REF: // TODO check if the following works for component refs { DEBUGLOG("------ INSIDE CASE COMPONENT_REF ---------\n"); HOST_WIDE_INT bitsize, bitpos; tree inner, offset; int volatilep, unsignedp; enum machine_mode mode1; check_red_flag = true; inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode1, &unsignedp, &volatilep, false); if (!offset) offset = size_zero_node; offset = size_binop (PLUS_EXPR, offset, size_int (bitpos / BITS_PER_UNIT)); addr = fold_build2_loc (location, POINTER_PLUS_EXPR, ptr_type_node, build1 (ADDR_EXPR, build_pointer_type(type), inner), offset); break; // TODO continue? } case INDIRECT_REF: DEBUGLOG("------ INSIDE CASE INDIRECT_REF ---------\n"); check_red_flag = true; addr = TREE_OPERAND (t, 0); break; // TODO continue? case MEM_REF: DEBUGLOG("------ INSIDE CASE MEM_REF ---------\n"); check_red_flag = true; addr = fold_build2_loc (location, POINTER_PLUS_EXPR, TREE_TYPE (TREE_OPERAND (t, 0)), TREE_OPERAND (t, 0), fold_convert (sizetype, TREE_OPERAND (t, 1))); break; case TARGET_MEM_REF: DEBUGLOG("------ INSIDE CASE TARGET_MEM_REF ---------\n"); check_red_flag = true; addr = tree_mem_ref_addr (ptr_type_node, t); break; // TODO do you want to do this case? find out what it does.
case ARRAY_RANGE_REF: DEBUGLOG("------ INSIDE CASE ARRAY_RANGE_REF ---------\n"); DEBUGLOG("------ TODO not handled yet ---------\n"); return; case BIT_FIELD_REF: DEBUGLOG("------ INSIDE CASE BIT_FIELD_REF ---------\n"); DEBUGLOG("------ TODO not handled yet ---------\n"); return; default: DEBUGLOG("------ INSIDE CASE DEFAULT ---------\n"); if (mf_decl_eligible_p(t)) { DEBUGLOG("Do you want to be here?\n"); return; /*if((*tp = mx_xform_instrument_pass2(t)) == NULL_TREE){ DEBUGLOG("Failed to set tree operand\n"); return; }*/ } } // Add the call to is_char_red. if (check_red_flag) { DEBUGLOG("Entering is_char_red\n"); fncall_param_val = fold_build2_loc (location, MEM_REF, ptr_type_node, addr, build_int_cst(build_pointer_type(type), 0)); fncall_param_val = fold_convert_loc (location, unsigned_type_node, fncall_param_val); is_char_red_call = gimple_build_call (lbc_is_char_red_fndecl, 3, fncall_param_val, size, fold_convert_loc(location, ptr_type_node, addr)); gimple_set_location (is_char_red_call, location); //debug_gimple_stmt(is_char_red_call); gsi_insert_before (iter, is_char_red_call, GSI_SAME_STMT); DEBUGLOG("Done with is_char_red\n"); } DEBUGLOG("Exiting derefs\n"); }
tree c_finish_omp_ordered (tree stmt) { return add_stmt (build1 (OMP_ORDERED, void_type_node, stmt)); }
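/* Source-level forms that reach c_finish_omp_master and c_finish_omp_ordered: each helper
   simply wraps its associated statement in a one-operand OMP_MASTER or OMP_ORDERED node and
   chains it onto the current statement list.  Compiles with or without -fopenmp.  */
#include <stdio.h>
int main (void)
{
#pragma omp parallel
  {
#pragma omp master
    printf ("master thread only\n");          /* becomes OMP_MASTER */
  }
#pragma omp parallel for ordered
  for (int i = 0; i < 4; i++)
    {
#pragma omp ordered
      printf ("i = %d, in order\n", i);       /* becomes OMP_ORDERED */
    }
  return 0;
}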
static bool generate_memset_zero (gimple stmt, tree op0, tree nb_iter, gimple_stmt_iterator bsi) { tree t, addr_base; tree nb_bytes = NULL; bool res = false; gimple_seq stmts = NULL, stmt_list = NULL; gimple fn_call; tree mem, fndecl, fntype, fn; gimple_stmt_iterator i; ssa_op_iter iter; struct data_reference *dr = XCNEW (struct data_reference); DR_STMT (dr) = stmt; DR_REF (dr) = op0; if (!dr_analyze_innermost (dr)) goto end; /* Test for a positive stride, iterating over every element. */ if (integer_zerop (fold_build2 (MINUS_EXPR, integer_type_node, DR_STEP (dr), TYPE_SIZE_UNIT (TREE_TYPE (op0))))) { tree offset = fold_convert (sizetype, size_binop (PLUS_EXPR, DR_OFFSET (dr), DR_INIT (dr))); addr_base = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (DR_BASE_ADDRESS (dr)), DR_BASE_ADDRESS (dr), offset); } /* Test for a negative stride, iterating over every element. */ else if (integer_zerop (fold_build2 (PLUS_EXPR, integer_type_node, TYPE_SIZE_UNIT (TREE_TYPE (op0)), DR_STEP (dr)))) { nb_bytes = build_size_arg (nb_iter, op0, &stmt_list); addr_base = size_binop (PLUS_EXPR, DR_OFFSET (dr), DR_INIT (dr)); addr_base = fold_build2 (MINUS_EXPR, sizetype, addr_base, nb_bytes); addr_base = force_gimple_operand (addr_base, &stmts, true, NULL); gimple_seq_add_seq (&stmt_list, stmts); addr_base = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (DR_BASE_ADDRESS (dr)), DR_BASE_ADDRESS (dr), addr_base); } else goto end; mem = force_gimple_operand (addr_base, &stmts, true, NULL); gimple_seq_add_seq (&stmt_list, stmts); fndecl = implicit_built_in_decls [BUILT_IN_MEMSET]; fntype = TREE_TYPE (fndecl); fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl); if (!nb_bytes) nb_bytes = build_size_arg (nb_iter, op0, &stmt_list); fn_call = gimple_build_call (fn, 3, mem, integer_zero_node, nb_bytes); gimple_seq_add_stmt (&stmt_list, fn_call); for (i = gsi_start (stmt_list); !gsi_end_p (i); gsi_next (&i)) { gimple s = gsi_stmt (i); update_stmt_if_modified (s); FOR_EACH_SSA_TREE_OPERAND (t, s, iter, SSA_OP_VIRTUAL_DEFS) { if (TREE_CODE (t) == SSA_NAME) t = SSA_NAME_VAR (t); mark_sym_for_renaming (t); } } /* Mark also the uses of the VDEFS of STMT to be renamed. */ FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, SSA_OP_VIRTUAL_DEFS) { if (TREE_CODE (t) == SSA_NAME) { gimple s; imm_use_iterator imm_iter; FOR_EACH_IMM_USE_STMT (s, imm_iter, t) update_stmt (s); t = SSA_NAME_VAR (t); } mark_sym_for_renaming (t); } gsi_insert_seq_after (&bsi, stmt_list, GSI_CONTINUE_LINKING); res = true; if (dump_file && (dump_flags & TDF_DETAILS)) fprintf (dump_file, "generated memset zero\n"); todo |= TODO_rebuild_alias; end: free_data_ref (dr); return res; }
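/* What generate_memset_zero recognizes, at the source level: a unit-stride loop (positive or
   negative step) storing zero into every element of a partition, replaced by a single memset
   whose nb_bytes argument is built from the iteration count and the element size (for a
   negative step, the base address is first rewound by nb_bytes).  A small demonstration of
   the equivalence:  */
#include <assert.h>
#include <string.h>
int main (void)
{
  int a[8], b[8];
  for (int i = 0; i < 8; i++)         /* the distributed loop ... */
    a[i] = 0;
  memset (b, 0, 8 * sizeof (b[0]));   /* ... and the call it becomes */
  assert (memcmp (a, b, sizeof a) == 0);
  return 0;
}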
static void mf_build_check_statement_for (tree base, tree limit, block_stmt_iterator *instr_bsi, location_t *locus, tree dirflag) { tree_stmt_iterator head, tsi; block_stmt_iterator bsi; basic_block cond_bb, then_bb, join_bb; edge e; tree cond, t, u, v; tree mf_base; tree mf_elem; tree mf_limit; /* We first need to split the current basic block, and start altering the CFG. This allows us to insert the statements we're about to construct into the right basic blocks. */ cond_bb = bb_for_stmt (bsi_stmt (*instr_bsi)); bsi = *instr_bsi; bsi_prev (&bsi); if (! bsi_end_p (bsi)) e = split_block (cond_bb, bsi_stmt (bsi)); else e = split_block_after_labels (cond_bb); cond_bb = e->src; join_bb = e->dest; /* A recap at this point: join_bb is the basic block at whose head is the gimple statement for which this check expression is being built. cond_bb is the (possibly new, synthetic) basic block the end of which will contain the cache-lookup code, and a conditional that jumps to the cache-miss code or, much more likely, over to join_bb. */ /* Create the bb that contains the cache-miss fallback block (mf_check). */ then_bb = create_empty_bb (cond_bb); make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE); make_single_succ_edge (then_bb, join_bb, EDGE_FALLTHRU); /* Mark the pseudo-fallthrough edge from cond_bb to join_bb. */ e = find_edge (cond_bb, join_bb); e->flags = EDGE_FALSE_VALUE; e->count = cond_bb->count; e->probability = REG_BR_PROB_BASE; /* Update dominance info. Note that join_bb's data was updated by split_block. */ if (dom_info_available_p (CDI_DOMINATORS)) { set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb); set_immediate_dominator (CDI_DOMINATORS, join_bb, cond_bb); } /* Build our local variables. */ mf_elem = create_tmp_var (mf_cache_structptr_type, "__mf_elem"); mf_base = create_tmp_var (mf_uintptr_type, "__mf_base"); mf_limit = create_tmp_var (mf_uintptr_type, "__mf_limit"); /* Build: __mf_base = (uintptr_t) <base address expression>. */ t = build2 (MODIFY_EXPR, void_type_node, mf_base, convert (mf_uintptr_type, unshare_expr (base))); SET_EXPR_LOCUS (t, locus); gimplify_to_stmt_list (&t); head = tsi_start (t); tsi = tsi_last (t); /* Build: __mf_limit = (uintptr_t) <limit address expression>. */ t = build2 (MODIFY_EXPR, void_type_node, mf_limit, convert (mf_uintptr_type, unshare_expr (limit))); SET_EXPR_LOCUS (t, locus); gimplify_to_stmt_list (&t); tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING); /* Build: __mf_elem = &__mf_lookup_cache [(__mf_base >> __mf_shift) & __mf_mask]. */ t = build2 (RSHIFT_EXPR, mf_uintptr_type, mf_base, (flag_mudflap_threads ? mf_cache_shift_decl : mf_cache_shift_decl_l)); t = build2 (BIT_AND_EXPR, mf_uintptr_type, t, (flag_mudflap_threads ? mf_cache_mask_decl : mf_cache_mask_decl_l)); t = build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (mf_cache_array_decl)), mf_cache_array_decl, t, NULL_TREE, NULL_TREE); t = build1 (ADDR_EXPR, mf_cache_structptr_type, t); t = build2 (MODIFY_EXPR, void_type_node, mf_elem, t); SET_EXPR_LOCUS (t, locus); gimplify_to_stmt_list (&t); tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING); /* Quick validity check. if (__mf_elem->low > __mf_base || (__mf_elem->high < __mf_limit)) { __mf_check (); ... and only if single-threaded: __mf_lookup_shift_l = ...; __mf_lookup_mask_l = ...; } It is expected that this body of code is rarely executed so we mark the edge to the THEN clause of the conditional jump as unlikely. */ /* Construct t <-- '__mf_elem->low > __mf_base'.
*/ t = build3 (COMPONENT_REF, mf_uintptr_type, build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem), TYPE_FIELDS (mf_cache_struct_type), NULL_TREE); t = build2 (GT_EXPR, boolean_type_node, t, mf_base); /* Construct '__mf_elem->high < __mf_limit'. First build: 1) u <-- '__mf_elem->high' 2) v <-- '__mf_limit'. Then build 'u <-- (u < v)'. */ u = build3 (COMPONENT_REF, mf_uintptr_type, build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem), TREE_CHAIN (TYPE_FIELDS (mf_cache_struct_type)), NULL_TREE); v = mf_limit; u = build2 (LT_EXPR, boolean_type_node, u, v); /* Build the composed conditional: t <-- 't || u'. Then store the result of the evaluation of 't' in a temporary variable which we can use as the condition for the conditional jump. */ t = build2 (TRUTH_OR_EXPR, boolean_type_node, t, u); cond = create_tmp_var (boolean_type_node, "__mf_unlikely_cond"); t = build2 (MODIFY_EXPR, boolean_type_node, cond, t); gimplify_to_stmt_list (&t); tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING); /* Build the conditional jump. 'cond' is just a temporary so we can simply build a void COND_EXPR. We do need labels in both arms though. */ t = build3 (COND_EXPR, void_type_node, cond, build1 (GOTO_EXPR, void_type_node, tree_block_label (then_bb)), build1 (GOTO_EXPR, void_type_node, tree_block_label (join_bb))); SET_EXPR_LOCUS (t, locus); tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING); /* At this point, after so much hard work, we have only constructed the conditional jump, if (__mf_elem->low > __mf_base || (__mf_elem->high < __mf_limit)) The lowered GIMPLE tree representing this code is in the statement list starting at 'head'. We can insert this now in the current basic block, i.e. the one that the statement we're instrumenting was originally in. */ bsi = bsi_last (cond_bb); for (tsi = head; ! tsi_end_p (tsi); tsi_next (&tsi)) bsi_insert_after (&bsi, tsi_stmt (tsi), BSI_CONTINUE_LINKING); /* Now build up the body of the cache-miss handling: __mf_check(); refresh *_l vars. This is the body of the conditional. */ u = tree_cons (NULL_TREE, mf_file_function_line_tree (locus == NULL ? UNKNOWN_LOCATION : *locus), NULL_TREE); u = tree_cons (NULL_TREE, dirflag, u); /* NB: we pass the overall [base..limit] range to mf_check. */ u = tree_cons (NULL_TREE, fold_build2 (PLUS_EXPR, integer_type_node, fold_build2 (MINUS_EXPR, mf_uintptr_type, mf_limit, mf_base), integer_one_node), u); u = tree_cons (NULL_TREE, mf_base, u); t = build_function_call_expr (mf_check_fndecl, u); gimplify_to_stmt_list (&t); head = tsi_start (t); tsi = tsi_last (t); if (! flag_mudflap_threads) { t = build2 (MODIFY_EXPR, void_type_node, mf_cache_shift_decl_l, mf_cache_shift_decl); tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING); t = build2 (MODIFY_EXPR, void_type_node, mf_cache_mask_decl_l, mf_cache_mask_decl); tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING); } /* Insert the check code in the THEN block. */ bsi = bsi_start (then_bb); for (tsi = head; ! tsi_end_p (tsi); tsi_next (&tsi)) bsi_insert_after (&bsi, tsi_stmt (tsi), BSI_CONTINUE_LINKING); *instr_bsi = bsi_start (join_bb); bsi_next (instr_bsi); }
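/* A self-contained sketch of the inline check built above.  The cache geometry (SHIFT/MASK)
   and the names are illustrative, not libmudflap's real globals; what matters is the shape:
   hash the base address into a cache slot, compare the slot's [low, high] against
   [base, limit], and only on a miss fall into the slow path that mf_check_fndecl denotes
   (which also refreshes the cache and, single-threaded, the local shift/mask copies).  */
#include <stdint.h>
#define SHIFT 3
#define MASK  0xff
struct cache_entry { uintptr_t low, high; };
static struct cache_entry lookup_cache[MASK + 1];

static void
check_slow (uintptr_t base, uintptr_t limit)
{
  /* Stand-in for __mf_check: consult the object database, report any
     violation, and refresh lookup_cache for BASE's slot.  */
  (void) base; (void) limit;
}

static void
check_access (uintptr_t base, uintptr_t limit)
{
  struct cache_entry *e = &lookup_cache[(base >> SHIFT) & MASK];
  if (e->low > base || e->high < limit)   /* the __mf_unlikely_cond */
    check_slow (base, limit);
}

int main (void) { check_access (16, 23); return 0; }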
static void output_gimple_stmt (struct output_block *ob, gimple stmt) { unsigned i; enum gimple_code code; enum LTO_tags tag; struct bitpack_d bp; histogram_value hist; /* Emit identifying tag. */ code = gimple_code (stmt); tag = lto_gimple_code_to_tag (code); streamer_write_record_start (ob, tag); /* Emit the tuple header. */ bp = bitpack_create (ob->main_stream); bp_pack_var_len_unsigned (&bp, gimple_num_ops (stmt)); bp_pack_value (&bp, gimple_no_warning_p (stmt), 1); if (is_gimple_assign (stmt)) bp_pack_value (&bp, gimple_assign_nontemporal_move_p ( as_a <gassign *> (stmt)), 1); bp_pack_value (&bp, gimple_has_volatile_ops (stmt), 1); hist = gimple_histogram_value (cfun, stmt); bp_pack_value (&bp, hist != NULL, 1); bp_pack_var_len_unsigned (&bp, stmt->subcode); /* Emit location information for the statement. */ stream_output_location (ob, &bp, LOCATION_LOCUS (gimple_location (stmt))); streamer_write_bitpack (&bp); /* Emit the lexical block holding STMT. */ stream_write_tree (ob, gimple_block (stmt), true); /* Emit the operands. */ switch (gimple_code (stmt)) { case GIMPLE_RESX: streamer_write_hwi (ob, gimple_resx_region (as_a <gresx *> (stmt))); break; case GIMPLE_EH_MUST_NOT_THROW: stream_write_tree (ob, gimple_eh_must_not_throw_fndecl ( as_a <geh_mnt *> (stmt)), true); break; case GIMPLE_EH_DISPATCH: streamer_write_hwi (ob, gimple_eh_dispatch_region ( as_a <geh_dispatch *> (stmt))); break; case GIMPLE_ASM: { gasm *asm_stmt = as_a <gasm *> (stmt); streamer_write_uhwi (ob, gimple_asm_ninputs (asm_stmt)); streamer_write_uhwi (ob, gimple_asm_noutputs (asm_stmt)); streamer_write_uhwi (ob, gimple_asm_nclobbers (asm_stmt)); streamer_write_uhwi (ob, gimple_asm_nlabels (asm_stmt)); streamer_write_string (ob, ob->main_stream, gimple_asm_string (asm_stmt), true); } /* Fallthru */ case GIMPLE_ASSIGN: case GIMPLE_CALL: case GIMPLE_RETURN: case GIMPLE_SWITCH: case GIMPLE_LABEL: case GIMPLE_COND: case GIMPLE_GOTO: case GIMPLE_DEBUG: for (i = 0; i < gimple_num_ops (stmt); i++) { tree op = gimple_op (stmt, i); tree *basep = NULL; /* Wrap all uses of non-automatic variables inside MEM_REFs so that we do not have to deal with type mismatches on merged symbols during IL read in. The first operand of GIMPLE_DEBUG must be a decl, not MEM_REF, though. */ if (op && (i || !is_gimple_debug (stmt))) { basep = &op; if (TREE_CODE (*basep) == ADDR_EXPR) basep = &TREE_OPERAND (*basep, 0); while (handled_component_p (*basep)) basep = &TREE_OPERAND (*basep, 0); if (TREE_CODE (*basep) == VAR_DECL && !auto_var_in_fn_p (*basep, current_function_decl) && !DECL_REGISTER (*basep)) { bool volatilep = TREE_THIS_VOLATILE (*basep); tree ptrtype = build_pointer_type (TREE_TYPE (*basep)); *basep = build2 (MEM_REF, TREE_TYPE (*basep), build1 (ADDR_EXPR, ptrtype, *basep), build_int_cst (ptrtype, 0)); TREE_THIS_VOLATILE (*basep) = volatilep; } else basep = NULL; } stream_write_tree (ob, op, true); /* Restore the original base if we wrapped it inside a MEM_REF. 
*/ if (basep) *basep = TREE_OPERAND (TREE_OPERAND (*basep, 0), 0); } if (is_gimple_call (stmt)) { if (gimple_call_internal_p (stmt)) streamer_write_enum (ob->main_stream, internal_fn, IFN_LAST, gimple_call_internal_fn (stmt)); else stream_write_tree (ob, gimple_call_fntype (stmt), true); } break; case GIMPLE_NOP: case GIMPLE_PREDICT: break; case GIMPLE_TRANSACTION: { gtransaction *trans_stmt = as_a <gtransaction *> (stmt); gcc_assert (gimple_transaction_body (trans_stmt) == NULL); stream_write_tree (ob, gimple_transaction_label (trans_stmt), true); } break; default: gcc_unreachable (); } if (hist) stream_out_histogram_value (ob, hist); }
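/* The tuple header above goes out through a bitpack; this is the idea in miniature.  The
   real bitpack_d writes into a stream section and supports variable-length encodings
   (bp_pack_var_len_unsigned), which this single-word sketch omits; it assumes nbits < 64.  */
#include <stdint.h>
struct bitpack { uint64_t word; unsigned pos; };

static void
bp_pack (struct bitpack *bp, uint64_t val, unsigned nbits)
{
  /* Append the low NBITS of VAL at the current bit position.  */
  bp->word |= (val & ((UINT64_C (1) << nbits) - 1)) << bp->pos;
  bp->pos += nbits;
}

int main (void)
{
  struct bitpack bp = { 0, 0 };
  bp_pack (&bp, 5, 8);   /* e.g. gimple_num_ops */
  bp_pack (&bp, 1, 1);   /* e.g. gimple_no_warning_p */
  bp_pack (&bp, 0, 1);   /* e.g. gimple_has_volatile_ops */
  return bp.word == 0x105 ? 0 : 1;
}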
static void mf_xform_derefs_1 (block_stmt_iterator *iter, tree *tp, location_t *locus, tree dirflag) { tree type, base, limit, addr, size, t; /* Don't instrument read operations. */ if (dirflag == integer_zero_node && flag_mudflap_ignore_reads) return; /* Don't instrument marked nodes. */ if (mf_marked_p (*tp)) return; t = *tp; type = TREE_TYPE (t); if (type == error_mark_node) return; size = TYPE_SIZE_UNIT (type); switch (TREE_CODE (t)) { case ARRAY_REF: case COMPONENT_REF: { /* This is trickier than it may first appear. The reason is that we are looking at expressions from the "inside out" at this point. We may have a complex nested aggregate/array expression (e.g. "a.b[i].c"), maybe with an indirection as the leftmost operator ("p->a.b.d"), where instrumentation is necessary. Or we may have an innocent "a.b.c" expression that must not be instrumented. We need to recurse all the way down the nesting structure to figure it out: looking just at the outer node is not enough. */ tree var; int component_ref_only = (TREE_CODE (t) == COMPONENT_REF); /* If we have a bitfield component reference, we must note the innermost addressable object in ELT, from which we will construct the byte-addressable bounds of the bitfield. */ tree elt = NULL_TREE; int bitfield_ref_p = (TREE_CODE (t) == COMPONENT_REF && DECL_BIT_FIELD_TYPE (TREE_OPERAND (t, 1))); /* Iterate to the top of the ARRAY_REF/COMPONENT_REF containment hierarchy to find the outermost VAR_DECL. */ var = TREE_OPERAND (t, 0); while (1) { if (bitfield_ref_p && elt == NULL_TREE && (TREE_CODE (var) == ARRAY_REF || TREE_CODE (var) == COMPONENT_REF)) elt = var; if (TREE_CODE (var) == ARRAY_REF) { component_ref_only = 0; var = TREE_OPERAND (var, 0); } else if (TREE_CODE (var) == COMPONENT_REF) var = TREE_OPERAND (var, 0); else if (INDIRECT_REF_P (var)) { base = TREE_OPERAND (var, 0); break; } else { gcc_assert (TREE_CODE (var) == VAR_DECL || TREE_CODE (var) == PARM_DECL || TREE_CODE (var) == RESULT_DECL || TREE_CODE (var) == STRING_CST); /* Don't instrument this access if the underlying variable is not "eligible". This test matches those arrays that have only known-valid indexes, and thus are not labeled TREE_ADDRESSABLE. */ if (! mf_decl_eligible_p (var) || component_ref_only) return; else { base = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (var)), var); break; } } } /* Handle the case of ordinary non-indirection structure accesses. These have only nested COMPONENT_REF nodes (no INDIRECT_REF), but pass through the above filter loop. Note that it's possible for such a struct variable to match the eligible_p test because someone else might take its address sometime. */ /* We need special processing for bitfield components, because their addresses cannot be taken. */ if (bitfield_ref_p) { tree field = TREE_OPERAND (t, 1); if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST) size = DECL_SIZE_UNIT (field); if (elt) elt = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (elt)), elt); addr = fold_convert (ptr_type_node, elt ? 
elt : base); addr = fold_build2 (PLUS_EXPR, ptr_type_node, addr, fold_convert (ptr_type_node, byte_position (field))); } else addr = build1 (ADDR_EXPR, build_pointer_type (type), t); limit = fold_build2 (MINUS_EXPR, mf_uintptr_type, fold_build2 (PLUS_EXPR, mf_uintptr_type, convert (mf_uintptr_type, addr), size), integer_one_node); } break; case INDIRECT_REF: addr = TREE_OPERAND (t, 0); base = addr; limit = fold_build2 (MINUS_EXPR, ptr_type_node, fold_build2 (PLUS_EXPR, ptr_type_node, base, size), integer_one_node); break; case TARGET_MEM_REF: addr = tree_mem_ref_addr (ptr_type_node, t); base = addr; limit = fold_build2 (MINUS_EXPR, ptr_type_node, fold_build2 (PLUS_EXPR, ptr_type_node, base, size), build_int_cst (ptr_type_node, 1)); break; case ARRAY_RANGE_REF: warning (0, "mudflap checking not yet implemented for ARRAY_RANGE_REF"); return; case BIT_FIELD_REF: /* ??? merge with COMPONENT_REF code above? */ { tree ofs, rem, bpu; /* If we're not dereferencing something, then the access must be ok. */ if (TREE_CODE (TREE_OPERAND (t, 0)) != INDIRECT_REF) return; bpu = bitsize_int (BITS_PER_UNIT); ofs = convert (bitsizetype, TREE_OPERAND (t, 2)); rem = size_binop (TRUNC_MOD_EXPR, ofs, bpu); ofs = size_binop (TRUNC_DIV_EXPR, ofs, bpu); size = convert (bitsizetype, TREE_OPERAND (t, 1)); size = size_binop (PLUS_EXPR, size, rem); size = size_binop (CEIL_DIV_EXPR, size, bpu); size = convert (sizetype, size); addr = TREE_OPERAND (TREE_OPERAND (t, 0), 0); addr = convert (ptr_type_node, addr); addr = fold_build2 (PLUS_EXPR, ptr_type_node, addr, ofs); base = addr; limit = fold_build2 (MINUS_EXPR, ptr_type_node, fold_build2 (PLUS_EXPR, ptr_type_node, base, size), integer_one_node); } break; default: return; } mf_build_check_statement_for (base, limit, iter, locus, dirflag); }
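/* The BIT_FIELD_REF case above, as plain integer arithmetic: for a field at bit offset OFS
   with bit size SZ, the byte-addressable span that covers it starts at OFS / 8 bytes and
   runs for ceil ((OFS % 8 + SZ) / 8) bytes; the TRUNC_MOD_EXPR / TRUNC_DIV_EXPR /
   CEIL_DIV_EXPR trio computes exactly this.  */
#include <stdio.h>
int main (void)
{
  unsigned ofs = 19, sz = 7;            /* a 7-bit field at bit offset 19 */
  unsigned rem = ofs % 8;               /* TRUNC_MOD_EXPR by BITS_PER_UNIT */
  unsigned byte_ofs = ofs / 8;          /* TRUNC_DIV_EXPR */
  unsigned nbytes = (sz + rem + 7) / 8; /* CEIL_DIV_EXPR */
  printf ("byte offset %u, %u byte(s)\n", byte_ofs, nbytes); /* 2, 2 */
  return 0;
}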
tree convert_to_reference (tree reftype, tree expr, int convtype, int flags, tree decl) { tree type = TYPE_MAIN_VARIANT (TREE_TYPE (reftype)); tree intype; tree rval = NULL_TREE; tree rval_as_conversion = NULL_TREE; bool can_convert_intype_to_type; if (TREE_CODE (type) == FUNCTION_TYPE && TREE_TYPE (expr) == unknown_type_node) expr = instantiate_type (type, expr, (flags & LOOKUP_COMPLAIN) ? tf_warning_or_error : tf_none); if (expr == error_mark_node) return error_mark_node; intype = TREE_TYPE (expr); gcc_assert (TREE_CODE (intype) != REFERENCE_TYPE); gcc_assert (TREE_CODE (reftype) == REFERENCE_TYPE); intype = TYPE_MAIN_VARIANT (intype); can_convert_intype_to_type = can_convert (type, intype); if (!can_convert_intype_to_type && (convtype & CONV_IMPLICIT) && MAYBE_CLASS_TYPE_P (intype) && ! (flags & LOOKUP_NO_CONVERSION)) { /* Look for a user-defined conversion to lvalue that we can use. */ rval_as_conversion = build_type_conversion (reftype, expr); if (rval_as_conversion && rval_as_conversion != error_mark_node && real_lvalue_p (rval_as_conversion)) { expr = rval_as_conversion; rval_as_conversion = NULL_TREE; intype = type; can_convert_intype_to_type = true; } } if (((convtype & CONV_STATIC) && can_convert (intype, type)) || ((convtype & CONV_IMPLICIT) && can_convert_intype_to_type)) { if (flags & LOOKUP_COMPLAIN) { tree ttl = TREE_TYPE (reftype); tree ttr = lvalue_type (expr); if (! real_lvalue_p (expr)) warn_ref_binding (reftype, intype, decl); if (! (convtype & CONV_CONST) && !at_least_as_qualified_p (ttl, ttr)) permerror (input_location, "conversion from %qT to %qT discards qualifiers", ttr, reftype); } return build_up_reference (reftype, expr, flags, decl); } else if ((convtype & CONV_REINTERPRET) && lvalue_p (expr)) { /* When casting an lvalue to a reference type, just convert into a pointer to the new type and dereference it. This is allowed by San Diego WP section 5.2.9 paragraph 12, though perhaps it should be done directly (jason). (int &)ri ---> *(int*)&ri */ /* B* bp; A& ar = (A&)bp; is valid, but it's probably not what they meant. */ if (TREE_CODE (intype) == POINTER_TYPE && (comptypes (TREE_TYPE (intype), type, COMPARE_BASE | COMPARE_DERIVED))) warning (0, "casting %qT to %qT does not dereference pointer", intype, reftype); rval = cp_build_unary_op (ADDR_EXPR, expr, 0, tf_warning_or_error); if (rval != error_mark_node) rval = convert_force (build_pointer_type (TREE_TYPE (reftype)), rval, 0); if (rval != error_mark_node) rval = build1 (NOP_EXPR, reftype, rval); } else { rval = convert_for_initialization (NULL_TREE, type, expr, flags, "converting", 0, 0, tf_warning_or_error); if (rval == NULL_TREE || rval == error_mark_node) return rval; warn_ref_binding (reftype, intype, decl); rval = build_up_reference (reftype, rval, flags, decl); } if (rval) { /* If we found a way to convert earlier, then use it. */ return rval; } if (flags & LOOKUP_COMPLAIN) error ("cannot convert type %qT to type %qT", intype, reftype); return error_mark_node; }
/* Synthesize a CALL_EXPR and a TRY_FINALLY_EXPR, for this chain of _DECLs if appropriate. Arrange to call the __mf_register function now, and the __mf_unregister function later for each. */ static void mx_register_decls (tree decl, tree *stmt_list) { tree finally_stmts = NULL_TREE; tree_stmt_iterator initially_stmts = tsi_start (*stmt_list); while (decl != NULL_TREE) { if (mf_decl_eligible_p (decl) /* Not already processed. */ && ! mf_marked_p (decl) /* Automatic variable. */ && ! DECL_EXTERNAL (decl) && ! TREE_STATIC (decl)) { tree size = NULL_TREE, variable_name; tree unregister_fncall, unregister_fncall_params; tree register_fncall, register_fncall_params; size = convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (decl))); /* (& VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK) */ unregister_fncall_params = tree_cons (NULL_TREE, convert (ptr_type_node, mf_mark (build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (decl)), decl))), tree_cons (NULL_TREE, size, tree_cons (NULL_TREE, /* __MF_TYPE_STACK */ build_int_cst (NULL_TREE, 3), NULL_TREE))); /* __mf_unregister (...) */ unregister_fncall = build_function_call_expr (mf_unregister_fndecl, unregister_fncall_params); /* (& VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK, "name") */ variable_name = mf_varname_tree (decl); register_fncall_params = tree_cons (NULL_TREE, convert (ptr_type_node, mf_mark (build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (decl)), decl))), tree_cons (NULL_TREE, size, tree_cons (NULL_TREE, /* __MF_TYPE_STACK */ build_int_cst (NULL_TREE, 3), tree_cons (NULL_TREE, variable_name, NULL_TREE)))); /* __mf_register (...) */ register_fncall = build_function_call_expr (mf_register_fndecl, register_fncall_params); /* Accumulate the two calls. */ /* ??? Set EXPR_LOCATION. */ gimplify_stmt (&register_fncall); gimplify_stmt (&unregister_fncall); /* Add the __mf_register call at the current appending point. */ if (tsi_end_p (initially_stmts)) warning (0, "mudflap cannot track %qs in stub function", IDENTIFIER_POINTER (DECL_NAME (decl))); else { tsi_link_before (&initially_stmts, register_fncall, TSI_SAME_STMT); /* Accumulate the FINALLY piece. */ append_to_statement_list (unregister_fncall, &finally_stmts); } mf_mark (decl); } decl = TREE_CHAIN (decl); } /* Actually, (initially_stmts != NULL) <=> (finally_stmts != NULL). */ if (finally_stmts != NULL_TREE) { tree t = build2 (TRY_FINALLY_EXPR, void_type_node, *stmt_list, finally_stmts); *stmt_list = NULL; append_to_statement_list (t, stmt_list); } }
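/* The effect of the pass on an instrumented function, sketched at the source level.  The two
   stubs stand in for libmudflap's __mf_register and __mf_unregister entry points (argument
   order as in the parameter lists built above; 3 is __MF_TYPE_STACK); in the real transform
   the unregister call sits in a TRY_FINALLY_EXPR, so it runs on every exit path, not just
   the fall-through one shown here.  */
#include <stddef.h>
#include <stdio.h>
static void
register_stub (void *p, size_t sz, int type, const char *name)
{ (void) p; printf ("register %s: %zu bytes, type %d\n", name, sz, type); }
static void
unregister_stub (void *p, size_t sz, int type)
{ (void) p; printf ("unregister: %zu bytes, type %d\n", sz, type); }

void
instrumented (void)
{
  int buf[16];
  register_stub (buf, sizeof buf, 3, "buf");   /* prepended at function entry */
  buf[0] = 0;                                  /* ... original body ... */
  unregister_stub (buf, sizeof buf, 3);        /* the FINALLY piece */
}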
tree convert_to_void (tree expr, const char *implicit, tsubst_flags_t complain) { if (expr == error_mark_node || TREE_TYPE (expr) == error_mark_node) return error_mark_node; if (!TREE_TYPE (expr)) return expr; if (invalid_nonstatic_memfn_p (expr, complain)) return error_mark_node; if (TREE_CODE (expr) == PSEUDO_DTOR_EXPR) { if (complain & tf_error) error ("pseudo-destructor is not called"); return error_mark_node; } if (VOID_TYPE_P (TREE_TYPE (expr))) return expr; switch (TREE_CODE (expr)) { case COND_EXPR: { /* The two parts of a cond expr might be separate lvalues. */ tree op1 = TREE_OPERAND (expr,1); tree op2 = TREE_OPERAND (expr,2); tree new_op1 = convert_to_void (op1, (implicit && !TREE_SIDE_EFFECTS (op2) ? "second operand of conditional" : NULL), complain); tree new_op2 = convert_to_void (op2, (implicit && !TREE_SIDE_EFFECTS (op1) ? "third operand of conditional" : NULL), complain); expr = build3 (COND_EXPR, TREE_TYPE (new_op1), TREE_OPERAND (expr, 0), new_op1, new_op2); break; } case COMPOUND_EXPR: { /* The second part of a compound expr contains the value. */ tree op1 = TREE_OPERAND (expr,1); tree new_op1 = convert_to_void (op1, (implicit && !TREE_NO_WARNING (expr) ? "right-hand operand of comma" : NULL), complain); if (new_op1 != op1) { tree t = build2 (COMPOUND_EXPR, TREE_TYPE (new_op1), TREE_OPERAND (expr, 0), new_op1); expr = t; } break; } case NON_LVALUE_EXPR: case NOP_EXPR: /* These have already decayed to rvalue. */ break; case CALL_EXPR: /* We have a special meaning for volatile void fn(). */ break; case INDIRECT_REF: { tree type = TREE_TYPE (expr); int is_reference = TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == REFERENCE_TYPE; int is_volatile = TYPE_VOLATILE (type); int is_complete = COMPLETE_TYPE_P (complete_type (type)); /* Can't load the value if we don't know the type. */ if (is_volatile && !is_complete) { if (complain & tf_warning) warning (0, "object of incomplete type %qT will not be accessed in %s", type, implicit ? implicit : "void context"); } /* Don't load the value if this is an implicit dereference, or if the type needs to be handled by ctors/dtors. */ else if (is_volatile && (is_reference || TREE_ADDRESSABLE (type))) { if (complain & tf_warning) warning (0, "object of type %qT will not be accessed in %s", TREE_TYPE (TREE_OPERAND (expr, 0)), implicit ? implicit : "void context"); } if (is_reference || !is_volatile || !is_complete || TREE_ADDRESSABLE (type)) { /* Emit a warning (if enabled) when the "effect-less" INDIRECT_REF operation is stripped off. Note that we don't warn about - an expression with TREE_NO_WARNING set. (For an example of such expressions, see build_over_call in call.c.) - automatic dereferencing of references, since the user cannot control it. (See also warn_if_unused_value() in stmt.c.) */ if (warn_unused_value && implicit && (complain & tf_warning) && !TREE_NO_WARNING (expr) && !is_reference) warning (OPT_Wunused_value, "value computed is not used"); expr = TREE_OPERAND (expr, 0); } break; } case VAR_DECL: { /* External variables might be incomplete. */ tree type = TREE_TYPE (expr); int is_complete = COMPLETE_TYPE_P (complete_type (type)); if (TYPE_VOLATILE (type) && !is_complete && (complain & tf_warning)) warning (0, "object %qE of incomplete type %qT will not be accessed in %s", expr, type, implicit ? implicit : "void context"); break; } case TARGET_EXPR: /* Don't bother with the temporary object returned from a function if we don't use it and don't need to destroy it. 
We'll still allocate space for it in expand_call or declare_return_variable, but we don't need to track it through all the tree phases. */ if (TARGET_EXPR_IMPLICIT_P (expr) && TYPE_HAS_TRIVIAL_DESTRUCTOR (TREE_TYPE (expr))) { tree init = TARGET_EXPR_INITIAL (expr); if (TREE_CODE (init) == AGGR_INIT_EXPR && !AGGR_INIT_VIA_CTOR_P (init)) { tree fn = AGGR_INIT_EXPR_FN (init); expr = build_call_array (TREE_TYPE (TREE_TYPE (TREE_TYPE (fn))), fn, aggr_init_expr_nargs (init), AGGR_INIT_EXPR_ARGP (init)); } } break; default:; } { tree probe = expr; if (TREE_CODE (probe) == ADDR_EXPR) probe = TREE_OPERAND (expr, 0); if (type_unknown_p (probe)) { /* [over.over] enumerates the places where we can take the address of an overloaded function, and this is not one of them. */ if (complain & tf_error) error ("%s cannot resolve address of overloaded function", implicit ? implicit : "void cast"); else return error_mark_node; expr = void_zero_node; } else if (implicit && probe == expr && is_overloaded_fn (probe)) { /* Only warn when there is no &. */ if (complain & tf_warning) warning (OPT_Waddress, "%s is a reference, not call, to function %qE", implicit, expr); if (TREE_CODE (expr) == COMPONENT_REF) expr = TREE_OPERAND (expr, 0); } } if (expr != error_mark_node && !VOID_TYPE_P (TREE_TYPE (expr))) { if (implicit && warn_unused_value && !TREE_NO_WARNING (expr) && !processing_template_decl) { /* The middle end does not warn about expressions that have been explicitly cast to void, so we must do so here. */ if (!TREE_SIDE_EFFECTS (expr)) { if (complain & tf_warning) warning (OPT_Wunused_value, "%s has no effect", implicit); } else { tree e; enum tree_code code; enum tree_code_class tclass; e = expr; /* We might like to warn about (say) "(int) f()", as the cast has no effect, but the compiler itself will generate implicit conversions under some circumstances. (For example a block copy will be turned into a call to "__builtin_memcpy", with a conversion of the return value to an appropriate type.) So, to avoid false positives, we strip conversions. Do not use STRIP_NOPs because it will not strip conversions to "void", as that is not a mode-preserving conversion. */ while (TREE_CODE (e) == NOP_EXPR) e = TREE_OPERAND (e, 0); code = TREE_CODE (e); tclass = TREE_CODE_CLASS (code); if ((tclass == tcc_comparison || tclass == tcc_unary || (tclass == tcc_binary && !(code == MODIFY_EXPR || code == INIT_EXPR || code == PREDECREMENT_EXPR || code == PREINCREMENT_EXPR || code == POSTDECREMENT_EXPR || code == POSTINCREMENT_EXPR))) && (complain & tf_warning)) warning (OPT_Wunused_value, "value computed is not used"); } } expr = build1 (CONVERT_EXPR, void_type_node, expr); } if (! TREE_SIDE_EFFECTS (expr)) expr = void_zero_node; return expr; }
tree expand_start_catch_block (tree decl) { tree exp; tree type, init; if (! doing_eh ()) return NULL_TREE; if (decl) { if (!is_admissible_throw_operand_or_catch_parameter (decl, false)) decl = error_mark_node; type = prepare_eh_type (TREE_TYPE (decl)); mark_used (eh_type_info (type)); } else type = NULL_TREE; if (decl && decl_is_java_type (type, 1)) { /* Java only passes objects via pointer and doesn't require adjusting. The java object is immediately before the generic exception header. */ exp = build_exc_ptr (); exp = build1 (NOP_EXPR, build_pointer_type (type), exp); exp = fold_build_pointer_plus (exp, fold_build1_loc (input_location, NEGATE_EXPR, sizetype, TYPE_SIZE_UNIT (TREE_TYPE (exp)))); exp = cp_build_indirect_ref (exp, RO_NULL, tf_warning_or_error); initialize_handler_parm (decl, exp); return type; } /* Call __cxa_end_catch at the end of processing the exception. */ push_eh_cleanup (type); init = do_begin_catch (); /* If there's no decl at all, then all we need to do is make sure to tell the runtime that we've begun handling the exception. */ if (decl == NULL || decl == error_mark_node || init == error_mark_node) finish_expr_stmt (init); /* If the C++ object needs constructing, we need to do that before calling __cxa_begin_catch, so that std::uncaught_exception gets the right value during the copy constructor. */ else if (flag_use_cxa_get_exception_ptr && TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))) { exp = do_get_exception_ptr (); initialize_handler_parm (decl, exp); finish_expr_stmt (init); } /* Otherwise the type uses a bitwise copy, and we don't have to worry about the value of std::uncaught_exception and therefore can do the copy with the return value of __cxa_begin_catch instead. */ else { tree init_type = type; /* Pointers are passed by value, everything else by reference. */ if (!TYPE_PTR_P (type)) init_type = build_pointer_type (type); if (init_type != TREE_TYPE (init)) init = build1 (NOP_EXPR, init_type, init); exp = create_temporary_var (init_type); DECL_REGISTER (exp) = 1; cp_finish_decl (exp, init, /*init_const_expr=*/false, NULL_TREE, LOOKUP_ONLYCONVERTING); initialize_handler_parm (decl, exp); } return type; }
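/* The runtime protocol this function arranges, per the Itanium C++ ABI (C-level sketches;
   exception objects shown as void *): __cxa_get_exception_ptr may be called first so a
   constructing copy sees the right std::uncaught_exception state, __cxa_begin_catch marks
   the handler as entered and yields the adjusted object pointer, and the cleanup pushed by
   push_eh_cleanup calls __cxa_end_catch when the handler is left.  */
extern void *__cxa_get_exception_ptr (void *exception_object);
extern void *__cxa_begin_catch (void *exception_object);
extern void __cxa_end_catch (void);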
tree prepare_eh_table_type (tree type) { tree exp; tree *slot; const char *name; char *buf; tree decl; tree utf8_ref; /* The "type" (match_info) in a (Java) exception table is a pointer to: * a) NULL - meaning match any type in a try-finally. * b) a pointer to a pointer to a class. * c) a pointer to a pointer to a utf8_ref. The pointer is * rewritten to point to the appropriate class. */ if (type == NULL_TREE) return NULL_TREE; if (TYPE_TO_RUNTIME_MAP (output_class) == NULL) TYPE_TO_RUNTIME_MAP (output_class) = java_treetreehash_create (10); slot = java_treetreehash_new (TYPE_TO_RUNTIME_MAP (output_class), type); if (*slot != NULL) return TREE_VALUE (*slot); if (is_compiled_class (type) && !flag_indirect_dispatch) { name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))); buf = (char *) alloca (strlen (name) + 5); sprintf (buf, "%s_ref", name); decl = build_decl (input_location, VAR_DECL, get_identifier (buf), ptr_type_node); TREE_STATIC (decl) = 1; DECL_ARTIFICIAL (decl) = 1; DECL_IGNORED_P (decl) = 1; TREE_READONLY (decl) = 1; TREE_THIS_VOLATILE (decl) = 0; DECL_INITIAL (decl) = build_class_ref (type); layout_decl (decl, 0); pushdecl (decl); exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (decl)), decl); } else { utf8_ref = build_utf8_ref (DECL_NAME (TYPE_NAME (type))); name = IDENTIFIER_POINTER (DECL_NAME (TREE_OPERAND (utf8_ref, 0))); buf = (char *) alloca (strlen (name) + 5); sprintf (buf, "%s_ref", name); decl = build_decl (input_location, VAR_DECL, get_identifier (buf), utf8const_ptr_type); TREE_STATIC (decl) = 1; DECL_ARTIFICIAL (decl) = 1; DECL_IGNORED_P (decl) = 1; TREE_READONLY (decl) = 1; TREE_THIS_VOLATILE (decl) = 0; layout_decl (decl, 0); pushdecl (decl); exp = build1 (ADDR_EXPR, build_pointer_type (utf8const_ptr_type), decl); CONSTRUCTOR_APPEND_ELT (TYPE_CATCH_CLASSES (output_class), NULL_TREE, make_catch_class_record (exp, utf8_ref)); } exp = convert (ptr_type_node, exp); *slot = tree_cons (type, exp, NULL_TREE); return exp; }
static void finish_handler_array () { tree decl = current_handler->handler_array_decl; tree t; tree handler_array_init = NULL_TREE; int handlers_count = 1; int nelts; /* Build the table mapping exceptions to handler(-number)s. This is done in reverse order. */ /* First push the end of the list. This is either the ELSE handler (current_handler->else_handler > 0) or a NULL handler to indicate the end of the list (if current_handler->else_handler == 0). The following works either way. */ handler_array_init = build_tree_list (NULL_TREE, chill_expand_tuple (handler_element_type, build_nt (CONSTRUCTOR, NULL_TREE, tree_cons (NULL_TREE, null_pointer_node, build_tree_list (NULL_TREE, build_int_2 (current_handler->else_handler, 0)))))); for (t = current_handler->on_alt_list; t != NULL_TREE; t = TREE_CHAIN (t)) { tree handler_number = TREE_PURPOSE (t); tree elist = TREE_VALUE (t); for ( ; elist != NULL_TREE; elist = TREE_CHAIN (elist)) { tree ex_decl = build_chill_exception_decl (IDENTIFIER_POINTER (TREE_VALUE (elist))); tree ex_addr = build1 (ADDR_EXPR, char_pointer_type_for_handler, ex_decl); tree el = build_nt (CONSTRUCTOR, NULL_TREE, tree_cons (NULL_TREE, ex_addr, build_tree_list (NULL_TREE, handler_number))); mark_addressable (ex_decl); TREE_CONSTANT (ex_addr) = 1; handler_array_init = tree_cons (NULL_TREE, chill_expand_tuple (handler_element_type, el), handler_array_init); handlers_count++; } } #if 1 nelts = list_length (handler_array_init); TYPE_DOMAIN (TREE_TYPE (decl)) = build_index_type (build_int_2 (nelts - 1, - (nelts == 0))); layout_type (TREE_TYPE (decl)); DECL_INITIAL (decl) = convert (TREE_TYPE (decl), build_nt (CONSTRUCTOR, NULL_TREE, handler_array_init)); /* Pop back to the obstack that is current for this binding level. This is because MAXINDEX, rtl, etc. to be made below must go in the permanent obstack. But don't discard the temporary data yet. */ pop_obstacks (); layout_decl (decl, 0); /* To prevent make_decl_rtl (called indirectly by rest_of_decl_compilation) from throwing away the existing RTL (which has already been used). */ PUT_MODE (DECL_RTL (decl), DECL_MODE (decl)); rest_of_decl_compilation (decl, (char*)0, 0, 0); expand_decl_init (decl); #else /* To prevent make_decl_rtl (called indirectly by finish_decl) from altering the existing RTL. */ GET_MODE (DECL_RTL (current_handler->handler_array_decl)) = DECL_MODE (current_handler->handler_array_decl); finish_decl (current_handler->handler_array_decl, build_nt (CONSTRUCTOR, NULL_TREE, handler_array_init), NULL_TREE); #endif }