tree
get_typeid (tree type)
{
  if (type == error_mark_node || !typeid_ok_p ())
    return error_mark_node;

  if (processing_template_decl)
    return build_min (TYPEID_EXPR, const_type_info_type_node, type);

  /* If the type of the type-id is a reference type, the result of the
     typeid expression refers to a type_info object representing the
     referenced type.  */
  type = non_reference (type);

  /* The top-level cv-qualifiers of the lvalue expression or the type-id
     that is the operand of typeid are always ignored.  */
  type = TYPE_MAIN_VARIANT (type);

  if (!VOID_TYPE_P (type))
    type = complete_type_or_else (type, NULL_TREE);

  if (!type)
    return error_mark_node;

  return build_indirect_ref (get_tinfo_ptr (type), NULL);
}
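/* For orientation, a minimal C++ example (not part of this source) of the
   construct get_typeid lowers: typeid applied to a type-id.  As implemented
   above, references and top-level cv-qualifiers are ignored, so all three
   expressions below denote the same type_info object:

     #include <typeinfo>
     struct S { };
     bool same = typeid (S) == typeid (const S)
		 && typeid (S) == typeid (S &);

   The tree returned is the dereference of the type_info pointer built by
   get_tinfo_ptr for the adjusted type.  */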
tree
build_dynamic_cast (tree type, tree expr)
{
  if (type == error_mark_node || expr == error_mark_node)
    return error_mark_node;

  if (processing_template_decl)
    {
      expr = build_min (DYNAMIC_CAST_EXPR, type, expr);
      TREE_SIDE_EFFECTS (expr) = 1;
      return expr;
    }

  return convert_from_reference (build_dynamic_cast_1 (type, expr));
}
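/* Illustrative C++ usage (not part of this source) of the expressions that
   reach build_dynamic_cast; the actual conversion logic lives in
   build_dynamic_cast_1, and convert_from_reference unwraps a reference
   result into an lvalue:

     struct Base { virtual ~Base () {} };
     struct Derived : Base { };

     Base *bp = new Derived;
     Derived *dp = dynamic_cast<Derived *> (bp);   // null pointer on failure
     Derived &dr = dynamic_cast<Derived &> (*bp);  // std::bad_cast on failure

   Inside a template the expression is merely saved as a DYNAMIC_CAST_EXPR
   and processed again at instantiation time.  */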
tree
build_typeid (tree exp)
{
  tree cond = NULL_TREE;
  int nonnull = 0;

  if (exp == error_mark_node || !typeid_ok_p ())
    return error_mark_node;

  if (processing_template_decl)
    return build_min (TYPEID_EXPR, const_type_info_type_node, exp);

  if (TREE_CODE (exp) == INDIRECT_REF
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && TYPE_POLYMORPHIC_P (TREE_TYPE (exp))
      && ! resolves_to_fixed_type_p (exp, &nonnull)
      && ! nonnull)
    {
      exp = stabilize_reference (exp);
      cond = cp_convert (boolean_type_node, TREE_OPERAND (exp, 0));
    }

  exp = get_tinfo_decl_dynamic (exp);

  if (exp == error_mark_node)
    return error_mark_node;

  if (cond)
    {
      tree bad = throw_bad_typeid ();

      exp = build3 (COND_EXPR, TREE_TYPE (exp), cond, exp, bad);
    }

  return exp;
}
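/* The null check built above corresponds to this C++ behaviour (example not
   part of this source): applying typeid to the dereference of a null pointer
   to a polymorphic type must throw std::bad_typeid, hence the COND_EXPR that
   selects between the type_info lookup and throw_bad_typeid:

     #include <typeinfo>
     struct Poly { virtual ~Poly () {} };

     const std::type_info &probe (Poly *p)
     {
       return typeid (*p);   // throws std::bad_typeid if p is null
     }
*/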
tree
build_throw (tree exp)
{
  if (exp == error_mark_node)
    return exp;

  if (processing_template_decl)
    {
      if (cfun)
	current_function_returns_abnormally = 1;
      exp = build_min (THROW_EXPR, void_type_node, exp);
      SET_EXPR_LOCATION (exp, input_location);
      return exp;
    }

  if (exp && null_node_p (exp))
    warning (0, "throwing NULL, which has integral, not pointer type");

  if (exp != NULL_TREE)
    {
      if (!is_admissible_throw_operand_or_catch_parameter (exp, true))
	return error_mark_node;
    }

  if (! doing_eh ())
    return error_mark_node;

  if (exp)
    {
      tree throw_type;
      tree temp_type;
      tree cleanup;
      tree object, ptr;
      tree tmp;
      tree allocate_expr;

      /* The CLEANUP_TYPE is the internal type of a destructor.  */
      if (!cleanup_type)
	{
	  tmp = build_function_type_list (void_type_node,
					  ptr_type_node, NULL_TREE);
	  cleanup_type = build_pointer_type (tmp);
	}

      if (!throw_fn)
	{
	  tree name = get_identifier ("__cxa_throw");
	  throw_fn = get_global_binding (name);
	  if (!throw_fn)
	    {
	      /* Declare void __cxa_throw (void*, void*, void (*)(void*)).  */
	      /* ??? Second argument is supposed to be "std::type_info*".  */
	      tmp = build_function_type_list (void_type_node,
					      ptr_type_node, ptr_type_node,
					      cleanup_type, NULL_TREE);
	      throw_fn = push_throw_library_fn (name, tmp);

	      if (flag_tm)
		{
		  tree itm_name = get_identifier ("_ITM_cxa_throw");
		  tree itm_fn = get_global_binding (itm_name);
		  if (!itm_fn)
		    itm_fn = push_throw_library_fn (itm_name, tmp);
		  apply_tm_attr (itm_fn, get_identifier ("transaction_pure"));
		  record_tm_replacement (throw_fn, itm_fn);
		}
	    }
	}

      /* [except.throw]

	 A throw-expression initializes a temporary object, the type
	 of which is determined by removing any top-level
	 cv-qualifiers from the static type of the operand of throw
	 and adjusting the type from "array of T" or "function
	 returning T" to "pointer to T" or "pointer to function
	 returning T" respectively.  */
      temp_type = is_bitfield_expr_with_lowered_type (exp);
      if (!temp_type)
	temp_type = cv_unqualified (type_decays_to (TREE_TYPE (exp)));

      /* OK, this is kind of wacky.  The standard says that we call
	 terminate when the exception handling mechanism, after
	 completing evaluation of the expression to be thrown but
	 before the exception is caught (_except.throw_), calls a
	 user function that exits via an uncaught exception.

	 So we have to protect the actual initialization of the
	 exception object with terminate(), but evaluate the
	 expression first.  Since there could be temps in the
	 expression, we need to handle that, too.  We also expand
	 the call to __cxa_allocate_exception first (which doesn't
	 matter, since it can't throw).  */

      /* Allocate the space for the exception.  */
      allocate_expr = do_allocate_exception (temp_type);
      allocate_expr = get_target_expr (allocate_expr);
      ptr = TARGET_EXPR_SLOT (allocate_expr);
      TARGET_EXPR_CLEANUP (allocate_expr) = do_free_exception (ptr);
      CLEANUP_EH_ONLY (allocate_expr) = 1;

      object = build_nop (build_pointer_type (temp_type), ptr);
      object = cp_build_fold_indirect_ref (object);

      /* And initialize the exception object.  */
      if (CLASS_TYPE_P (temp_type))
	{
	  int flags = LOOKUP_NORMAL | LOOKUP_ONLYCONVERTING;
	  vec<tree, va_gc> *exp_vec;
	  bool converted = false;

	  /* Under C++0x [12.8/16 class.copy], a thrown lvalue is sometimes
	     treated as an rvalue for the purposes of overload resolution
	     to favor move constructors over copy constructors.  */
	  if (/* Must be a local, automatic variable.  */
	      VAR_P (exp)
	      && DECL_CONTEXT (exp) == current_function_decl
	      && ! TREE_STATIC (exp)
	      /* The variable must not have the `volatile' qualifier.  */
	      && !(cp_type_quals (TREE_TYPE (exp)) & TYPE_QUAL_VOLATILE))
	    {
	      tree moved = move (exp);
	      exp_vec = make_tree_vector_single (moved);
	      moved = (build_special_member_call
		       (object, complete_ctor_identifier, &exp_vec,
			TREE_TYPE (object), flags|LOOKUP_PREFER_RVALUE,
			tf_none));
	      release_tree_vector (exp_vec);
	      if (moved != error_mark_node)
		{
		  exp = moved;
		  converted = true;
		}
	    }

	  /* Call the copy constructor.  */
	  if (!converted)
	    {
	      exp_vec = make_tree_vector_single (exp);
	      exp = (build_special_member_call
		     (object, complete_ctor_identifier, &exp_vec,
		      TREE_TYPE (object), flags, tf_warning_or_error));
	      release_tree_vector (exp_vec);
	    }

	  if (exp == error_mark_node)
	    {
	      error (" in thrown expression");
	      return error_mark_node;
	    }
	}
      else
	{
	  tmp = decay_conversion (exp, tf_warning_or_error);
	  if (tmp == error_mark_node)
	    return error_mark_node;
	  exp = build2 (INIT_EXPR, temp_type, object, tmp);
	}

      /* Mark any cleanups from the initialization as MUST_NOT_THROW, since
	 they are run after the exception object is initialized.  */
      cp_walk_tree_without_duplicates (&exp, wrap_cleanups_r, 0);

      /* Prepend the allocation.  */
      exp = build2 (COMPOUND_EXPR, TREE_TYPE (exp), allocate_expr, exp);

      /* Force all the cleanups to be evaluated here so that we don't have
	 to do them during unwinding.  */
      exp = build1 (CLEANUP_POINT_EXPR, void_type_node, exp);

      throw_type = build_eh_type_type (prepare_eh_type (TREE_TYPE (object)));

      cleanup = NULL_TREE;
      if (type_build_dtor_call (TREE_TYPE (object)))
	{
	  tree dtor_fn = lookup_fnfields (TYPE_BINFO (TREE_TYPE (object)),
					  complete_dtor_identifier, 0);
	  dtor_fn = BASELINK_FUNCTIONS (dtor_fn);
	  mark_used (dtor_fn);
	  if (TYPE_HAS_NONTRIVIAL_DESTRUCTOR (TREE_TYPE (object)))
	    {
	      cxx_mark_addressable (dtor_fn);
	      /* Pretend it's a normal function.  */
	      cleanup = build1 (ADDR_EXPR, cleanup_type, dtor_fn);
	    }
	}
      if (cleanup == NULL_TREE)
	cleanup = build_int_cst (cleanup_type, 0);

      /* ??? Indicate that this function call throws throw_type.  */
      tmp = cp_build_function_call_nary (throw_fn, tf_warning_or_error,
					 ptr, throw_type, cleanup, NULL_TREE);

      /* Tack on the initialization stuff.  */
      exp = build2 (COMPOUND_EXPR, TREE_TYPE (tmp), exp, tmp);
    }
  else
    {
      /* Rethrow current exception.  */
      if (!rethrow_fn)
	{
	  tree name = get_identifier ("__cxa_rethrow");
	  rethrow_fn = get_global_binding (name);
	  if (!rethrow_fn)
	    /* Declare void __cxa_rethrow (void).  */
	    rethrow_fn = push_throw_library_fn
	      (name, build_function_type_list (void_type_node, NULL_TREE));

	  if (flag_tm)
	    apply_tm_attr (rethrow_fn, get_identifier ("transaction_pure"));
	}

      /* ??? Indicate that this function call allows exceptions of the type
	 of the enclosing catch block (if known).  */
      exp = cp_build_function_call_vec (rethrow_fn, NULL,
					tf_warning_or_error);
    }

  exp = build1 (THROW_EXPR, void_type_node, exp);
  SET_EXPR_LOCATION (exp, input_location);

  return exp;
}
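/* A conceptual sketch of the Itanium C++ ABI sequence that the trees built
   above correspond to for "throw obj;" with a class type T; this is
   pseudo-code for orientation, not text emitted by the compiler:

     void *__p = __cxa_allocate_exception (sizeof (T));
     // copy- or move-construct the exception object into __p; if that
     // initialization itself throws, the EH-only cleanup installed via
     // TARGET_EXPR_CLEANUP calls __cxa_free_exception (__p)
     __cxa_throw (__p, &typeid (T), cleanup);
     // 'cleanup' is the complete destructor of T, or a null pointer when
     // no destructor call is needed

   A bare "throw;" instead lowers to a call to __cxa_rethrow ().  */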
void
maybe_add_lambda_conv_op (tree type)
{
  bool nested = (cfun != NULL);
  bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
  tree callop = lambda_function (type);

  if (LAMBDA_EXPR_CAPTURE_LIST (CLASSTYPE_LAMBDA_EXPR (type)) != NULL_TREE)
    return;

  if (processing_template_decl)
    return;

  bool const generic_lambda_p
    = (DECL_TEMPLATE_INFO (callop)
       && DECL_TEMPLATE_RESULT (DECL_TI_TEMPLATE (callop)) == callop);

  if (!generic_lambda_p && DECL_INITIAL (callop) == NULL_TREE)
    {
      /* If the op() wasn't instantiated due to errors, give up.  */
      gcc_assert (errorcount || sorrycount);
      return;
    }

  /* Non-template conversion operators are defined directly with
     build_call_a and using DIRECT_ARGVEC for arguments (including 'this').
     Templates are deferred and the CALL is built in-place.  In the case of
     a deduced return call op, the decltype expression, DECLTYPE_CALL, used
     as a substitute for the return type is also built in-place.  The
     arguments of DECLTYPE_CALL in the return expression may differ in
     flags from those in the body CALL.  In particular, parameter pack
     expansions are marked PACK_EXPANSION_LOCAL_P in the body CALL, but not
     in DECLTYPE_CALL.  */

  vec<tree, va_gc> *direct_argvec = 0;
  tree decltype_call = 0, call = 0;
  tree fn_result = TREE_TYPE (TREE_TYPE (callop));

  if (generic_lambda_p)
    {
      /* Prepare the dependent member call for the static member function
	 '_FUN' and, potentially, prepare another call to be used in a
	 decltype return expression for a deduced return call op to allow
	 for simple implementation of the conversion operator.  */

      tree instance = build_nop (type, null_pointer_node);
      tree objfn = build_min (COMPONENT_REF, NULL_TREE,
			      instance, DECL_NAME (callop), NULL_TREE);
      int nargs = list_length (DECL_ARGUMENTS (callop)) - 1;

      call = prepare_op_call (objfn, nargs);
      if (type_uses_auto (fn_result))
	decltype_call = prepare_op_call (objfn, nargs);
    }
  else
    {
      direct_argvec = make_tree_vector ();
      direct_argvec->quick_push (build1 (NOP_EXPR,
					 TREE_TYPE (DECL_ARGUMENTS (callop)),
					 null_pointer_node));
    }

  /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order
     to declare the static member function "_FUN" below.  For each arg
     append to DIRECT_ARGVEC (for the non-template case) or populate the
     pre-allocated call args (for the template case).  If a parameter pack
     is found, expand it, flagging it as PACK_EXPANSION_LOCAL_P for the
     body call.  */

  tree fn_args = NULL_TREE;
  {
    int ix = 0;
    tree src = DECL_CHAIN (DECL_ARGUMENTS (callop));
    tree tgt;

    while (src)
      {
	tree new_node = copy_node (src);

	if (!fn_args)
	  fn_args = tgt = new_node;
	else
	  {
	    TREE_CHAIN (tgt) = new_node;
	    tgt = new_node;
	  }

	mark_exp_read (tgt);

	if (generic_lambda_p)
	  {
	    if (DECL_PACK_P (tgt))
	      {
		tree a = make_pack_expansion (tgt);
		if (decltype_call)
		  CALL_EXPR_ARG (decltype_call, ix) = copy_node (a);
		PACK_EXPANSION_LOCAL_P (a) = true;
		CALL_EXPR_ARG (call, ix) = a;
	      }
	    else
	      {
		tree a = convert_from_reference (tgt);
		CALL_EXPR_ARG (call, ix) = a;
		if (decltype_call)
		  CALL_EXPR_ARG (decltype_call, ix) = copy_node (a);
	      }
	    ++ix;
	  }
	else
	  vec_safe_push (direct_argvec, tgt);

	src = TREE_CHAIN (src);
      }
  }

  if (generic_lambda_p)
    {
      if (decltype_call)
	{
	  ++processing_template_decl;
	  fn_result = finish_decltype_type
	    (decltype_call, /*id_expression_or_member_access_p=*/false,
	     tf_warning_or_error);
	  --processing_template_decl;
	}
    }
  else
    call = build_call_a (callop, direct_argvec->length (),
			 direct_argvec->address ());

  CALL_FROM_THUNK_P (call) = 1;

  tree stattype = build_function_type (fn_result,
				       FUNCTION_ARG_CHAIN (callop));

  /* First build up the conversion op.  */

  tree rettype = build_pointer_type (stattype);
  tree name = mangle_conv_op_name_for_type (rettype);
  tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
  tree fntype = build_method_type_directly (thistype, rettype,
					    void_list_node);
  tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
  tree fn = convfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);

  if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn
      && DECL_ALIGN (fn) < 2 * BITS_PER_UNIT)
    DECL_ALIGN (fn) = 2 * BITS_PER_UNIT;

  SET_OVERLOADED_OPERATOR_CODE (fn, TYPE_EXPR);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_ARGUMENTS (fn) = build_this_parm (fntype, TYPE_QUAL_CONST);

  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  add_method (type, fn, NULL_TREE);

  /* Generic thunk code fails for varargs; we'll complain in mark_used if
     the conversion op is used.  */
  if (varargs_function_p (callop))
    {
      DECL_DELETED_FN (fn) = 1;
      return;
    }

  /* Now build up the thunk to be returned.  */

  name = get_identifier ("_FUN");
  tree statfn = build_lang_decl (FUNCTION_DECL, name, stattype);
  fn = statfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);

  if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn
      && DECL_ALIGN (fn) < 2 * BITS_PER_UNIT)
    DECL_ALIGN (fn) = 2 * BITS_PER_UNIT;

  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_STATIC_FUNCTION_P (fn) = 1;
  DECL_ARGUMENTS (fn) = fn_args;

  for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
    {
      /* Avoid duplicate -Wshadow warnings.  */
      DECL_NAME (arg) = NULL_TREE;
      DECL_CONTEXT (arg) = fn;
    }

  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  add_method (type, fn, NULL_TREE);

  if (nested)
    push_function_context ();
  else
    /* Still increment function_depth so that we don't GC in the middle of
       an expression.  */
    ++function_depth;

  /* Generate the body of the thunk.  */

  start_preparsed_function (statfn, NULL_TREE,
			    SF_PRE_PARSED | SF_INCLASS_INLINE);
  if (DECL_ONE_ONLY (statfn))
    {
      /* Put the thunk in the same comdat group as the call op.  */
      cgraph_node::get_create (statfn)->add_to_same_comdat_group
	(cgraph_node::get_create (callop));
    }

  tree body = begin_function_body ();
  tree compound_stmt = begin_compound_stmt (0);

  if (!generic_lambda_p)
    {
      set_flags_from_callee (call);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
	call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
    }
  call = convert_from_reference (call);
  finish_return_stmt (call);

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline*/2);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  /* Generate the body of the conversion op.  */

  start_preparsed_function (convfn, NULL_TREE,
			    SF_PRE_PARSED | SF_INCLASS_INLINE);
  body = begin_function_body ();
  compound_stmt = begin_compound_stmt (0);

  /* decl_needed_p needs to see that it's used.  */
  TREE_USED (statfn) = 1;
  finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline*/2);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  if (nested)
    pop_function_context ();
  else
    --function_depth;
}
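/* For orientation, the language feature implemented above, followed by a
   hand-written approximation of the two members added to the closure type
   (hypothetical names; the real ones are the mangled conversion operator
   and "_FUN"):

     auto lam = [] (int i) { return i + 1; };
     int (*fp) (int) = lam;   // uses the conversion op added here
     int three = fp (2);

     // roughly equivalent members of the closure type:
     typedef int (*fptr) (int);
     static int _FUN (int i) { return ((Closure *) 0)->operator () (i); }
     operator fptr () const { return &_FUN; }

   Passing a null 'this' to the call operator is only a conceptual
   shorthand for the thunk built here; it works because a capture-less call
   operator never touches the closure object, and the function returns
   early above whenever the capture list is non-empty.  */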