void
maybe_add_lambda_conv_op (tree type)
{
  bool nested = (cfun != NULL);
  bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
  tree callop = lambda_function (type);

  if (LAMBDA_EXPR_CAPTURE_LIST (CLASSTYPE_LAMBDA_EXPR (type)) != NULL_TREE)
    return;

  if (processing_template_decl)
    return;

  bool const generic_lambda_p
    = (DECL_TEMPLATE_INFO (callop)
       && DECL_TEMPLATE_RESULT (DECL_TI_TEMPLATE (callop)) == callop);

  if (!generic_lambda_p && DECL_INITIAL (callop) == NULL_TREE)
    {
      /* If the op() wasn't instantiated due to errors, give up.  */
      gcc_assert (errorcount || sorrycount);
      return;
    }

  /* Non-template conversion operators are defined directly with
     build_call_a and using DIRECT_ARGVEC for arguments (including
     'this').  Templates are deferred and the CALL is built in-place.
     In the case of a deduced return call op, the decltype expression,
     DECLTYPE_CALL, used as a substitute for the return type is also
     built in-place.  The arguments of DECLTYPE_CALL in the return
     expression may differ in flags from those in the body CALL.  In
     particular, parameter pack expansions are marked
     PACK_EXPANSION_LOCAL_P in the body CALL, but not in
     DECLTYPE_CALL.  */

  vec<tree, va_gc> *direct_argvec = 0;
  tree decltype_call = 0, call = 0;
  tree fn_result = TREE_TYPE (TREE_TYPE (callop));

  if (generic_lambda_p)
    {
      /* Prepare the dependent member call for the static member function
	 '_FUN' and, potentially, prepare another call to be used in a
	 decltype return expression for a deduced return call op to allow
	 for simple implementation of the conversion operator.  */

      tree instance = build_nop (type, null_pointer_node);
      tree objfn = build_min (COMPONENT_REF, NULL_TREE,
			      instance, DECL_NAME (callop), NULL_TREE);
      int nargs = list_length (DECL_ARGUMENTS (callop)) - 1;

      call = prepare_op_call (objfn, nargs);
      if (type_uses_auto (fn_result))
	decltype_call = prepare_op_call (objfn, nargs);
    }
  else
    {
      direct_argvec = make_tree_vector ();
      direct_argvec->quick_push (build1 (NOP_EXPR,
					 TREE_TYPE (DECL_ARGUMENTS (callop)),
					 null_pointer_node));
    }

  /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order
     to declare the static member function "_FUN" below.  For each arg
     append to DIRECT_ARGVEC (for the non-template case) or populate the
     pre-allocated call args (for the template case).  If a parameter
     pack is found, expand it, flagging it as PACK_EXPANSION_LOCAL_P for
     the body call.  */

  tree fn_args = NULL_TREE;
  {
    int ix = 0;
    tree src = DECL_CHAIN (DECL_ARGUMENTS (callop));
    tree tgt;

    while (src)
      {
	tree new_node = copy_node (src);

	if (!fn_args)
	  fn_args = tgt = new_node;
	else
	  {
	    TREE_CHAIN (tgt) = new_node;
	    tgt = new_node;
	  }

	mark_exp_read (tgt);

	if (generic_lambda_p)
	  {
	    if (DECL_PACK_P (tgt))
	      {
		tree a = make_pack_expansion (tgt);
		if (decltype_call)
		  CALL_EXPR_ARG (decltype_call, ix) = copy_node (a);
		PACK_EXPANSION_LOCAL_P (a) = true;
		CALL_EXPR_ARG (call, ix) = a;
	      }
	    else
	      {
		tree a = convert_from_reference (tgt);
		CALL_EXPR_ARG (call, ix) = a;
		if (decltype_call)
		  CALL_EXPR_ARG (decltype_call, ix) = copy_node (a);
	      }
	    ++ix;
	  }
	else
	  vec_safe_push (direct_argvec, tgt);

	src = TREE_CHAIN (src);
      }
  }

  if (generic_lambda_p)
    {
      if (decltype_call)
	{
	  ++processing_template_decl;
	  fn_result = finish_decltype_type
	    (decltype_call, /*id_expression_or_member_access_p=*/false,
	     tf_warning_or_error);
	  --processing_template_decl;
	}
    }
  else
    call = build_call_a (callop, direct_argvec->length (),
			 direct_argvec->address ());

  CALL_FROM_THUNK_P (call) = 1;

  tree stattype = build_function_type (fn_result,
				       FUNCTION_ARG_CHAIN (callop));

  /* First build up the conversion op.  */

  tree rettype = build_pointer_type (stattype);
  tree name = mangle_conv_op_name_for_type (rettype);
  tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
  tree fntype = build_method_type_directly (thistype, rettype,
					    void_list_node);
  tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
  tree fn = convfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);

  if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn
      && DECL_ALIGN (fn) < 2 * BITS_PER_UNIT)
    DECL_ALIGN (fn) = 2 * BITS_PER_UNIT;

  SET_OVERLOADED_OPERATOR_CODE (fn, TYPE_EXPR);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_ARGUMENTS (fn) = build_this_parm (fntype, TYPE_QUAL_CONST);

  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  add_method (type, fn, NULL_TREE);

  /* Generic thunk code fails for varargs; we'll complain in mark_used if
     the conversion op is used.  */
  if (varargs_function_p (callop))
    {
      DECL_DELETED_FN (fn) = 1;
      return;
    }

  /* Now build up the thunk to be returned.  */

  name = get_identifier ("_FUN");
  tree statfn = build_lang_decl (FUNCTION_DECL, name, stattype);
  fn = statfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);

  if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn
      && DECL_ALIGN (fn) < 2 * BITS_PER_UNIT)
    DECL_ALIGN (fn) = 2 * BITS_PER_UNIT;

  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_STATIC_FUNCTION_P (fn) = 1;
  DECL_ARGUMENTS (fn) = fn_args;

  for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
    {
      /* Avoid duplicate -Wshadow warnings.  */
      DECL_NAME (arg) = NULL_TREE;
      DECL_CONTEXT (arg) = fn;
    }

  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  add_method (type, fn, NULL_TREE);

  if (nested)
    push_function_context ();
  else
    /* Still increment function_depth so that we don't GC in the
       middle of an expression.  */
    ++function_depth;

  /* Generate the body of the thunk.  */

  start_preparsed_function (statfn, NULL_TREE,
			    SF_PRE_PARSED | SF_INCLASS_INLINE);
  if (DECL_ONE_ONLY (statfn))
    {
      /* Put the thunk in the same comdat group as the call op.  */
      cgraph_node::get_create (statfn)->add_to_same_comdat_group
	(cgraph_node::get_create (callop));
    }
  tree body = begin_function_body ();
  tree compound_stmt = begin_compound_stmt (0);
  if (!generic_lambda_p)
    {
      set_flags_from_callee (call);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
	call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
    }
  call = convert_from_reference (call);
  finish_return_stmt (call);

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline*/2);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  /* Generate the body of the conversion op.  */

  start_preparsed_function (convfn, NULL_TREE,
			    SF_PRE_PARSED | SF_INCLASS_INLINE);
  body = begin_function_body ();
  compound_stmt = begin_compound_stmt (0);

  /* decl_needed_p needs to see that it's used.  */
  TREE_USED (statfn) = 1;
  finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline*/2);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  if (nested)
    pop_function_context ();
  else
    --function_depth;
}
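For context, a minimal user-level sketch (illustrative C++14 code, not compiler source) of the behavior maybe_add_lambda_conv_op implements: a capture-less lambda's closure type gains a conversion operator to a plain function pointer whose static thunk _FUN forwards to operator(), and for a generic lambda the conversion operator is itself a template.

/* Hedged illustration: the conversion operator synthesized above is
   what makes these initializations legal.  Requires C++14 for the
   generic-lambda case.  */
#include <cassert>

int
main ()
{
  auto add = [] (int a, int b) { return a + b; };  /* capture-less */
  int (*fp) (int, int) = add;  /* uses the synthesized conversion op */
  assert (fp (2, 3) == 5);

  auto id = [] (auto x) { return x; };  /* generic lambda */
  double (*gp) (double) = id;  /* conversion op template, one
				  instantiation per signature */
  assert (gp (1.5) == 1.5);
  return 0;
}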
tree
objcp_begin_compound_stmt (int flags ATTRIBUTE_UNUSED)
{
  return begin_compound_stmt (0);
}
static void
do_build_assign_ref (tree fndecl)
{
  tree parm = TREE_CHAIN (DECL_ARGUMENTS (fndecl));
  tree compound_stmt;

  compound_stmt = begin_compound_stmt (0);
  parm = convert_from_reference (parm);

  if (TYPE_HAS_TRIVIAL_ASSIGN_REF (current_class_type)
      && is_empty_class (current_class_type))
    /* Don't copy the padding byte; it might not have been allocated
       if *this is a base subobject.  */;
  else if (TYPE_HAS_TRIVIAL_ASSIGN_REF (current_class_type))
    {
      tree t = build2 (MODIFY_EXPR, void_type_node, current_class_ref, parm);
      finish_expr_stmt (t);
    }
  else
    {
      tree fields;
      int cvquals = cp_type_quals (TREE_TYPE (parm));
      int i;
      tree binfo, base_binfo;

      /* Assign to each of the direct base classes.  */
      for (binfo = TYPE_BINFO (current_class_type), i = 0;
	   BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
	{
	  tree converted_parm;

	  /* We must convert PARM directly to the base class explicitly
	     since the base class may be ambiguous.  */
	  converted_parm = build_base_path (PLUS_EXPR, parm, base_binfo, 1);
	  /* Call the base class assignment operator.  */
	  finish_expr_stmt
	    (build_special_member_call (current_class_ref,
					ansi_assopname (NOP_EXPR),
					build_tree_list (NULL_TREE,
							 converted_parm),
					base_binfo,
					LOOKUP_NORMAL | LOOKUP_NONVIRTUAL));
	}

      /* Assign to each of the non-static data members.  */
      for (fields = TYPE_FIELDS (current_class_type);
	   fields;
	   fields = TREE_CHAIN (fields))
	{
	  tree comp = current_class_ref;
	  tree init = parm;
	  tree field = fields;
	  tree expr_type;
	  int quals;

	  if (TREE_CODE (field) != FIELD_DECL || DECL_ARTIFICIAL (field))
	    continue;

	  expr_type = TREE_TYPE (field);

	  if (CP_TYPE_CONST_P (expr_type))
	    {
	      error ("non-static const member %q#D, can't use default "
		     "assignment operator", field);
	      continue;
	    }
	  else if (TREE_CODE (expr_type) == REFERENCE_TYPE)
	    {
	      error ("non-static reference member %q#D, can't use "
		     "default assignment operator", field);
	      continue;
	    }

	  if (DECL_NAME (field))
	    {
	      if (VFIELD_NAME_P (DECL_NAME (field)))
		continue;
	    }
	  else if (ANON_AGGR_TYPE_P (expr_type)
		   && TYPE_FIELDS (expr_type) != NULL_TREE)
	    /* Just use the field; anonymous types can't have
	       nontrivial copy ctors or assignment ops.  */;
	  else
	    continue;

	  comp = build3 (COMPONENT_REF, expr_type, comp, field, NULL_TREE);

	  /* Compute the type of init->field.  */
	  quals = cvquals;
	  if (DECL_MUTABLE_P (field))
	    quals &= ~TYPE_QUAL_CONST;
	  expr_type = cp_build_qualified_type (expr_type, quals);

	  init = build3 (COMPONENT_REF, expr_type, init, field, NULL_TREE);

	  if (DECL_NAME (field))
	    init = build_modify_expr (comp, NOP_EXPR, init);
	  else
	    init = build2 (MODIFY_EXPR, TREE_TYPE (comp), comp, init);
	  finish_expr_stmt (init);
	}
    }
  finish_return_stmt (current_class_ref);
  finish_compound_stmt (compound_stmt);
}
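As a rough source-level sketch of what do_build_assign_ref synthesizes (illustrative types, not compiler source): direct base classes are assigned first via their own operator=, then each assignable non-static data member is copied in declaration order; const and reference members instead provoke the diagnostics emitted above.

/* Hedged illustration: the implicitly-defined copy assignment that
   do_build_assign_ref materializes behaves as if hand-written.  */
struct Base { int b; };

struct Derived : Base
{
  int d;
  /* The compiler-synthesized operator= is equivalent to:
       Derived &operator= (const Derived &other)
       {
	 Base::operator= (other);  // assign direct bases first
	 d = other.d;		   // then each non-static member
	 return *this;
       }  */
};

int
main ()
{
  Derived x, y;
  y.b = 3;
  y.d = 4;
  x = y;  /* invokes the synthesized assignment operator */
  return (x.b == 3 && x.d == 4) ? 0 : 1;
}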
void
synthesize_method (tree fndecl)
{
  bool nested = (current_function_decl != NULL_TREE);
  tree context = decl_function_context (fndecl);
  bool need_body = true;
  tree stmt;
  location_t save_input_location = input_location;
  int error_count = errorcount;
  int warning_count = warningcount;

  /* Reset the source location, we might have been previously
     deferred, and thus have saved where we were first needed.  */
  DECL_SOURCE_LOCATION (fndecl)
    = DECL_SOURCE_LOCATION (TYPE_NAME (DECL_CONTEXT (fndecl)));

  /* If we've been asked to synthesize a clone, just synthesize the
     cloned function instead.  Doing so will automatically fill in the
     body for the clone.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    fndecl = DECL_CLONED_FUNCTION (fndecl);

  /* We may be in the middle of deferred access check.  Disable
     it now.  */
  push_deferring_access_checks (dk_no_deferred);

  if (! context)
    push_to_top_level ();
  else if (nested)
    push_function_context_to (context);

  input_location = DECL_SOURCE_LOCATION (fndecl);

  start_preparsed_function (fndecl, NULL_TREE, SF_DEFAULT | SF_PRE_PARSED);
  stmt = begin_function_body ();

  if (DECL_OVERLOADED_OPERATOR_P (fndecl) == NOP_EXPR)
    {
      do_build_assign_ref (fndecl);
      need_body = false;
    }
  else if (DECL_CONSTRUCTOR_P (fndecl))
    {
      tree arg_chain = FUNCTION_FIRST_USER_PARMTYPE (fndecl);
      if (arg_chain != void_list_node)
	do_build_copy_constructor (fndecl);
      else
	finish_mem_initializers (NULL_TREE);
    }

  /* If we haven't yet generated the body of the function, just
     generate an empty compound statement.  */
  if (need_body)
    {
      tree compound_stmt;
      compound_stmt = begin_compound_stmt (BCS_FN_BODY);
      finish_compound_stmt (compound_stmt);
    }

  finish_function_body (stmt);
  expand_or_defer_fn (finish_function (0));

  input_location = save_input_location;

  if (! context)
    pop_from_top_level ();
  else if (nested)
    pop_function_context_from (context);

  pop_deferring_access_checks ();

  if (error_count != errorcount || warning_count != warningcount)
    inform ("%Hsynthesized method %qD first required here ",
	    &input_location, fndecl);
}
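A minimal sketch of when synthesize_method is invoked (illustrative user code, not compiler source): an implicitly declared special member function only gets its body generated at the point it is first required, which is also where the "first required here" note above points when synthesis emits diagnostics.

/* Hedged illustration: synthesize_method runs lazily, at first use of
   an implicitly declared special member.  */
struct S
{
  int i;
  /* No user-declared copy constructor or operator=; the compiler
     declares them implicitly and synthesizes bodies on first use.  */
};

int
main ()
{
  S a;
  a.i = 1;
  S b = a;  /* first use: copy constructor synthesized here */
  b = a;    /* first use: copy assignment synthesized here */
  return b.i;
}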
void
synthesize_method (tree fndecl)
{
  bool nested = (current_function_decl != NULL_TREE);
  tree context = decl_function_context (fndecl);
  bool need_body = true;
  tree stmt;

  if (at_eof)
    import_export_decl (fndecl);

  /* If we've been asked to synthesize a clone, just synthesize the
     cloned function instead.  Doing so will automatically fill in the
     body for the clone.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    {
      synthesize_method (DECL_CLONED_FUNCTION (fndecl));
      return;
    }

  /* We may be in the middle of deferred access check.  Disable
     it now.  */
  push_deferring_access_checks (dk_no_deferred);

  if (! context)
    push_to_top_level ();
  else if (nested)
    push_function_context_to (context);

  /* Put the function definition at the position where it is needed,
     rather than within the body of the class.  That way, an error
     during the generation of the implicit body points at the place
     where the attempt to generate the function occurs, giving the
     user a hint as to why we are attempting to generate the
     function.  */
  DECL_SOURCE_LOCATION (fndecl) = input_location;

  interface_unknown = 1;
  start_function (NULL_TREE, fndecl, NULL_TREE, SF_DEFAULT | SF_PRE_PARSED);
  clear_last_expr ();
  stmt = begin_function_body ();

  if (DECL_OVERLOADED_OPERATOR_P (fndecl) == NOP_EXPR)
    {
      do_build_assign_ref (fndecl);
      need_body = false;
    }
  else if (DECL_CONSTRUCTOR_P (fndecl))
    {
      tree arg_chain = FUNCTION_FIRST_USER_PARMTYPE (fndecl);
      if (arg_chain != void_list_node)
	do_build_copy_constructor (fndecl);
      else if (TYPE_NEEDS_CONSTRUCTING (current_class_type))
	finish_mem_initializers (NULL_TREE);
    }

  /* If we haven't yet generated the body of the function, just
     generate an empty compound statement.  */
  if (need_body)
    {
      tree compound_stmt;
      compound_stmt = begin_compound_stmt (/*has_no_scope=*/false);
      finish_compound_stmt (compound_stmt);
    }

  finish_function_body (stmt);
  expand_or_defer_fn (finish_function (0));

  extract_interface_info ();

  if (! context)
    pop_from_top_level ();
  else if (nested)
    pop_function_context_from (context);

  pop_deferring_access_checks ();
}