Example 1
void
objcp_finish_function (void)
{
  /* The C++ flavor of 'finish_function' does not generate RTL -- one has
     to call 'expand_or_defer_fn' to do that.  */
  expand_or_defer_fn (finish_function (0));
}
Example 2
static void
finish_cdtor (tree body)
{
  tree scope;
  tree block;

  scope = add_scope_stmt (/*begin_p=*/0, /*partial_p=*/0);
  block = poplevel (0, 0, 0);
  SCOPE_STMT_BLOCK (TREE_PURPOSE (scope)) = block;
  SCOPE_STMT_BLOCK (TREE_VALUE (scope)) = block;

  RECHAIN_STMTS (body, COMPOUND_BODY (body));

  finish_function ();
}
Example 3
void
maybe_add_lambda_conv_op (tree type)
{
  bool nested = (cfun != NULL);
  bool nested_def = decl_function_context (TYPE_MAIN_DECL (type));
  tree callop = lambda_function (type);

  if (LAMBDA_EXPR_CAPTURE_LIST (CLASSTYPE_LAMBDA_EXPR (type)) != NULL_TREE)
    return;

  if (processing_template_decl)
    return;

  bool const generic_lambda_p
    = (DECL_TEMPLATE_INFO (callop)
    && DECL_TEMPLATE_RESULT (DECL_TI_TEMPLATE (callop)) == callop);

  if (!generic_lambda_p && DECL_INITIAL (callop) == NULL_TREE)
    {
      /* If the op() wasn't instantiated due to errors, give up.  */
      gcc_assert (errorcount || sorrycount);
      return;
    }

  /* Non-template conversion operators are defined directly with build_call_a
     and using DIRECT_ARGVEC for arguments (including 'this').  Templates are
     deferred and the CALL is built in-place.  In the case of a deduced return
     call op, the decltype expression, DECLTYPE_CALL, used as a substitute for
     the return type is also built in-place.  The arguments of DECLTYPE_CALL in
     the return expression may differ in flags from those in the body CALL.  In
     particular, parameter pack expansions are marked PACK_EXPANSION_LOCAL_P in
     the body CALL, but not in DECLTYPE_CALL.  */
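  /* Illustrative aside, not part of the original source: of the two lambdas
       [] (int i) { return i; }     -- non-template op(): direct build_call_a
       [] (auto x) { return x; }    -- generic lambda: deferred CALL, plus a
                                       DECLTYPE_CALL while the return type is
                                       still deduced
     the first takes the non-template strategy and the second the template
     strategy described above.  */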

  vec<tree, va_gc> *direct_argvec = 0;
  tree decltype_call = 0, call = 0;
  tree fn_result = TREE_TYPE (TREE_TYPE (callop));

  if (generic_lambda_p)
    {
      /* Prepare the dependent member call for the static member function
	 '_FUN' and, potentially, prepare another call to be used in a decltype
	 return expression for a deduced return call op to allow for simple
	 implementation of the conversion operator.  */

      tree instance = build_nop (type, null_pointer_node);
      tree objfn = build_min (COMPONENT_REF, NULL_TREE,
			      instance, DECL_NAME (callop), NULL_TREE);
      int nargs = list_length (DECL_ARGUMENTS (callop)) - 1;

      call = prepare_op_call (objfn, nargs);
      if (type_uses_auto (fn_result))
	decltype_call = prepare_op_call (objfn, nargs);
    }
  else
    {
      direct_argvec = make_tree_vector ();
      direct_argvec->quick_push (build1 (NOP_EXPR,
					 TREE_TYPE (DECL_ARGUMENTS (callop)),
					 null_pointer_node));
    }

  /* Copy CALLOP's argument list (as per 'copy_list') as FN_ARGS in order to
     declare the static member function "_FUN" below.  For each arg append to
     DIRECT_ARGVEC (for the non-template case) or populate the pre-allocated
     call args (for the template case).  If a parameter pack is found, expand
     it, flagging it as PACK_EXPANSION_LOCAL_P for the body call.  */

  tree fn_args = NULL_TREE;
  {
    int ix = 0;
    tree src = DECL_CHAIN (DECL_ARGUMENTS (callop));
    tree tgt;

    while (src)
      {
	tree new_node = copy_node (src);

	if (!fn_args)
	  fn_args = tgt = new_node;
	else
	  {
	    TREE_CHAIN (tgt) = new_node;
	    tgt = new_node;
	  }

	mark_exp_read (tgt);

	if (generic_lambda_p)
	  {
	    if (DECL_PACK_P (tgt))
	      {
		tree a = make_pack_expansion (tgt);
		if (decltype_call)
		  CALL_EXPR_ARG (decltype_call, ix) = copy_node (a);
		PACK_EXPANSION_LOCAL_P (a) = true;
		CALL_EXPR_ARG (call, ix) = a;
	      }
	    else
	      {
		tree a = convert_from_reference (tgt);
		CALL_EXPR_ARG (call, ix) = a;
		if (decltype_call)
		  CALL_EXPR_ARG (decltype_call, ix) = copy_node (a);
	      }
	    ++ix;
	  }
	else
	  vec_safe_push (direct_argvec, tgt);

	src = TREE_CHAIN (src);
      }
  }


  if (generic_lambda_p)
    {
      if (decltype_call)
	{
	  ++processing_template_decl;
	  fn_result = finish_decltype_type
	    (decltype_call, /*id_expression_or_member_access_p=*/false,
	     tf_warning_or_error);
	  --processing_template_decl;
	}
    }
  else
    call = build_call_a (callop,
			 direct_argvec->length (),
			 direct_argvec->address ());

  CALL_FROM_THUNK_P (call) = 1;

  tree stattype = build_function_type (fn_result, FUNCTION_ARG_CHAIN (callop));

  /* First build up the conversion op.  */

  tree rettype = build_pointer_type (stattype);
  tree name = mangle_conv_op_name_for_type (rettype);
  tree thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
  tree fntype = build_method_type_directly (thistype, rettype, void_list_node);
  tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
  tree fn = convfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);

  if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn
      && DECL_ALIGN (fn) < 2 * BITS_PER_UNIT)
    DECL_ALIGN (fn) = 2 * BITS_PER_UNIT;

  SET_OVERLOADED_OPERATOR_CODE (fn, TYPE_EXPR);
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_ARGUMENTS (fn) = build_this_parm (fntype, TYPE_QUAL_CONST);
  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  add_method (type, fn, NULL_TREE);

  /* Generic thunk code fails for varargs; we'll complain in mark_used if
     the conversion op is used.  */
  if (varargs_function_p (callop))
    {
      DECL_DELETED_FN (fn) = 1;
      return;
    }

  /* Now build up the thunk to be returned.  */

  name = get_identifier ("_FUN");
  tree statfn = build_lang_decl (FUNCTION_DECL, name, stattype);
  fn = statfn;
  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
  if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn
      && DECL_ALIGN (fn) < 2 * BITS_PER_UNIT)
    DECL_ALIGN (fn) = 2 * BITS_PER_UNIT;
  grokclassfn (type, fn, NO_SPECIAL);
  set_linkage_according_to_type (type, fn);
  rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof);
  DECL_IN_AGGR_P (fn) = 1;
  DECL_ARTIFICIAL (fn) = 1;
  DECL_NOT_REALLY_EXTERN (fn) = 1;
  DECL_DECLARED_INLINE_P (fn) = 1;
  DECL_STATIC_FUNCTION_P (fn) = 1;
  DECL_ARGUMENTS (fn) = fn_args;
  for (tree arg = fn_args; arg; arg = DECL_CHAIN (arg))
    {
      /* Avoid duplicate -Wshadow warnings.  */
      DECL_NAME (arg) = NULL_TREE;
      DECL_CONTEXT (arg) = fn;
    }
  if (nested_def)
    DECL_INTERFACE_KNOWN (fn) = 1;

  if (generic_lambda_p)
    fn = add_inherited_template_parms (fn, DECL_TI_TEMPLATE (callop));

  add_method (type, fn, NULL_TREE);

  if (nested)
    push_function_context ();
  else
    /* Still increment function_depth so that we don't GC in the
       middle of an expression.  */
    ++function_depth;

  /* Generate the body of the thunk.  */

  start_preparsed_function (statfn, NULL_TREE,
			    SF_PRE_PARSED | SF_INCLASS_INLINE);
  if (DECL_ONE_ONLY (statfn))
    {
      /* Put the thunk in the same comdat group as the call op.  */
      cgraph_node::get_create (statfn)->add_to_same_comdat_group
	(cgraph_node::get_create (callop));
    }
  tree body = begin_function_body ();
  tree compound_stmt = begin_compound_stmt (0);
  if (!generic_lambda_p)
    {
      set_flags_from_callee (call);
      if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
	call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
    }
  call = convert_from_reference (call);
  finish_return_stmt (call);

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline*/2);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  /* Generate the body of the conversion op.  */

  start_preparsed_function (convfn, NULL_TREE,
			    SF_PRE_PARSED | SF_INCLASS_INLINE);
  body = begin_function_body ();
  compound_stmt = begin_compound_stmt (0);

  /* decl_needed_p needs to see that it's used.  */
  TREE_USED (statfn) = 1;
  finish_return_stmt (decay_conversion (statfn, tf_warning_or_error));

  finish_compound_stmt (compound_stmt);
  finish_function_body (body);

  fn = finish_function (/*inline*/2);
  if (!generic_lambda_p)
    expand_or_defer_fn (fn);

  if (nested)
    pop_function_context ();
  else
    --function_depth;
}
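
As a rough illustration of what Example 3 synthesizes (the snippet below is not part of the GCC sources): for a capture-less lambda, maybe_add_lambda_conv_op builds the conversion operator to a plain function pointer together with the static "_FUN" forwarder that the operator returns. A minimal C++ translation unit exercising both the non-template and the generic-lambda paths:

#include <cstdio>

int main ()
{
  /* Capture-less lambda: the closure type gets a conversion operator to a
     function pointer, implemented via the synthesized static "_FUN".  */
  auto twice = [] (int i) { return 2 * i; };
  int (*fp) (int) = twice;          /* uses the synthesized conversion op */

  /* Generic lambda (C++14): takes the generic_lambda_p path above; the
     conversion operator is itself a template.  */
  auto ident = [] (auto x) { return x; };
  long (*gp) (long) = ident;

  std::printf ("%d %ld\n", fp (21), gp (7L));
  return 0;
}
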
Example 4
void
synthesize_method (tree fndecl)
{
    bool nested = (current_function_decl != NULL_TREE);
    tree context = decl_function_context (fndecl);
    bool need_body = true;
    tree stmt;
    location_t save_input_location = input_location;
    int error_count = errorcount;
    int warning_count = warningcount;

    /* Reset the source location, we might have been previously
       deferred, and thus have saved where we were first needed.  */
    DECL_SOURCE_LOCATION (fndecl)
        = DECL_SOURCE_LOCATION (TYPE_NAME (DECL_CONTEXT (fndecl)));

    /* If we've been asked to synthesize a clone, just synthesize the
       cloned function instead.  Doing so will automatically fill in the
       body for the clone.  */
    if (DECL_CLONED_FUNCTION_P (fndecl))
        fndecl = DECL_CLONED_FUNCTION (fndecl);

    /* We may be in the middle of deferred access check.  Disable
       it now.  */
    push_deferring_access_checks (dk_no_deferred);

    if (! context)
        push_to_top_level ();
    else if (nested)
        push_function_context_to (context);

    input_location = DECL_SOURCE_LOCATION (fndecl);

    start_preparsed_function (fndecl, NULL_TREE, SF_DEFAULT | SF_PRE_PARSED);
    stmt = begin_function_body ();

    if (DECL_OVERLOADED_OPERATOR_P (fndecl) == NOP_EXPR)
    {
        do_build_assign_ref (fndecl);
        need_body = false;
    }
    else if (DECL_CONSTRUCTOR_P (fndecl))
    {
        tree arg_chain = FUNCTION_FIRST_USER_PARMTYPE (fndecl);
        if (arg_chain != void_list_node)
            do_build_copy_constructor (fndecl);
        else
            finish_mem_initializers (NULL_TREE);
    }

    /* If we haven't yet generated the body of the function, just
       generate an empty compound statement.  */
    if (need_body)
    {
        tree compound_stmt;
        compound_stmt = begin_compound_stmt (BCS_FN_BODY);
        finish_compound_stmt (compound_stmt);
    }

    finish_function_body (stmt);
    expand_or_defer_fn (finish_function (0));

    input_location = save_input_location;

    if (! context)
        pop_from_top_level ();
    else if (nested)
        pop_function_context_from (context);

    pop_deferring_access_checks ();

    if (error_count != errorcount || warning_count != warningcount)
        inform ("%Hsynthesized method %qD first required here ",
                &input_location, fndecl);
}
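
For orientation (a sketch, not part of the original sources): synthesize_method fills in the body of an implicitly-defined special member function the first time it is needed. The DECL_OVERLOADED_OPERATOR_P == NOP_EXPR branch above handles the implicit copy assignment operator, as in:

#include <string>

struct Employee
{
  std::string name;
  int id;
  /* No user-declared copy assignment: the implicit one is non-trivial
     (it must assign the std::string member), so its body is synthesized
     when it is first used.  */
};

int main ()
{
  Employee a;
  a.name = "a";
  a.id = 1;
  Employee b;
  b = a;            /* first use: triggers synthesis of Employee::operator= */
  return b.id;
}
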
Example 5
void
use_thunk (tree thunk_fndecl, bool emit_p)
{
    tree a, t, function, alias;
    tree virtual_offset;
    HOST_WIDE_INT fixed_offset, virtual_value;
    bool this_adjusting = DECL_THIS_THUNK_P (thunk_fndecl);

    /* We should have called finish_thunk to give it a name.  */
    gcc_assert (DECL_NAME (thunk_fndecl));

    /* We should never be using an alias, always refer to the
       aliased thunk.  */
    gcc_assert (!THUNK_ALIAS (thunk_fndecl));

    if (TREE_ASM_WRITTEN (thunk_fndecl))
        return;

    function = THUNK_TARGET (thunk_fndecl);
    if (DECL_RESULT (thunk_fndecl))
        /* We already turned this thunk into an ordinary function.
           There's no need to process this thunk again.  */
        return;

    if (DECL_THUNK_P (function))
        /* The target is itself a thunk, process it now.  */
        use_thunk (function, emit_p);

    /* Thunks are always addressable; they only appear in vtables.  */
    TREE_ADDRESSABLE (thunk_fndecl) = 1;

    /* Figure out what function is being thunked to.  It's referenced in
       this translation unit.  */
    TREE_ADDRESSABLE (function) = 1;
    mark_used (function);
    if (!emit_p)
        return;

    if (TARGET_USE_LOCAL_THUNK_ALIAS_P (function))
        alias = make_alias_for_thunk (function);
    else
        alias = function;

    fixed_offset = THUNK_FIXED_OFFSET (thunk_fndecl);
    virtual_offset = THUNK_VIRTUAL_OFFSET (thunk_fndecl);

    if (virtual_offset)
    {
        if (!this_adjusting)
            virtual_offset = BINFO_VPTR_FIELD (virtual_offset);
        virtual_value = tree_low_cst (virtual_offset, /*pos=*/0);
        gcc_assert (virtual_value);
    }
    else
        virtual_value = 0;

    /* And, if we need to emit the thunk, it's used.  */
    mark_used (thunk_fndecl);
    /* This thunk is actually defined.  */
    DECL_EXTERNAL (thunk_fndecl) = 0;
    /* The linkage of the function may have changed.  FIXME in linkage
       rewrite.  */
    TREE_PUBLIC (thunk_fndecl) = TREE_PUBLIC (function);
    DECL_VISIBILITY (thunk_fndecl) = DECL_VISIBILITY (function);
    DECL_VISIBILITY_SPECIFIED (thunk_fndecl)
        = DECL_VISIBILITY_SPECIFIED (function);
    if (DECL_ONE_ONLY (function))
        make_decl_one_only (thunk_fndecl);

    if (flag_syntax_only)
    {
        TREE_ASM_WRITTEN (thunk_fndecl) = 1;
        return;
    }

    push_to_top_level ();

    if (TARGET_USE_LOCAL_THUNK_ALIAS_P (function)
            && targetm.have_named_sections)
    {
        resolve_unique_section (function, 0, flag_function_sections);

        if (DECL_SECTION_NAME (function) != NULL && DECL_ONE_ONLY (function))
        {
            resolve_unique_section (thunk_fndecl, 0, flag_function_sections);

            /* Output the thunk into the same section as function.  */
            DECL_SECTION_NAME (thunk_fndecl) = DECL_SECTION_NAME (function);
        }
    }

    /* The back-end expects DECL_INITIAL to contain a BLOCK, so we
       create one.  */
    DECL_INITIAL (thunk_fndecl) = make_node (BLOCK);

    /* Set up cloned argument trees for the thunk.  */
    t = NULL_TREE;
    for (a = DECL_ARGUMENTS (function); a; a = TREE_CHAIN (a))
    {
        tree x = copy_node (a);
        TREE_CHAIN (x) = t;
        DECL_CONTEXT (x) = thunk_fndecl;
        SET_DECL_RTL (x, NULL_RTX);
        DECL_HAS_VALUE_EXPR_P (x) = 0;
        t = x;
    }
    a = nreverse (t);
    DECL_ARGUMENTS (thunk_fndecl) = a;
    BLOCK_VARS (DECL_INITIAL (thunk_fndecl)) = a;

    if (this_adjusting
            && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
                    virtual_value, alias))
    {
        const char *fnname;
        current_function_decl = thunk_fndecl;
        DECL_RESULT (thunk_fndecl)
            = build_decl (RESULT_DECL, 0, integer_type_node);
        fnname = XSTR (XEXP (DECL_RTL (thunk_fndecl), 0), 0);
        init_function_start (thunk_fndecl);
        current_function_is_thunk = 1;
        assemble_start_function (thunk_fndecl, fnname);

        targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
                                         fixed_offset, virtual_value, alias);

        assemble_end_function (thunk_fndecl, fnname);
        init_insn_lengths ();
        current_function_decl = 0;
        cfun = 0;
        TREE_ASM_WRITTEN (thunk_fndecl) = 1;
    }
    else
    {
        /* If this is a covariant thunk, or we don't have the necessary
           code for efficient thunks, generate a thunk function that
           just makes a call to the real function.  Unfortunately, this
           doesn't work for varargs.  */
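        /* Illustrative aside, not in the original source: a covariant
           override such as
               struct B { virtual B *clone (); };
               struct D : virtual B { virtual D *clone (); };
           needs a return-adjusting ("covariant") thunk.  Such a thunk is not
           this_adjusting, so it cannot be emitted via output_mi_thunk and
           takes this call-based path instead.  */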

        if (varargs_function_p (function))
            error ("generic thunk code fails for method %q#D which uses %<...%>",
                   function);

        DECL_RESULT (thunk_fndecl) = NULL_TREE;

        start_preparsed_function (thunk_fndecl, NULL_TREE, SF_PRE_PARSED);
        /* We don't bother with a body block for thunks.  */

        /* There's no need to check accessibility inside the thunk body.  */
        push_deferring_access_checks (dk_no_check);

        t = a;
        if (this_adjusting)
            t = thunk_adjust (t, /*this_adjusting=*/1,
                              fixed_offset, virtual_offset);

        /* Build up the call to the real function.  */
        t = tree_cons (NULL_TREE, t, NULL_TREE);
        for (a = TREE_CHAIN (a); a; a = TREE_CHAIN (a))
            t = tree_cons (NULL_TREE, a, t);
        t = nreverse (t);
        t = build_call (alias, t);
        CALL_FROM_THUNK_P (t) = 1;

        if (VOID_TYPE_P (TREE_TYPE (t)))
            finish_expr_stmt (t);
        else
        {
            if (!this_adjusting)
            {
                tree cond = NULL_TREE;

                if (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE)
                {
                    /* If the return type is a pointer, we need to
                       protect against NULL.  We know there will be an
                       adjustment, because that's why we're emitting a
                       thunk.  */
                    t = save_expr (t);
                    cond = cp_convert (boolean_type_node, t);
                }

                t = thunk_adjust (t, /*this_adjusting=*/0,
                                  fixed_offset, virtual_offset);
                if (cond)
                    t = build3 (COND_EXPR, TREE_TYPE (t), cond, t,
                                cp_convert (TREE_TYPE (t), integer_zero_node));
            }
            if (IS_AGGR_TYPE (TREE_TYPE (t)))
                t = build_cplus_new (TREE_TYPE (t), t);
            finish_return_stmt (t);
        }

        /* Since we want to emit the thunk, we explicitly mark its name as
           referenced.  */
        mark_decl_referenced (thunk_fndecl);

        /* But we don't want debugging information about it.  */
        DECL_IGNORED_P (thunk_fndecl) = 1;

        /* Re-enable access control.  */
        pop_deferring_access_checks ();

        thunk_fndecl = finish_function (0);
        tree_lowering_passes (thunk_fndecl);
        expand_body (thunk_fndecl);
    }

    pop_from_top_level ();
}
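
A minimal user-level sketch of when Example 5 runs (illustrative only, assuming the usual Itanium-style vtable layout): overriding a virtual function inherited from a non-primary base gives the override a this-adjusting entry in that base's vtable, and use_thunk is what emits that entry.

struct A { virtual void f () {} };
struct B { virtual void g () {} };

struct C : A, B
{
  void g () {}     /* reached through B's vtable via a this-adjusting thunk */
};

int main ()
{
  C c;
  B *pb = &c;      /* pb points at the B subobject, at a non-zero offset in C */
  pb->g ();        /* dispatches through a thunk that adjusts `this` back to C */
  return 0;
}
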
Example 6
bool
maybe_clone_body (tree fn)
{
  tree clone;
  bool first = true;
/* APPLE LOCAL begin ARM structor thunks */
  tree clone_to_call;
  struct clone_info info;
/* APPLE LOCAL end ARM structor thunks */

  /* We only clone constructors and destructors.  */
  if (!DECL_MAYBE_IN_CHARGE_CONSTRUCTOR_P (fn)
      && !DECL_MAYBE_IN_CHARGE_DESTRUCTOR_P (fn))
    return 0;

  /* Emit the DWARF1 abstract instance.  */
  (*debug_hooks->deferred_inline_function) (fn);

/* APPLE LOCAL begin ARM structor thunks */
  /* Figure out whether we can use the 'thunk' implementation,
     and if so on which clones. */
  info.next_clone = 0;
  info.which_thunks_ok = compute_use_thunks (fn);
/* APPLE LOCAL end ARM structor thunks */

  /* We know that any clones immediately follow FN in the TYPE_METHODS
     list.  */
  push_to_top_level ();
  FOR_EACH_CLONE (clone, fn)
    {
      tree parm;
      tree clone_parm;
      int parmno;
      splay_tree decl_map;

      /* Update CLONE's source position information to match FN's.  */
      DECL_SOURCE_LOCATION (clone) = DECL_SOURCE_LOCATION (fn);
      DECL_INLINE (clone) = DECL_INLINE (fn);
      DECL_DECLARED_INLINE_P (clone) = DECL_DECLARED_INLINE_P (fn);
      /* LLVM LOCAL begin inlinehint attribute */
      DECL_EXPLICIT_INLINE_P (clone) = DECL_EXPLICIT_INLINE_P (fn);
      /* LLVM LOCAL end inlinehint attribute */
      DECL_COMDAT (clone) = DECL_COMDAT (fn);
      DECL_WEAK (clone) = DECL_WEAK (fn);
      DECL_ONE_ONLY (clone) = DECL_ONE_ONLY (fn);
      DECL_SECTION_NAME (clone) = DECL_SECTION_NAME (fn);
      DECL_USE_TEMPLATE (clone) = DECL_USE_TEMPLATE (fn);
      DECL_EXTERNAL (clone) = DECL_EXTERNAL (fn);
      DECL_INTERFACE_KNOWN (clone) = DECL_INTERFACE_KNOWN (fn);
      DECL_NOT_REALLY_EXTERN (clone) = DECL_NOT_REALLY_EXTERN (fn);
      TREE_PUBLIC (clone) = TREE_PUBLIC (fn);
      DECL_VISIBILITY (clone) = DECL_VISIBILITY (fn);
      DECL_VISIBILITY_SPECIFIED (clone) = DECL_VISIBILITY_SPECIFIED (fn);

      /* Adjust the parameter names and locations.  */
      parm = DECL_ARGUMENTS (fn);
      clone_parm = DECL_ARGUMENTS (clone);
      /* Update the `this' parameter, which is always first.  */
      update_cloned_parm (parm, clone_parm, first);
      parm = TREE_CHAIN (parm);
      clone_parm = TREE_CHAIN (clone_parm);
      if (DECL_HAS_IN_CHARGE_PARM_P (fn))
	parm = TREE_CHAIN (parm);
      if (DECL_HAS_VTT_PARM_P (fn))
	parm = TREE_CHAIN (parm);
      if (DECL_HAS_VTT_PARM_P (clone))
	clone_parm = TREE_CHAIN (clone_parm);
      for (; parm;
	   parm = TREE_CHAIN (parm), clone_parm = TREE_CHAIN (clone_parm))
	/* Update this parameter.  */
	update_cloned_parm (parm, clone_parm, first);

      /* Start processing the function.  */
      start_preparsed_function (clone, NULL_TREE, SF_PRE_PARSED);

      /* Remap the parameters.  */
      decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
      for (parmno = 0,
	     parm = DECL_ARGUMENTS (fn),
	     clone_parm = DECL_ARGUMENTS (clone);
	   parm;
	   ++parmno,
	     parm = TREE_CHAIN (parm))
	{
	  /* Map the in-charge parameter to an appropriate constant.  */
	  if (DECL_HAS_IN_CHARGE_PARM_P (fn) && parmno == 1)
	    {
	      tree in_charge;
	      in_charge = in_charge_arg_for_name (DECL_NAME (clone));
	      splay_tree_insert (decl_map,
				 (splay_tree_key) parm,
				 (splay_tree_value) in_charge);
	      /* APPLE LOCAL ARM structor thunks */
	      info.in_charge_value [info.next_clone] = in_charge;
	    }
	  else if (DECL_ARTIFICIAL (parm)
		   && DECL_NAME (parm) == vtt_parm_identifier)
	    {
	      /* For a subobject constructor or destructor, the next
		 argument is the VTT parameter.  Remap the VTT_PARM
		 from the CLONE to this parameter.  */
	      if (DECL_HAS_VTT_PARM_P (clone))
		{
		  DECL_ABSTRACT_ORIGIN (clone_parm) = parm;
		  splay_tree_insert (decl_map,
				     (splay_tree_key) parm,
				     (splay_tree_value) clone_parm);
		  clone_parm = TREE_CHAIN (clone_parm);
		}
	      /* Otherwise, map the VTT parameter to `NULL'.  */
	      else
		{
		  splay_tree_insert (decl_map,
				     (splay_tree_key) parm,
				     (splay_tree_value) null_pointer_node);
		}
	    }
	  /* Map other parameters to their equivalents in the cloned
	     function.  */
	  else
	    {
	      splay_tree_insert (decl_map,
				 (splay_tree_key) parm,
				 (splay_tree_value) clone_parm);
	      clone_parm = TREE_CHAIN (clone_parm);
	    }
	}

      if (targetm.cxx.cdtor_returns_this ())
	{
	  parm = DECL_RESULT (fn);
	  clone_parm = DECL_RESULT (clone);
	  splay_tree_insert (decl_map, (splay_tree_key) parm,
			     (splay_tree_value) clone_parm);
	}
      /* APPLE LOCAL begin ARM structor thunks */
      clone_to_call = find_earlier_clone (&info);
      if (clone_to_call)
	/* Bodies are identical; replace later one with call to an
	   earlier one. */
	thunk_body (clone, fn, clone_to_call);
      else
	/* Clone the body.  */
	clone_body (clone, fn, decl_map);
      /* APPLE LOCAL end ARM structor thunks */

      /* The clone can throw iff the original function can throw.  */
      cp_function_chain->can_throw = !TREE_NOTHROW (fn);

      /* Now, expand this function into RTL, if appropriate.  */
      finish_function (0);
      BLOCK_ABSTRACT_ORIGIN (DECL_INITIAL (clone)) = DECL_INITIAL (fn);
      expand_or_defer_fn (clone);
      first = false;
      /* APPLE LOCAL begin ARM structor thunks */
      info.clones [info.next_clone] = clone;
      info.next_clone++;
      /* APPLE LOCAL end ARM structor thunks */
    }
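
A rough sketch of the source-level input that drives Example 6 (the C++ below is not taken from the GCC tree): a constructor or destructor is written once but cloned into complete-object and base-object variants (and, where required, a deleting destructor); maybe_clone_body copies the single body into each clone, remapping the in-charge and VTT parameters along the way.

struct VBase
{
  int v;
  VBase () : v (0) {}
};

struct Derived : virtual VBase
{
  int d;
  Derived () : d (1) {}   /* one body, cloned into "in charge" and
                             "not in charge" constructors */
};

int main ()
{
  Derived x;               /* calls the complete-object constructor clone */
  return x.d;
}
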
Example 7
void
synthesize_method (tree fndecl)
{
  bool nested = (current_function_decl != NULL_TREE);
  tree context = decl_function_context (fndecl);
  bool need_body = true;
  tree stmt;

  if (at_eof)
    import_export_decl (fndecl);

  /* If we've been asked to synthesize a clone, just synthesize the
     cloned function instead.  Doing so will automatically fill in the
     body for the clone.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    {
      synthesize_method (DECL_CLONED_FUNCTION (fndecl));
      return;
    }

  /* We may be in the middle of deferred access check.  Disable
     it now.  */
  push_deferring_access_checks (dk_no_deferred);

  if (! context)
    push_to_top_level ();
  else if (nested)
    push_function_context_to (context);

  /* Put the function definition at the position where it is needed,
     rather than within the body of the class.  That way, an error
     during the generation of the implicit body points at the place
     where the attempt to generate the function occurs, giving the
     user a hint as to why we are attempting to generate the
     function.  */
  DECL_SOURCE_LOCATION (fndecl) = input_location;

  interface_unknown = 1;
  start_function (NULL_TREE, fndecl, NULL_TREE, SF_DEFAULT | SF_PRE_PARSED);
  clear_last_expr ();
  stmt = begin_function_body ();

  if (DECL_OVERLOADED_OPERATOR_P (fndecl) == NOP_EXPR)
    {
      do_build_assign_ref (fndecl);
      need_body = false;
    }
  else if (DECL_CONSTRUCTOR_P (fndecl))
    {
      tree arg_chain = FUNCTION_FIRST_USER_PARMTYPE (fndecl);
      if (arg_chain != void_list_node)
	do_build_copy_constructor (fndecl);
      else if (TYPE_NEEDS_CONSTRUCTING (current_class_type))
	finish_mem_initializers (NULL_TREE);
    }

  /* If we haven't yet generated the body of the function, just
     generate an empty compound statement.  */
  if (need_body)
    {
      tree compound_stmt;
      compound_stmt = begin_compound_stmt (/*has_no_scope=*/false);
      finish_compound_stmt (compound_stmt);
    }

  finish_function_body (stmt);
  expand_or_defer_fn (finish_function (0));

  extract_interface_info ();
  if (! context)
    pop_from_top_level ();
  else if (nested)
    pop_function_context_from (context);

  pop_deferring_access_checks ();
}
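
Example 7 is an earlier revision of the routine shown in Example 4. As a hedged sketch of its constructor branch (not from the GCC sources): a constructor with user parameters at this point is the implicit copy constructor, built by do_build_copy_constructor, while the parameterless case is the implicit default constructor, which only needs its member initializers finished.

struct Tracked
{
  int n;
  Tracked () : n (0) {}
  Tracked (const Tracked &other) : n (other.n + 1) {}
};

struct Wrapper
{
  Tracked t;
  int id;
  /* No user-declared copy constructor: the implicit one is non-trivial
     (it must call Tracked's copy constructor), so its body is synthesized
     on first use.  */
};

int main ()
{
  Wrapper a;
  a.id = 7;
  Wrapper b (a);   /* first use: synthesizes Wrapper::Wrapper (const Wrapper &) */
  return b.id + b.t.n;
}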