/* Langhook for LANG_HOOKS_EXPR_SIZE: return the number of bytes the
   back end may copy when it duplicates EXP, or NULL_TREE when a raw
   bitwise copy is not permitted for EXP's type.  */

tree
cp_expr_size (const_tree exp)
{
  tree type = TREE_TYPE (exp);

  /* Non-class types get the default language-independent answer.  */
  if (!CLASS_TYPE_P (type))
    return lhd_expr_size (exp);

  /* The back end should not be interested in the size of an expression
     of a type with both a complex copy constructor and a complex
     assignment operator; all copies of such types must go through a
     constructor or assignment op.  Two exceptions: storing a
     CONSTRUCTOR isn't a copy, and the gimplifier will sometimes
     legitimately copy an aggregate.  In particular, for a case like:

	struct S { S(); };
	struct X { int a; S s; };
	X x = { 0 };

     the gimplifier will create a temporary with static storage
     duration, perform static initialization of the temporary, and
     then copy the result.  Since the "s" subobject is never
     constructed, this is a valid transformation.  */
  if (TYPE_HAS_COMPLEX_INIT_REF (type)
      && TYPE_HAS_COMPLEX_ASSIGN_REF (type)
      && TREE_CODE (exp) != CONSTRUCTOR
      && !CP_AGGREGATE_TYPE_P (type))
    return NULL_TREE;

  /* Copying a really-empty class moves no bytes at all.  Using the
     full CLASSTYPE_SIZE_UNIT here would be wrong for a type with
     virtual bases.  */
  if (is_really_empty_class (type))
    return size_zero_node;
  return CLASSTYPE_SIZE_UNIT (type);
}
/* Do C++-specific gimplification on *EXPR_P, emitting any needed side
   effects to PRE_P/POST_P.  Expands C++-only tree codes (pointer-to-member
   constants, vector initializers, throws, statement trees, ...) into
   GENERIC the language-independent gimplifier understands, then hands
   anything left over to c_gimplify_expr.  Returns a gimplify_status
   value (cast through int for the langhook signature).  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  /* For statement codes, temporarily switch the statement tree's
     full-expression flag to match this statement; restored below.  */
  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
	= STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case PTRMEM_CST:
      *expr_p = cplus_expand_constant (*expr_p);
      ret = GS_OK;
      break;

    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
	location_t loc = input_location;
	tree init = VEC_INIT_EXPR_INIT (*expr_p);
	int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
	gcc_assert (EXPR_HAS_LOCATION (*expr_p));
	/* Point diagnostics from build_vec_init at this expression;
	   the previous input_location is restored afterwards.  */
	input_location = EXPR_LOCATION (*expr_p);
	*expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
				  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
				  from_array,
				  tf_warning_or_error);
	/* The freshly built initialization code may itself contain
	   C++ trees; genericize it before the gimplifier sees it.  */
	cp_genericize_tree (expr_p);
	ret = GS_OK;
	input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
	 THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
	 LHS of an assignment might also be involved in the RHS, as in bug
	 25979.  */
    case INIT_EXPR:
      if (fn_contains_cilk_spawn_p (cfun)
	  && cilk_detect_spawn_and_unwrap (expr_p)
	  && !seen_error ())
	return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
	return GS_OK;
      /* Otherwise fall through.  */
    case MODIFY_EXPR:
      {
	if (fn_contains_cilk_spawn_p (cfun)
	    && cilk_detect_spawn_and_unwrap (expr_p)
	    && !seen_error ())
	  return (enum gimplify_status) gimplify_cilk_spawn (expr_p);

	/* If the back end isn't clever enough to know that the lhs and rhs
	   types are the same, add an explicit conversion.  */
	tree op0 = TREE_OPERAND (*expr_p, 0);
	tree op1 = TREE_OPERAND (*expr_p, 1);
	if (!error_operand_p (op0) && !error_operand_p (op1)
	    && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
		|| TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
	    && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
	  TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
					      TREE_TYPE (op0), op1);
	else if ((is_gimple_lvalue (op1) || INDIRECT_REF_P (op1)
		  || (TREE_CODE (op1) == CONSTRUCTOR
		      && CONSTRUCTOR_NELTS (op1) == 0
		      && !TREE_CLOBBER_P (op1))
		  || (TREE_CODE (op1) == CALL_EXPR
		      && !CALL_EXPR_RETURN_SLOT_OPT (op1)))
		 && is_really_empty_class (TREE_TYPE (op0)))
	  {
	    /* Remove any copies of empty classes.  We check that the RHS
	       has a simple form so that TARGET_EXPRs and non-empty
	       CONSTRUCTORs get reduced properly, and we leave the return
	       slot optimization alone because it isn't a copy (FIXME so it
	       shouldn't be represented as one).

	       Also drop volatile variables on the RHS to avoid infinite
	       recursion from gimplify_expr trying to load the value.  */
	    if (!TREE_SIDE_EFFECTS (op1)
		|| (DECL_P (op1) && TREE_THIS_VOLATILE (op1)))
	      *expr_p = op0;
	    else if (TREE_CODE (op1) == MEM_REF
		     && TREE_THIS_VOLATILE (op1))
	      {
		/* Similarly for volatile MEM_REFs on the RHS.  */
		if (!TREE_SIDE_EFFECTS (TREE_OPERAND (op1, 0)))
		  *expr_p = op0;
		else
		  /* Keep the address computation's side effects, but
		     drop the actual load.  */
		  *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
				    TREE_OPERAND (op1, 0), op0);
	      }
	    else
	      /* RHS has side effects we must preserve; sequence it
		 before yielding the LHS instead of copying.  */
	      *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
				op0, op1);
	  }
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      /* These statement codes must never survive to this point; they
	 are presumably lowered earlier — confirm against the
	 genericize pass.  */
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
	tree arg = TREE_OPERAND (*expr_p, 0);
	tree type = TREE_TYPE (*expr_p);
	*expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
	  : arg;
	ret = GS_OK;
      }
      break;

    case CILK_SPAWN_STMT:
      gcc_assert
	(fn_contains_cilk_spawn_p (cfun)
	 && cilk_detect_spawn_and_unwrap (expr_p));

      /* If errors are seen, then just process it as a CALL_EXPR.  */
      if (!seen_error ())
	return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
      /* FALLTHRU — on error, treat the spawn as a plain call.  */

    case CALL_EXPR:
      if (fn_contains_cilk_spawn_p (cfun)
	  && cilk_detect_spawn_and_unwrap (expr_p)
	  && !seen_error ())
	return (enum gimplify_status) gimplify_cilk_spawn (expr_p);
      /* FALLTHRU — let the C gimplifier handle the ordinary call.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}