/* Split EXPR into an affine combination of parts and store the result in
   COMB.  Each part of the combination is expressed in TYPE; offsets that
   reach a fixed point are folded into COMB's constant term, symbolic parts
   become weighted elements.  Recurses through additive/multiplicative
   sub-expressions; anything it cannot decompose becomes a single element
   with coefficient 1 (the fall-through at the end).  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  poly_int64 bitpos, bitsize, bytepos;
  machine_mode mode;
  int unsignedp, reversep, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case POINTER_PLUS_EXPR:
      /* The pointer offset operand is always sizetype, hence the second
	 recursion uses sizetype rather than TYPE.  */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* a - b is handled as a + (-1) * b.  */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
	aff_combination_scale (&tmp, -1);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      /* Only multiplications by an integer constant stay affine; anything
	 else falls through to the generic single-element case.  */
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
	break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, wi::to_widest (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      aff_combination_add_cst (comb, -1);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
	{
	  expr = TREE_OPERAND (expr, 0);
	  tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
	  tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
	  aff_combination_add (comb, &tmp);
	  return;
	}
      /* Decompose the address of a general reference into its base object
	 plus bit position and variable offset.  */
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
				  &toffset, &mode, &unsignedp, &reversep,
				  &volatilep);
      /* Bail out on bit-field-like positions that are not byte-aligned;
	 those cannot be represented as a byte offset.  */
      if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
	break;
      aff_combination_const (comb, type, bytepos);
      if (TREE_CODE (core) == MEM_REF)
	{
	  /* Fold the MEM_REF's constant offset into the combination and
	     keep working on the underlying pointer.  */
	  tree mem_offset = TREE_OPERAND (core, 1);
	  aff_combination_add_cst (comb, wi::to_poly_widest (mem_offset));
	  core = TREE_OPERAND (core, 0);
	}
      else
	core = build_fold_addr_expr (core);

      if (TREE_CODE (core) == ADDR_EXPR)
	/* The address of a declared object is a symbolic base: keep it as
	   a single element with coefficient 1.  */
	aff_combination_add_elt (comb, core, 1);
      else
	{
	  tree_to_aff_combination (core, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      if (toffset)
	{
	  /* Variable part of the offset returned by get_inner_reference.  */
	  tree_to_aff_combination (toffset, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
	/* MEM[&obj + off] decomposes as obj + off.  */
	tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
				 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  /* MEM[p + 0] is already canonical; keep it whole.  */
	  aff_combination_elt (comb, type, expr);
	  return;
	}
      else
	/* Split MEM[p + off] into the zero-offset reference plus off, so
	   equal bases with different offsets share an element.  */
	aff_combination_elt (comb, type,
			     build2 (MEM_REF, TREE_TYPE (expr),
				     TREE_OPERAND (expr, 0),
				     build_int_cst
				       (TREE_TYPE (TREE_OPERAND (expr, 1)),
					0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    CASE_CONVERT:
      {
	tree otype = TREE_TYPE (expr);
	tree inner = TREE_OPERAND (expr, 0);
	tree itype = TREE_TYPE (inner);
	enum tree_code icode = TREE_CODE (inner);

	/* In principle this is a valid folding, but it isn't necessarily
	   an optimization, so do it here and not in fold_unary.  */
	if ((icode == PLUS_EXPR || icode == MINUS_EXPR || icode == MULT_EXPR)
	    && TREE_CODE (itype) == INTEGER_TYPE
	    && TREE_CODE (otype) == INTEGER_TYPE
	    && TYPE_PRECISION (otype) > TYPE_PRECISION (itype))
	  {
	    tree op0 = TREE_OPERAND (inner, 0), op1 = TREE_OPERAND (inner, 1);

	    /* If inner type has undefined overflow behavior, fold conversion
	       for below two cases:
		 (T1)(X *+- CST) -> (T1)X *+- (T1)CST
		 (T1)(X + X)     -> (T1)X + (T1)X.  */
	    if (TYPE_OVERFLOW_UNDEFINED (itype)
		&& (TREE_CODE (op1) == INTEGER_CST
		    || (icode == PLUS_EXPR && operand_equal_p (op0, op1, 0))))
	      {
		op0 = fold_convert (otype, op0);
		op1 = fold_convert (otype, op1);
		expr = fold_build2 (icode, otype, op0, op1);
		tree_to_aff_combination (expr, type, comb);
		return;
	      }
	    wide_int minv, maxv;
	    /* If inner type has wrapping overflow behavior, fold conversion
	       for below case:
		 (T1)(X - CST) -> (T1)X - (T1)CST
	       if X - CST doesn't overflow by range information.  Also handle
	       (T1)(X + CST) as (T1)(X - (-CST)).  */
	    if (TYPE_UNSIGNED (itype)
		&& TYPE_OVERFLOW_WRAPS (itype)
		&& TREE_CODE (op0) == SSA_NAME
		&& TREE_CODE (op1) == INTEGER_CST
		&& icode != MULT_EXPR
		&& get_range_info (op0, &minv, &maxv) == VR_RANGE)
	      {
		/* Canonicalize X + CST to X - (-CST) so a single unsigned
		   comparison against the range minimum proves no wrap.  */
		if (icode == PLUS_EXPR)
		  op1 = wide_int_to_tree (itype, -wi::to_wide (op1));
		if (wi::geu_p (minv, wi::to_wide (op1)))
		  {
		    op0 = fold_convert (otype, op0);
		    op1 = fold_convert (otype, op1);
		    expr = fold_build2 (MINUS_EXPR, otype, op0, op1);
		    tree_to_aff_combination (expr, type, comb);
		    return;
		  }
	      }
	  }
      }
      break;

    default:
      {
	/* Any compile-time (possibly poly) integer constant becomes the
	   combination's constant term.  */
	if (poly_int_tree_p (expr))
	  {
	    aff_combination_const (comb, type, wi::to_poly_widest (expr));
	    return;
	  }
	break;
      }
    }

  /* Not decomposable: represent EXPR as a single element with
     coefficient 1.  */
  aff_combination_elt (comb, type, expr);
}
/* Split EXPR into an affine combination of parts of type TYPE and store
   the result in COMB.  double_int-era variant: constants are carried as
   double_int values and INTEGER_CST is handled as an explicit switch case
   rather than in the default path.  Anything that cannot be decomposed
   becomes a single element with coefficient 1.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  enum machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      /* A constant becomes the combination's constant term.  */
      aff_combination_const (comb, type, tree_to_double_int (expr));
      return;

    case POINTER_PLUS_EXPR:
      /* The pointer offset operand is always sizetype, hence the second
	 recursion uses sizetype rather than TYPE.  */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* a - b is handled as a + (-1) * b.  */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
	aff_combination_scale (&tmp, double_int_minus_one);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      /* Only multiplications by an integer constant stay affine; anything
	 else falls through to the generic single-element case.  */
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
	break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, tree_to_double_int (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      aff_combination_add_cst (comb, double_int_minus_one);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
	{
	  expr = TREE_OPERAND (expr, 0);
	  tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
	  tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
	  aff_combination_add (comb, &tmp);
	  return;
	}
      /* Decompose the address of a general reference into its base object
	 plus bit position and variable offset.  */
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
				  &toffset, &mode, &unsignedp, &volatilep,
				  false);
      /* Bail out on bit positions that are not byte-aligned; those cannot
	 be represented as a byte offset.  */
      if (bitpos % BITS_PER_UNIT != 0)
	break;
      aff_combination_const (comb, type,
			     double_int::from_uhwi (bitpos / BITS_PER_UNIT));
      core = build_fold_addr_expr (core);
      if (TREE_CODE (core) == ADDR_EXPR)
	/* The address of a declared object is a symbolic base: keep it as
	   a single element with coefficient 1.  */
	aff_combination_add_elt (comb, core, double_int_one);
      else
	{
	  tree_to_aff_combination (core, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      if (toffset)
	{
	  /* Variable part of the offset returned by get_inner_reference.  */
	  tree_to_aff_combination (toffset, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
	/* MEM[&obj + off] decomposes as obj + off.  */
	tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
				 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  /* MEM[p + 0] is already canonical; keep it whole.  */
	  aff_combination_elt (comb, type, expr);
	  return;
	}
      else
	/* Split MEM[p + off] into the zero-offset reference plus off, so
	   equal bases with different offsets share an element.  */
	aff_combination_elt (comb, type,
			     build2 (MEM_REF, TREE_TYPE (expr),
				     TREE_OPERAND (expr, 0),
				     build_int_cst
				       (TREE_TYPE (TREE_OPERAND (expr, 1)),
					0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    default:
      break;
    }

  /* Not decomposable: represent EXPR as a single element with
     coefficient 1.  */
  aff_combination_elt (comb, type, expr);
}
/* Split EXPR into an affine combination of parts of type TYPE and store
   the result in COMB.  Oldest variant in this file: no MEM_REF handling,
   uhwi_to_double_int for the byte offset, and an explicit
   aff_combination_convert of the sizetype offset in the
   POINTER_PLUS_EXPR case before adding it.  Anything that cannot be
   decomposed becomes a single element with coefficient 1.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  enum machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      /* A constant becomes the combination's constant term.  */
      aff_combination_const (comb, type, tree_to_double_int (expr));
      return;

    case POINTER_PLUS_EXPR:
      /* The pointer offset operand is sizetype; convert its combination
	 to TYPE before merging.  */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_convert (&tmp, type);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* a - b is handled as a + (-1) * b.  */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
	aff_combination_scale (&tmp, double_int_minus_one);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      /* Only multiplications by an integer constant stay affine; anything
	 else falls through to the generic single-element case.  */
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
	break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, tree_to_double_int (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      aff_combination_add_cst (comb, double_int_minus_one);
      return;

    case ADDR_EXPR:
      /* Decompose the address of a reference into its base object plus
	 bit position and variable offset.  */
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
				  &toffset, &mode, &unsignedp, &volatilep,
				  false);
      /* Bail out on bit positions that are not byte-aligned; those cannot
	 be represented as a byte offset.  */
      if (bitpos % BITS_PER_UNIT != 0)
	break;
      aff_combination_const (comb, type,
			     uhwi_to_double_int (bitpos / BITS_PER_UNIT));
      core = build_fold_addr_expr (core);
      if (TREE_CODE (core) == ADDR_EXPR)
	/* The address of a declared object is a symbolic base: keep it as
	   a single element with coefficient 1.  */
	aff_combination_add_elt (comb, core, double_int_one);
      else
	{
	  tree_to_aff_combination (core, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      if (toffset)
	{
	  /* Variable part of the offset returned by get_inner_reference.  */
	  tree_to_aff_combination (toffset, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      return;

    default:
      break;
    }

  /* Not decomposable: represent EXPR as a single element with
     coefficient 1.  */
  aff_combination_elt (comb, type, expr);
}