void tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb, struct pointer_map_t **cache) { unsigned i; aff_tree to_add, current, curre; tree e, def, rhs; double_int scale; void **slot; struct name_expansion *exp; tree_to_aff_combination (expr, type, comb); aff_combination_zero (&to_add, type); for (i = 0; i < comb->n; i++) { e = comb->elts[i].val; if (TREE_CODE (e) != SSA_NAME) continue; def = SSA_NAME_DEF_STMT (e); if (TREE_CODE (def) != GIMPLE_MODIFY_STMT || GIMPLE_STMT_OPERAND (def, 0) != e) continue; rhs = GIMPLE_STMT_OPERAND (def, 1); if (TREE_CODE (rhs) != SSA_NAME && !EXPR_P (rhs) && !is_gimple_min_invariant (rhs)) continue; /* We do not know whether the reference retains its value at the place where the expansion is used. */ if (REFERENCE_CLASS_P (rhs)) continue; if (!*cache) *cache = pointer_map_create (); slot = pointer_map_insert (*cache, e); exp = *slot; if (!exp) { exp = XNEW (struct name_expansion); exp->in_progress = 1; *slot = exp; tree_to_aff_combination_expand (rhs, type, ¤t, cache); exp->expansion = current; exp->in_progress = 0; } else {
/* Changes the type of the affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned src, dst;
  tree old_type = comb->type;

  /* Widening cannot be done element-wise: rebuild the combination from
     a converted tree instead.  */
  if (TYPE_PRECISION (type) > TYPE_PRECISION (old_type))
    {
      tree widened = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (widened, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  /* Same precision: only the recorded type (and REST above) change.  */
  if (TYPE_PRECISION (type) == TYPE_PRECISION (old_type))
    return;

  /* Narrowing: re-extend the offset and every coefficient for the new
     precision, compacting away elements whose coefficient becomes zero.  */
  comb->offset = double_int_ext_for_comb (comb->offset, comb);
  dst = 0;
  for (src = 0; src < comb->n; src++)
    {
      double_int coef = double_int_ext_for_comb (comb->elts[src].coef, comb);
      if (coef.is_zero ())
	continue;
      comb->elts[dst].coef = coef;
      comb->elts[dst].val = fold_convert (type, comb->elts[src].val);
      dst++;
    }
  comb->n = dst;

  /* If compaction freed a slot, fold the leftover REST term back in as a
     regular element with coefficient one.  */
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = double_int_one;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
/* Splits EXPR into an affine combination of parts and stores it in COMB.
   All computations are carried out in TYPE.  Any subexpression that cannot
   be decomposed ends up, via aff_combination_elt, as a single element with
   coefficient one.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  enum machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      /* A constant contributes only to the offset.  */
      aff_combination_const (comb, type, tree_to_double_int (expr));
      return;

    case POINTER_PLUS_EXPR:
      /* The pointer operand is processed in TYPE, the offset operand in
	 sizetype.  */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
	/* a - b is handled as a + (-1) * b.  */
	aff_combination_scale (&tmp, double_int_minus_one);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
	/* Only multiplication by a constant can be distributed.  */
	break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, tree_to_double_int (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      aff_combination_add_cst (comb, double_int_minus_one);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
	{
	  expr = TREE_OPERAND (expr, 0);
	  tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
	  tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
	  aff_combination_add (comb, &tmp);
	  return;
	}
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
				  &toffset, &mode, &unsignedp, &volatilep,
				  false);
      if (bitpos % BITS_PER_UNIT != 0)
	/* A position that is not a whole number of bytes cannot be
	   expressed as a byte offset.  */
	break;
      aff_combination_const (comb, type,
			     double_int::from_uhwi (bitpos / BITS_PER_UNIT));
      core = build_fold_addr_expr (core);
      if (TREE_CODE (core) == ADDR_EXPR)
	aff_combination_add_elt (comb, core, double_int_one);
      else
	{
	  tree_to_aff_combination (core, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      if (toffset)
	{
	  /* Variable part of the reference offset.  */
	  tree_to_aff_combination (toffset, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
	/* MEM[&obj + off]: decompose the address expression directly.  */
	tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
				 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  /* Zero offset: the MEM_REF itself is the element.  */
	  aff_combination_elt (comb, type, expr);
	  return;
	}
      else
	/* Strip the offset: use MEM[base + 0] as the element and add the
	   offset to the combination below.  */
	aff_combination_elt (comb, type,
			     build2 (MEM_REF, TREE_TYPE (expr),
				     TREE_OPERAND (expr, 0),
				     build_int_cst
				      (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    default:
      break;
    }

  /* Fallback: EXPR contributes as a single opaque element.  */
  aff_combination_elt (comb, type, expr);
}
/* Splits EXPR into an affine combination of parts and stores it in COMB.
   All computations are carried out in TYPE.  Any subexpression that cannot
   be decomposed ends up, via aff_combination_elt, as a single element with
   coefficient one.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  poly_int64 bitpos, bitsize, bytepos;
  machine_mode mode;
  int unsignedp, reversep, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case POINTER_PLUS_EXPR:
      /* The pointer operand is processed in TYPE, the offset operand in
	 sizetype.  */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
	/* a - b is handled as a + (-1) * b.  */
	aff_combination_scale (&tmp, -1);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
	/* Only multiplication by a constant can be distributed.  */
	break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, wi::to_widest (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      aff_combination_add_cst (comb, -1);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
	{
	  expr = TREE_OPERAND (expr, 0);
	  tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
	  tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
	  aff_combination_add (comb, &tmp);
	  return;
	}
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
				  &toffset, &mode, &unsignedp, &reversep,
				  &volatilep);
      /* A bit position that is not a whole number of bytes cannot be
	 expressed as a byte offset.  */
      if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
	break;
      aff_combination_const (comb, type, bytepos);
      if (TREE_CODE (core) == MEM_REF)
	{
	  /* Fold the MEM_REF's constant offset into the combination and
	     continue from its base pointer.  */
	  tree mem_offset = TREE_OPERAND (core, 1);
	  aff_combination_add_cst (comb, wi::to_poly_widest (mem_offset));
	  core = TREE_OPERAND (core, 0);
	}
      else
	core = build_fold_addr_expr (core);

      if (TREE_CODE (core) == ADDR_EXPR)
	aff_combination_add_elt (comb, core, 1);
      else
	{
	  tree_to_aff_combination (core, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      if (toffset)
	{
	  /* Variable part of the reference offset.  */
	  tree_to_aff_combination (toffset, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
	/* MEM[&obj + off]: decompose the address expression directly.  */
	tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
				 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  /* Zero offset: the MEM_REF itself is the element.  */
	  aff_combination_elt (comb, type, expr);
	  return;
	}
      else
	/* Strip the offset: use MEM[base + 0] as the element and add the
	   offset to the combination below.  */
	aff_combination_elt (comb, type,
			     build2 (MEM_REF, TREE_TYPE (expr),
				     TREE_OPERAND (expr, 0),
				     build_int_cst
				      (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    CASE_CONVERT:
      {
	tree otype = TREE_TYPE (expr);
	tree inner = TREE_OPERAND (expr, 0);
	tree itype = TREE_TYPE (inner);
	enum tree_code icode = TREE_CODE (inner);

	/* In principle this is a valid folding, but it isn't necessarily
	   an optimization, so do it here and not in fold_unary.  */
	if ((icode == PLUS_EXPR || icode == MINUS_EXPR || icode == MULT_EXPR)
	    && TREE_CODE (itype) == INTEGER_TYPE
	    && TREE_CODE (otype) == INTEGER_TYPE
	    && TYPE_PRECISION (otype) > TYPE_PRECISION (itype))
	  {
	    tree op0 = TREE_OPERAND (inner, 0), op1 = TREE_OPERAND (inner, 1);

	    /* If inner type has undefined overflow behavior, fold conversion
	       for below two cases:
		 (T1)(X *+- CST) -> (T1)X *+- (T1)CST
		 (T1)(X + X)     -> (T1)X + (T1)X.  */
	    if (TYPE_OVERFLOW_UNDEFINED (itype)
		&& (TREE_CODE (op1) == INTEGER_CST
		    || (icode == PLUS_EXPR && operand_equal_p (op0, op1, 0))))
	      {
		op0 = fold_convert (otype, op0);
		op1 = fold_convert (otype, op1);
		expr = fold_build2 (icode, otype, op0, op1);
		tree_to_aff_combination (expr, type, comb);
		return;
	      }
	    wide_int minv, maxv;
	    /* If inner type has wrapping overflow behavior, fold conversion
	       for below case:
		 (T1)(X - CST) -> (T1)X - (T1)CST
	       if X - CST doesn't overflow by range information.  Also handle
	       (T1)(X + CST) as (T1)(X - (-CST)).  */
	    if (TYPE_UNSIGNED (itype)
		&& TYPE_OVERFLOW_WRAPS (itype)
		&& TREE_CODE (op0) == SSA_NAME
		&& TREE_CODE (op1) == INTEGER_CST
		&& icode != MULT_EXPR
		&& get_range_info (op0, &minv, &maxv) == VR_RANGE)
	      {
		/* Canonicalize X + CST to X - (-CST) so only the MINUS
		   form needs the range check.  */
		if (icode == PLUS_EXPR)
		  op1 = wide_int_to_tree (itype, -wi::to_wide (op1));
		/* Safe when MIN(X) >= CST, i.e. the subtraction cannot
		   wrap.  */
		if (wi::geu_p (minv, wi::to_wide (op1)))
		  {
		    op0 = fold_convert (otype, op0);
		    op1 = fold_convert (otype, op1);
		    expr = fold_build2 (MINUS_EXPR, otype, op0, op1);
		    tree_to_aff_combination (expr, type, comb);
		    return;
		  }
	      }
	  }
      }
      break;

    default:
      {
	/* Constants (including poly_int constants) contribute only to the
	   offset; there is no separate INTEGER_CST case in this switch.  */
	if (poly_int_tree_p (expr))
	  {
	    aff_combination_const (comb, type, wi::to_poly_widest (expr));
	    return;
	  }
	break;
      }
    }

  /* Fallback: EXPR contributes as a single opaque element.  */
  aff_combination_elt (comb, type, expr);
}
void tree_to_aff_combination (tree expr, tree type, aff_tree *comb) { aff_tree tmp; enum tree_code code; tree cst, core, toffset; HOST_WIDE_INT bitpos, bitsize; enum machine_mode mode; int unsignedp, volatilep; STRIP_NOPS (expr); code = TREE_CODE (expr); switch (code) { case INTEGER_CST: aff_combination_const (comb, type, tree_to_double_int (expr)); return; case POINTER_PLUS_EXPR: tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb); tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp); aff_combination_convert (&tmp, type); aff_combination_add (comb, &tmp); return; case PLUS_EXPR: case MINUS_EXPR: tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb); tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp); if (code == MINUS_EXPR) aff_combination_scale (&tmp, double_int_minus_one); aff_combination_add (comb, &tmp); return; case MULT_EXPR: cst = TREE_OPERAND (expr, 1); if (TREE_CODE (cst) != INTEGER_CST) break; tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb); aff_combination_scale (comb, tree_to_double_int (cst)); return; case NEGATE_EXPR: tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb); aff_combination_scale (comb, double_int_minus_one); return; case BIT_NOT_EXPR: /* ~x = -x - 1 */ tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb); aff_combination_scale (comb, double_int_minus_one); aff_combination_add_cst (comb, double_int_minus_one); return; case ADDR_EXPR: core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos, &toffset, &mode, &unsignedp, &volatilep, false); if (bitpos % BITS_PER_UNIT != 0) break; aff_combination_const (comb, type, uhwi_to_double_int (bitpos / BITS_PER_UNIT)); core = build_fold_addr_expr (core); if (TREE_CODE (core) == ADDR_EXPR) aff_combination_add_elt (comb, core, double_int_one); else { tree_to_aff_combination (core, type, &tmp); aff_combination_add (comb, &tmp); } if (toffset) { tree_to_aff_combination (toffset, type, &tmp); aff_combination_add (comb, &tmp); 
} return; default: break; } aff_combination_elt (comb, type, expr); }