- else
- x = gen_raw_REG (Pmode, (*regno)++);
-
- return gen_rtx_MEM (DECL_MODE (obj), x);
-}
-
/* Prepares decl_rtl for variables referred in *EXPR_P.  Callback for
   walk_tree.  DATA contains the actual fake register number.
   NOTE(review): *WS appears to be walk_tree's "walk subtrees" flag;
   clearing it stops recursion below a fully-handled node -- confirm
   against the walk_tree contract.  */

static tree
prepare_decl_rtl (tree *expr_p, int *ws, void *data)
{
  tree obj = NULL_TREE;
  rtx x = NULL_RTX;
  int *regno = data;

  switch (TREE_CODE (*expr_p))
    {
    case ADDR_EXPR:
      /* Look through component references down to the base object
	 whose address is actually taken.  */
      for (expr_p = &TREE_OPERAND (*expr_p, 0);
	   handled_component_p (*expr_p);
	   expr_p = &TREE_OPERAND (*expr_p, 0))
	continue;
      obj = *expr_p;
      /* An object whose address is taken needs a memory location.  */
      if (DECL_P (obj) && !DECL_RTL_SET_P (obj))
	x = produce_memory_decl_rtl (obj, regno);
      break;

    case SSA_NAME:
      *ws = 0;
      obj = SSA_NAME_VAR (*expr_p);
      /* SSA names get a fresh fake pseudo register.  */
      if (!DECL_RTL_SET_P (obj))
	x = gen_raw_REG (DECL_MODE (obj), (*regno)++);
      break;

    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      *ws = 0;
      obj = *expr_p;

      if (DECL_RTL_SET_P (obj))
	break;

      /* BLKmode objects cannot live in a single register; give them a
	 fake memory location instead.  */
      if (DECL_MODE (obj) == BLKmode)
	x = produce_memory_decl_rtl (obj, regno);
      else
	x = gen_raw_REG (DECL_MODE (obj), (*regno)++);

      break;

    default:
      break;
    }

  if (x)
    {
      /* Record OBJ so the fake DECL_RTL can be reset later.  */
      VEC_safe_push (tree, heap, decl_rtl_to_reset, obj);
      SET_DECL_RTL (obj, x);
    }

  return NULL_TREE;
}
-
-/* Determines cost of the computation of EXPR. */
-
-static unsigned
-computation_cost (tree expr)
-{
- rtx seq, rslt;
- tree type = TREE_TYPE (expr);
- unsigned cost;
- /* Avoid using hard regs in ways which may be unsupported. */
- int regno = LAST_VIRTUAL_REGISTER + 1;
-
- walk_tree (&expr, prepare_decl_rtl, ®no, NULL);
- start_sequence ();
- rslt = expand_expr (expr, NULL_RTX, TYPE_MODE (type), EXPAND_NORMAL);
- seq = get_insns ();
- end_sequence ();
-
- cost = seq_cost (seq);
- if (MEM_P (rslt))
- cost += address_cost (XEXP (rslt, 0), TYPE_MODE (type));
-
- return cost;
-}
-
-/* Returns variable containing the value of candidate CAND at statement AT. */
-
-static tree
-var_at_stmt (struct loop *loop, struct iv_cand *cand, tree stmt)
-{
- if (stmt_after_increment (loop, cand, stmt))
- return cand->var_after;
- else
- return cand->var_before;
-}
-
-/* Return the most significant (sign) bit of T. Similar to tree_int_cst_msb,
- but the bit is determined from TYPE_PRECISION, not MODE_BITSIZE. */
-
-int
-tree_int_cst_sign_bit (tree t)
-{
- unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
- unsigned HOST_WIDE_INT w;
-
- if (bitno < HOST_BITS_PER_WIDE_INT)
- w = TREE_INT_CST_LOW (t);
- else
- {
- w = TREE_INT_CST_HIGH (t);
- bitno -= HOST_BITS_PER_WIDE_INT;
- }
-
- return (w >> bitno) & 1;
-}
-
-/* If we can prove that TOP = cst * BOT for some constant cst,
- store cst to MUL and return true. Otherwise return false.
- The returned value is always sign-extended, regardless of the
- signedness of TOP and BOT. */
-
-static bool
-constant_multiple_of (tree top, tree bot, double_int *mul)
-{
- tree mby;
- enum tree_code code;
- double_int res, p0, p1;
- unsigned precision = TYPE_PRECISION (TREE_TYPE (top));
-
- STRIP_NOPS (top);
- STRIP_NOPS (bot);
-
- if (operand_equal_p (top, bot, 0))
- {
- *mul = double_int_one;
- return true;
- }
-
- code = TREE_CODE (top);
- switch (code)
- {
- case MULT_EXPR:
- mby = TREE_OPERAND (top, 1);
- if (TREE_CODE (mby) != INTEGER_CST)
- return false;
-
- if (!constant_multiple_of (TREE_OPERAND (top, 0), bot, &res))
- return false;
-
- *mul = double_int_sext (double_int_mul (res, tree_to_double_int (mby)),
- precision);
- return true;
-
- case PLUS_EXPR:
- case MINUS_EXPR:
- if (!constant_multiple_of (TREE_OPERAND (top, 0), bot, &p0)
- || !constant_multiple_of (TREE_OPERAND (top, 1), bot, &p1))
- return false;
-
- if (code == MINUS_EXPR)
- p1 = double_int_neg (p1);
- *mul = double_int_sext (double_int_add (p0, p1), precision);
- return true;
-
- case INTEGER_CST:
- if (TREE_CODE (bot) != INTEGER_CST)
- return false;
-
- p0 = double_int_sext (tree_to_double_int (bot), precision);
- p1 = double_int_sext (tree_to_double_int (top), precision);
- if (double_int_zero_p (p1))
- return false;
- *mul = double_int_sext (double_int_sdivmod (p0, p1, FLOOR_DIV_EXPR, &res),
- precision);
- return double_int_zero_p (res);
-
- default:
- return false;
- }
-}
-
-/* Sets COMB to CST. */
-
-static void
-aff_combination_const (struct affine_tree_combination *comb, tree type,
- unsigned HOST_WIDE_INT cst)
-{
- unsigned prec = TYPE_PRECISION (type);
-
- comb->type = type;
- comb->mask = (((unsigned HOST_WIDE_INT) 2 << (prec - 1)) - 1);
-
- comb->n = 0;
- comb->rest = NULL_TREE;
- comb->offset = cst & comb->mask;
-}
-
-/* Sets COMB to single element ELT. */
-
-static void
-aff_combination_elt (struct affine_tree_combination *comb, tree type, tree elt)
-{
- unsigned prec = TYPE_PRECISION (type);
-
- comb->type = type;
- comb->mask = (((unsigned HOST_WIDE_INT) 2 << (prec - 1)) - 1);
-
- comb->n = 1;
- comb->elts[0] = elt;
- comb->coefs[0] = 1;
- comb->rest = NULL_TREE;
- comb->offset = 0;
-}
-
/* Scales COMB by SCALE.  */

static void
aff_combination_scale (struct affine_tree_combination *comb,
		       unsigned HOST_WIDE_INT scale)
{
  unsigned i, j;

  /* Scaling by one is the identity.  */
  if (scale == 1)
    return;

  if (scale == 0)
    {
      /* Scaling by zero collapses the whole combination to constant 0.  */
      aff_combination_const (comb, comb->type, 0);
      return;
    }

  comb->offset = (scale * comb->offset) & comb->mask;
  /* Scale each coefficient in place, compacting away elements whose
     scaled coefficient becomes zero modulo the mask (I reads, J writes).  */
  for (i = 0, j = 0; i < comb->n; i++)
    {
      comb->coefs[j] = (scale * comb->coefs[i]) & comb->mask;
      comb->elts[j] = comb->elts[i];
      if (comb->coefs[j] != 0)
	j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      if (comb->n < MAX_AFF_ELTS)
	{
	  /* Compaction freed a slot -- track REST as an ordinary
	     element with coefficient SCALE.  */
	  comb->coefs[comb->n] = scale;
	  comb->elts[comb->n] = comb->rest;
	  comb->rest = NULL_TREE;
	  comb->n++;
	}
      else
	/* No free slot; fold the scale into the REST tree itself.  */
	comb->rest = fold_build2 (MULT_EXPR, comb->type, comb->rest,
				  build_int_cst_type (comb->type, scale));
    }
}
-
/* Adds ELT * SCALE to COMB.  */

static void
aff_combination_add_elt (struct affine_tree_combination *comb, tree elt,
			 unsigned HOST_WIDE_INT scale)
{
  unsigned i;

  /* Adding a zero multiple is a no-op.  */
  if (scale == 0)
    return;

  /* If ELT is already tracked, just adjust its coefficient.  */
  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i], elt, 0))
      {
	comb->coefs[i] = (comb->coefs[i] + scale) & comb->mask;
	if (comb->coefs[i])
	  return;

	/* The coefficient cancelled to zero: remove the element by
	   swapping in the last one.  */
	comb->n--;
	comb->coefs[i] = comb->coefs[comb->n];
	comb->elts[i] = comb->elts[comb->n];

	if (comb->rest)
	  {
	    /* REST is only set when the array was full; the freed slot
	       lets us pull it back in with coefficient one.  */
	    gcc_assert (comb->n == MAX_AFF_ELTS - 1);
	    comb->coefs[comb->n] = 1;
	    comb->elts[comb->n] = comb->rest;
	    comb->rest = NULL_TREE;
	    comb->n++;
	  }
	return;
      }
  /* ELT is new: append it if there is room...  */
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->coefs[comb->n] = scale;
      comb->elts[comb->n] = elt;
      comb->n++;
      return;
    }

  /* ... otherwise fold ELT * SCALE into the catch-all REST tree.  */
  if (scale == 1)
    elt = fold_convert (comb->type, elt);
  else
    elt = fold_build2 (MULT_EXPR, comb->type,
		       fold_convert (comb->type, elt),
		       build_int_cst_type (comb->type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, comb->type, comb->rest, elt);
  else
    comb->rest = elt;
}
-
-/* Adds COMB2 to COMB1. */
-
-static void
-aff_combination_add (struct affine_tree_combination *comb1,
- struct affine_tree_combination *comb2)
-{
- unsigned i;
-
- comb1->offset = (comb1->offset + comb2->offset) & comb1->mask;
- for (i = 0; i < comb2->n; i++)
- aff_combination_add_elt (comb1, comb2->elts[i], comb2->coefs[i]);
- if (comb2->rest)
- aff_combination_add_elt (comb1, comb2->rest, 1);
-}
-
/* Convert COMB to TYPE.  */

static void
aff_combination_convert (tree type, struct affine_tree_combination *comb)
{
  unsigned prec = TYPE_PRECISION (type);
  unsigned i;

  /* If the precision of both types is the same, it suffices to change the type
     of the whole combination -- the elements are allowed to have another type
     equivalent wrto STRIP_NOPS.  */
  if (prec == TYPE_PRECISION (comb->type))
    {
      comb->type = type;
      return;
    }

  /* Different precision: recompute the mask for the new width and
     truncate the constant offset to it.  */
  comb->mask = (((unsigned HOST_WIDE_INT) 2 << (prec - 1)) - 1);
  comb->offset = comb->offset & comb->mask;

  /* The type of the elements can be different from comb->type only as
     much as what STRIP_NOPS would remove.  We can just directly cast
     to TYPE.  */
  for (i = 0; i < comb->n; i++)
    comb->elts[i] = fold_convert (type, comb->elts[i]);
  if (comb->rest)
    comb->rest = fold_convert (type, comb->rest);

  comb->type = type;
}
-
-/* Splits EXPR into an affine combination of parts. */
-
-static void
-tree_to_aff_combination (tree expr, tree type,
- struct affine_tree_combination *comb)
-{
- struct affine_tree_combination tmp;
- enum tree_code code;
- tree cst, core, toffset;
- HOST_WIDE_INT bitpos, bitsize;
- enum machine_mode mode;
- int unsignedp, volatilep;
-
- STRIP_NOPS (expr);
-
- code = TREE_CODE (expr);
- switch (code)
- {
- case INTEGER_CST:
- aff_combination_const (comb, type, int_cst_value (expr));
- return;
-
- case PLUS_EXPR:
- case MINUS_EXPR:
- tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
- tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
- if (code == MINUS_EXPR)
- aff_combination_scale (&tmp, -1);
- aff_combination_add (comb, &tmp);
- return;
-
- case MULT_EXPR:
- cst = TREE_OPERAND (expr, 1);
- if (TREE_CODE (cst) != INTEGER_CST)
- break;
- tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
- aff_combination_scale (comb, int_cst_value (cst));
- return;
-
- case NEGATE_EXPR:
- tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
- aff_combination_scale (comb, -1);
- return;
-
- case ADDR_EXPR:
- core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
- &toffset, &mode, &unsignedp, &volatilep,
- false);
- if (bitpos % BITS_PER_UNIT != 0)
- break;
- aff_combination_const (comb, type, bitpos / BITS_PER_UNIT);
- core = build_fold_addr_expr (core);
- if (TREE_CODE (core) == ADDR_EXPR)
- aff_combination_add_elt (comb, core, 1);
- else
- {
- tree_to_aff_combination (core, type, &tmp);
- aff_combination_add (comb, &tmp);
- }
- if (toffset)
- {
- tree_to_aff_combination (toffset, type, &tmp);
- aff_combination_add (comb, &tmp);
- }
- return;
-
- default:
- break;