1 /* Data flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Diego Novillo <dnovillo@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
27 #include "pointer-set.h"
30 #include "basic-block.h"
34 #include "langhooks.h"
37 #include "tree-pretty-print.h"
38 #include "tree-dump.h"
40 #include "tree-flow.h"
41 #include "tree-inline.h"
42 #include "tree-pass.h"
47 /* Build and maintain data flow information for trees. */
49 /* Counters used to display DFA and SSA statistics. */
57 size_t max_num_phi_args;
63 /* Local functions. */
64 static void collect_dfa_stats (struct dfa_stats_d *);
65 static tree find_vars_r (tree *, int *, void *);
68 /*---------------------------------------------------------------------------
69 Dataflow analysis (DFA) routines
70 ---------------------------------------------------------------------------*/
/* NOTE(review): this chunk is a truncated paste with the file's own line
   numbers embedded in the text; the embedded numbers jump, so braces, the
   return type, and the enclosing FOR_EACH_BB loop are elided below.  */
71 /* Find all the variables referenced in the function. This function
72 builds the global arrays REFERENCED_VARS and CALL_CLOBBERED_VARS.
74 Note that this function does not look for statement operands, it simply
75 determines what variables are referenced in the program and detects
76 various attributes for each variable used by alias analysis and the
80 find_referenced_vars (void)
/* Iterator over the statements of a basic block.  */
83 gimple_stmt_iterator si;
87 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
89 gimple stmt = gsi_stmt (si);
/* Presumably debug statements are skipped here -- the elided line after
   this check likely reads "continue;".  TODO confirm against upstream.  */
90 if (is_gimple_debug (stmt))
92 find_referenced_vars_in (gsi_stmt (si));
/* PHI nodes are scanned separately from ordinary statements.  */
95 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
96 find_referenced_vars_in (gsi_stmt (si));
/* Pass descriptor running find_referenced_vars over the function.
   NOTE(review): truncated -- the pass-kind tag, gate, sub/next and several
   other initializer fields are elided from this paste.  */
102 struct gimple_opt_pass pass_referenced_vars =
106 "*referenced_vars", /* name */
108 find_referenced_vars, /* execute */
111 0, /* static_pass_number */
112 TV_FIND_REFERENCED_VARS, /* tv_id */
113 PROP_gimple_leh | PROP_cfg, /* properties_required */
114 PROP_referenced_vars, /* properties_provided */
115 0, /* properties_destroyed */
116 TODO_dump_func, /* todo_flags_start */
117 TODO_dump_func /* todo_flags_finish */
122 /*---------------------------------------------------------------------------
124 ---------------------------------------------------------------------------*/
125 /* Create a new annotation for a _DECL node T. */
128 create_var_ann (tree t)
/* Only variables, parameters and function results carry var annotations.  */
133 gcc_assert (TREE_CODE (t) == VAR_DECL
134 || TREE_CODE (t) == PARM_DECL
135 || TREE_CODE (t) == RESULT_DECL);
/* Allocate a zero-initialized annotation in GC memory and attach it to T.
   NOTE(review): the declaration of ANN and the return statement are
   elided from this paste.  */
137 ann = ggc_alloc_cleared_var_ann_d ();
138 *DECL_VAR_ANN_PTR (t) = ann;
143 /* Renumber all of the gimple stmt uids. */
146 renumber_gimple_stmt_uids (void)
/* Restart UID numbering from zero, then assign a fresh, strictly
   increasing UID to every statement.  NOTE(review): the FOR_EACH_BB loop
   header and function braces are elided from this paste.  */
150 set_gimple_stmt_max_uid (cfun, 0);
153 gimple_stmt_iterator bsi;
154 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
156 gimple stmt = gsi_stmt (bsi);
157 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
162 /* Like renumber_gimple_stmt_uids, but only do work on the basic blocks
163 in BLOCKS, of which there are N_BLOCKS. Also renumbers PHIs. */
166 renumber_gimple_stmt_uids_in_blocks (basic_block *blocks, int n_blocks)
/* Reset the counter, then within each listed block number the PHI nodes
   first and the ordinary statements second.  */
170 set_gimple_stmt_max_uid (cfun, 0);
171 for (i = 0; i < n_blocks; i++)
173 basic_block bb = blocks[i];
174 gimple_stmt_iterator bsi;
175 for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
177 gimple stmt = gsi_stmt (bsi);
178 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
180 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
182 gimple stmt = gsi_stmt (bsi);
183 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
188 /* Build a temporary. Make sure and register it to be renamed. */
191 make_rename_temp (tree type, const char *prefix)
193 tree t = create_tmp_reg (type, prefix);
/* Only register the temporary when the referenced-vars machinery has been
   initialized for the current function.  NOTE(review): the return of T is
   elided from this paste.  */
195 if (gimple_referenced_vars (cfun))
197 add_referenced_var (t);
198 mark_sym_for_renaming (t);
206 /*---------------------------------------------------------------------------
208 ---------------------------------------------------------------------------*/
209 /* Dump the list of all the referenced variables in the current function to
213 dump_referenced_vars (FILE *file)
216 referenced_var_iterator rvi;
/* Header line with the function name and the variable count.  */
218 fprintf (file, "\nReferenced variables in %s: %u\n\n",
219 get_name (current_function_decl), (unsigned) num_referenced_vars);
/* One entry per referenced variable, formatted by dump_variable.  */
221 FOR_EACH_REFERENCED_VAR (cfun, var, rvi)
223 fprintf (file, "Variable: ");
224 dump_variable (file, var);
227 fprintf (file, "\n");
231 /* Dump the list of all the referenced variables to stderr. */
234 debug_referenced_vars (void)
/* Convenience wrapper for use from a debugger.  */
236 dump_referenced_vars (stderr);
240 /* Dump variable VAR and its may-aliases to FILE. */
243 dump_variable (FILE *file, tree var)
/* For SSA names, dump points-to info for pointer-typed names, then
   continue with the underlying _DECL.  */
245 if (TREE_CODE (var) == SSA_NAME)
247 if (POINTER_TYPE_P (TREE_TYPE (var)))
248 dump_points_to_info_for (file, var);
249 var = SSA_NAME_VAR (var);
252 if (var == NULL_TREE)
254 fprintf (file, "<nil>");
258 print_generic_expr (file, var, dump_flags);
260 fprintf (file, ", UID D.%u", (unsigned) DECL_UID (var));
/* Only print the points-to UID when it differs from the DECL UID.  */
261 if (DECL_PT_UID (var) != DECL_UID (var))
262 fprintf (file, ", PT-UID D.%u", (unsigned) DECL_PT_UID (var));
264 fprintf (file, ", ");
265 print_generic_expr (file, TREE_TYPE (var), dump_flags);
/* Attribute flags relevant to alias analysis.  */
267 if (TREE_ADDRESSABLE (var))
268 fprintf (file, ", is addressable");
270 if (is_global_var (var))
271 fprintf (file, ", is global");
273 if (TREE_THIS_VOLATILE (var))
274 fprintf (file, ", is volatile");
/* Show the default SSA definition for VAR, if one exists.  */
276 if (cfun && gimple_default_def (cfun, var))
278 fprintf (file, ", default def: ");
279 print_generic_expr (file, gimple_default_def (cfun, var), dump_flags);
282 if (DECL_INITIAL (var))
284 fprintf (file, ", initial: ");
285 print_generic_expr (file, DECL_INITIAL (var), dump_flags);
288 fprintf (file, "\n");
292 /* Dump variable VAR and its may-aliases to stderr. */
295 debug_variable (tree var)
/* Convenience wrapper for use from a debugger.  */
297 dump_variable (stderr, var);
301 /* Dump various DFA statistics to FILE. */
304 dump_dfa_stats (FILE *file)
306 struct dfa_stats_d dfa_stats;
/* SIZE holds the byte footprint of each category; TOTAL accumulates them.
   NOTE(review): truncated paste -- the lines that add SIZE into TOTAL,
   the FUNCNAME declaration line, and various braces are elided.  */
308 unsigned long size, total = 0;
/* Column formats: header row, per-category row, and the totals row.  */
309 const char * const fmt_str = "%-30s%-13s%12s\n";
310 const char * const fmt_str_1 = "%-30s%13lu%11lu%c\n";
311 const char * const fmt_str_3 = "%-43s%11lu%c\n";
313 = lang_hooks.decl_printable_name (current_function_decl, 2);
315 collect_dfa_stats (&dfa_stats);
317 fprintf (file, "\nDFA Statistics for %s\n\n", funcname);
319 fprintf (file, "---------------------------------------------------------\n");
320 fprintf (file, fmt_str, "", " Number of ", "Memory");
321 fprintf (file, fmt_str, "", " instances ", "used ");
322 fprintf (file, "---------------------------------------------------------\n");
/* One row per statistic: count plus scaled memory usage.  */
324 size = num_referenced_vars * sizeof (tree);
326 fprintf (file, fmt_str_1, "Referenced variables", (unsigned long)num_referenced_vars,
327 SCALE (size), LABEL (size));
329 size = dfa_stats.num_var_anns * sizeof (struct var_ann_d);
331 fprintf (file, fmt_str_1, "Variables annotated", dfa_stats.num_var_anns,
332 SCALE (size), LABEL (size));
334 size = dfa_stats.num_uses * sizeof (tree *);
336 fprintf (file, fmt_str_1, "USE operands", dfa_stats.num_uses,
337 SCALE (size), LABEL (size));
339 size = dfa_stats.num_defs * sizeof (tree *);
341 fprintf (file, fmt_str_1, "DEF operands", dfa_stats.num_defs,
342 SCALE (size), LABEL (size));
344 size = dfa_stats.num_vuses * sizeof (tree *);
346 fprintf (file, fmt_str_1, "VUSE operands", dfa_stats.num_vuses,
347 SCALE (size), LABEL (size));
349 size = dfa_stats.num_vdefs * sizeof (tree *);
351 fprintf (file, fmt_str_1, "VDEF operands", dfa_stats.num_vdefs,
352 SCALE (size), LABEL (size));
354 size = dfa_stats.num_phis * sizeof (struct gimple_statement_phi);
356 fprintf (file, fmt_str_1, "PHI nodes", dfa_stats.num_phis,
357 SCALE (size), LABEL (size));
359 size = dfa_stats.num_phi_args * sizeof (struct phi_arg_d);
361 fprintf (file, fmt_str_1, "PHI arguments", dfa_stats.num_phi_args,
362 SCALE (size), LABEL (size));
364 fprintf (file, "---------------------------------------------------------\n");
365 fprintf (file, fmt_str_3, "Total memory used by DFA/SSA data", SCALE (total),
367 fprintf (file, "---------------------------------------------------------\n");
368 fprintf (file, "\n");
/* Average/maximum PHI arity, only meaningful when PHIs exist.  */
370 if (dfa_stats.num_phis)
371 fprintf (file, "Average number of arguments per PHI node: %.1f (max: %ld)\n",
372 (float) dfa_stats.num_phi_args / (float) dfa_stats.num_phis,
373 (long) dfa_stats.max_num_phi_args);
375 fprintf (file, "\n");
379 /* Dump DFA statistics on stderr. */
382 debug_dfa_stats (void)
/* Convenience wrapper for use from a debugger.  */
384 dump_dfa_stats (stderr);
388 /* Collect DFA statistics and store them in the structure pointed to by
392 collect_dfa_stats (struct dfa_stats_d *dfa_stats_p ATTRIBUTE_UNUSED)
395 referenced_var_iterator vi;
398 gcc_assert (dfa_stats_p);
/* Start from a zeroed statistics record.  */
400 memset ((void *)dfa_stats_p, 0, sizeof (struct dfa_stats_d));
402 /* Count all the variable annotations. */
403 FOR_EACH_REFERENCED_VAR (cfun, var, vi)
405 dfa_stats_p->num_var_anns++;
407 /* Walk all the statements in the function counting references. */
/* NOTE(review): the enclosing FOR_EACH_BB loop header is elided here.  */
410 gimple_stmt_iterator si;
/* PHI nodes: count them, their total arguments, and track the maximum
   arity seen so far.  */
412 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
414 gimple phi = gsi_stmt (si);
415 dfa_stats_p->num_phis++;
416 dfa_stats_p->num_phi_args += gimple_phi_num_args (phi);
417 if (gimple_phi_num_args (phi) > dfa_stats_p->max_num_phi_args)
418 dfa_stats_p->max_num_phi_args = gimple_phi_num_args (phi);
/* Ordinary statements: count real and virtual defs/uses.  */
421 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
423 gimple stmt = gsi_stmt (si);
424 dfa_stats_p->num_defs += NUM_SSA_OPERANDS (stmt, SSA_OP_DEF);
425 dfa_stats_p->num_uses += NUM_SSA_OPERANDS (stmt, SSA_OP_USE);
426 dfa_stats_p->num_vdefs += gimple_vdef (stmt) ? 1 : 0;
427 dfa_stats_p->num_vuses += gimple_vuse (stmt) ? 1 : 0;
433 /*---------------------------------------------------------------------------
434 Miscellaneous helpers
435 ---------------------------------------------------------------------------*/
436 /* Callback for walk_tree. Used to collect variables referenced in
440 find_vars_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
442 /* If we are reading the lto info back in, we need to rescan the
/* An SSA name is registered through its underlying variable.  */
444 if (TREE_CODE (*tp) == SSA_NAME)
445 add_referenced_var (SSA_NAME_VAR (*tp));
447 /* If T is a regular variable that the optimizers are interested
448 in, add it to the list of variables. */
449 else if (SSA_VAR_P (*tp))
450 add_referenced_var (*tp);
452 /* Type, _DECL and constant nodes have no interesting children.
/* NOTE(review): the body of this branch (presumably clearing
   *walk_subtrees) and the return are elided from this paste.  */
454 else if (IS_TYPE_OR_DECL_P (*tp) || CONSTANT_CLASS_P (*tp))
460 /* Find referenced variables in STMT. In contrast with
461 find_new_referenced_vars, this function will not mark newly found
462 variables for renaming. */
465 find_referenced_vars_in (gimple stmt)
/* Non-PHI statements: walk every operand with find_vars_r.  */
469 if (gimple_code (stmt) != GIMPLE_PHI)
471 for (i = 0; i < gimple_num_ops (stmt); i++)
472 walk_tree (gimple_op_ptr (stmt, i), find_vars_r, NULL, NULL);
/* PHI nodes: walk the result and every incoming argument.  */
476 walk_tree (gimple_phi_result_ptr (stmt), find_vars_r, NULL, NULL);
478 for (i = 0; i < gimple_phi_num_args (stmt); i++)
480 tree arg = gimple_phi_arg_def (stmt, i);
481 walk_tree (&arg, find_vars_r, NULL, NULL);
487 /* Lookup UID in the referenced_vars hashtable and return the associated
491 referenced_var_lookup (struct function *fn, unsigned int uid)
/* Probe with a dummy tree_decl_minimal keyed by UID.  NOTE(review): the
   line setting in.uid and the return of H are elided from this paste.  */
494 struct tree_decl_minimal in;
496 h = (tree) htab_find_with_hash (gimple_referenced_vars (fn), &in, uid);
500 /* Check if TO is in the referenced_vars hash table and insert it if not.
501 Return true if it required insertion. */
504 referenced_var_check_and_insert (tree to)
507 struct tree_decl_minimal in;
508 unsigned int uid = DECL_UID (to);
/* First probe: if the UID is already present it must map to TO itself.  */
511 h = (tree) htab_find_with_hash (gimple_referenced_vars (cfun), &in, uid);
514 /* DECL_UID has already been entered in the table. Verify that it is
515 the same entry as TO. See PR 27793. */
516 gcc_assert (h == to);
/* Not present: reserve a slot and insert TO.  NOTE(review): the slot
   assignment and return are elided from this paste.  */
520 loc = (tree *) htab_find_slot_with_hash (gimple_referenced_vars (cfun),
526 /* Lookup VAR UID in the default_defs hashtable and return the associated
530 gimple_default_def (struct function *fn, tree var)
532 struct tree_decl_minimal ind;
533 struct tree_ssa_name in;
/* Only naked symbols (VAR/PARM/RESULT decls or SSA names) are valid.  */
534 gcc_assert (SSA_VAR_P (var));
/* Probe the table keyed by VAR's DECL_UID; returns NULL when VAR has no
   default definition recorded.  */
536 ind.uid = DECL_UID (var);
537 return (tree) htab_find_with_hash (DEFAULT_DEFS (fn), &in, DECL_UID (var));
540 /* Insert the pair VAR's UID, DEF into the default_defs hashtable. */
543 set_default_def (tree var, tree def)
545 struct tree_decl_minimal ind;
546 struct tree_ssa_name in;
549 gcc_assert (SSA_VAR_P (var));
551 ind.uid = DECL_UID (var);
/* NOTE(review): truncated paste -- the branch structure around the two
   htab_find_slot_with_hash calls (presumably a DEF == NULL_TREE removal
   path vs. an insertion path) is partially elided.  */
554 loc = htab_find_slot_with_hash (DEFAULT_DEFS (cfun), &in,
555 DECL_UID (var), INSERT);
/* Removal path: drop any existing default-def entry for VAR.  */
557 htab_remove_elt (DEFAULT_DEFS (cfun), *loc);
/* A default definition must be an SSA name whose variable is VAR.  */
560 gcc_assert (TREE_CODE (def) == SSA_NAME && SSA_NAME_VAR (def) == var);
561 loc = htab_find_slot_with_hash (DEFAULT_DEFS (cfun), &in,
562 DECL_UID (var), INSERT);
564 /* Default definition might be changed by tail call optimization. */
/* Clear the flag on the previous occupant of the slot before replacing.  */
566 SSA_NAME_IS_DEFAULT_DEF (*(tree *) loc) = false;
569 /* Mark DEF as the default definition for VAR. */
570 SSA_NAME_IS_DEFAULT_DEF (def) = true;
573 /* Add VAR to the list of referenced variables if it isn't already there. */
576 add_referenced_var (tree var)
579 gcc_assert (DECL_P (var));
581 /* Insert VAR into the referenced_vars has table if it isn't present. */
582 if (referenced_var_check_and_insert (var))
584 /* Scan DECL_INITIAL for pointer variables as they may contain
585 address arithmetic referencing the address of other
586 variables. As we are only interested in directly referenced
587 globals or referenced locals restrict this to initializers
588 than can refer to local variables. */
589 if (DECL_INITIAL (var)
590 && DECL_CONTEXT (var) == current_function_decl)
591 walk_tree (&DECL_INITIAL (var), find_vars_r, NULL, 0);
599 /* Remove VAR from the list. */
602 remove_referenced_var (tree var)
605 struct tree_decl_minimal in;
607 unsigned int uid = DECL_UID (var);
609 /* Preserve var_anns of globals. */
/* Locals: free and detach the annotation.  NOTE(review): a line freeing
   V_ANN (ggc_free or similar) appears to be elided before the detach.  */
610 if (!is_global_var (var)
611 && (v_ann = var_ann (var)))
614 *DECL_VAR_ANN_PTR (var) = NULL;
616 gcc_assert (DECL_P (var));
/* Remove VAR's slot from the referenced-vars hash table.  */
618 loc = htab_find_slot_with_hash (gimple_referenced_vars (cfun), &in, uid,
620 htab_clear_slot (gimple_referenced_vars (cfun), loc);
624 /* Return the virtual variable associated to the non-scalar variable VAR. */
627 get_virtual_var (tree var)
/* Strip an SSA name down to its underlying _DECL first.  */
631 if (TREE_CODE (var) == SSA_NAME)
632 var = SSA_NAME_VAR (var);
/* Peel component references until the base object is reached.  */
634 while (TREE_CODE (var) == REALPART_EXPR || TREE_CODE (var) == IMAGPART_EXPR
635 || handled_component_p (var))
636 var = TREE_OPERAND (var, 0);
638 /* Treating GIMPLE registers as virtual variables makes no sense.
639 Also complain if we couldn't extract a _DECL out of the original
641 gcc_assert (SSA_VAR_P (var));
642 gcc_assert (!is_gimple_reg (var));
647 /* Mark all the naked symbols in STMT for SSA renaming. */
650 mark_symbols_for_renaming (gimple stmt)
657 /* Mark all the operands for renaming. */
/* NOTE(review): the declarations of OP and ITER, and an update_stmt call
   typically preceding this loop, appear to be elided from this paste.  */
658 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_OPERANDS)
660 mark_sym_for_renaming (op);
664 /* Find all variables within the gimplified statement that were not
665 previously visible to the function and add them to the referenced
669 find_new_referenced_vars_1 (tree *tp, int *walk_subtrees,
670 void *data ATTRIBUTE_UNUSED)
/* A VAR_DECL with no annotation has not been seen before: register it
   and mark it for renaming.  */
674 if (TREE_CODE (t) == VAR_DECL && !var_ann (t))
676 add_referenced_var (t);
677 mark_sym_for_renaming (t);
/* Types and decls have no children worth walking.  NOTE(review): the
   branch body (presumably clearing *walk_subtrees) and the return are
   elided from this paste.  */
680 if (IS_TYPE_OR_DECL_P (t))
687 /* Find any new referenced variables in STMT. */
/* Thin wrapper dispatching find_new_referenced_vars_1 over STMT's
   operands via walk_gimple_op.  */
690 find_new_referenced_vars (gimple stmt)
692 walk_gimple_op (stmt, find_new_referenced_vars_1, NULL);
696 /* If EXP is a handled component reference for a structure, return the
697 base variable. The access range is delimited by bit positions *POFFSET and
698 *POFFSET + *PMAX_SIZE. The access size is *PSIZE bits. If either
699 *PSIZE or *PMAX_SIZE is -1, they could not be determined. If *PSIZE
700 and *PMAX_SIZE are equal, the access is non-variable. */
/* NOTE(review): this block is heavily truncated -- case labels, braces,
   goto targets (done/done_no_size) and the while(1) loop driving the
   component-ref peeling are among the elided lines.  The code below is
   kept verbatim; do not treat it as compilable in isolation.  */
703 get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
704 HOST_WIDE_INT *psize,
705 HOST_WIDE_INT *pmax_size)
707 HOST_WIDE_INT bitsize = -1;
708 HOST_WIDE_INT maxsize = -1;
709 tree size_tree = NULL_TREE;
710 HOST_WIDE_INT bit_offset = 0;
711 bool seen_variable_array_ref = false;
714 /* First get the final access size from just the outermost expression. */
715 if (TREE_CODE (exp) == COMPONENT_REF)
716 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
717 else if (TREE_CODE (exp) == BIT_FIELD_REF)
718 size_tree = TREE_OPERAND (exp, 1);
719 else if (!VOID_TYPE_P (TREE_TYPE (exp)))
721 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
723 size_tree = TYPE_SIZE (TREE_TYPE (exp));
725 bitsize = GET_MODE_BITSIZE (mode);
727 if (size_tree != NULL_TREE)
729 if (! host_integerp (size_tree, 1))
732 bitsize = TREE_INT_CST_LOW (size_tree);
735 /* Initially, maxsize is the same as the accessed element size.
736 In the following it will only grow (or become -1). */
739 /* Compute cumulative bit-offset for nested component-refs and array-refs,
740 and find the ultimate containing object. */
743 base_type = TREE_TYPE (exp);
745 switch (TREE_CODE (exp))
/* BIT_FIELD_REF: operand 2 is the constant bit position.  */
748 bit_offset += TREE_INT_CST_LOW (TREE_OPERAND (exp, 2));
/* COMPONENT_REF: accumulate the field's byte offset (scaled to bits)
   plus its bit offset, when both are compile-time constants.  */
753 tree field = TREE_OPERAND (exp, 1);
754 tree this_offset = component_ref_field_offset (exp);
757 && TREE_CODE (this_offset) == INTEGER_CST
758 && host_integerp (this_offset, 0))
760 HOST_WIDE_INT hthis_offset = TREE_INT_CST_LOW (this_offset);
761 hthis_offset *= BITS_PER_UNIT;
763 += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
764 bit_offset += hthis_offset;
766 /* If we had seen a variable array ref already and we just
767 referenced the last field of a struct or a union member
768 then we have to adjust maxsize by the padding at the end
770 if (seen_variable_array_ref
773 tree stype = TREE_TYPE (TREE_OPERAND (exp, 0));
774 tree next = DECL_CHAIN (field);
775 while (next && TREE_CODE (next) != FIELD_DECL)
776 next = DECL_CHAIN (next);
778 || TREE_CODE (stype) != RECORD_TYPE)
780 tree fsize = DECL_SIZE_UNIT (field);
781 tree ssize = TYPE_SIZE_UNIT (stype);
782 if (host_integerp (fsize, 0)
783 && host_integerp (ssize, 0))
784 maxsize += ((TREE_INT_CST_LOW (ssize)
785 - TREE_INT_CST_LOW (fsize))
786 * BITS_PER_UNIT - hthis_offset);
/* Variable field offset: widen maxsize to the containing struct.  */
794 tree csize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)));
795 /* We need to adjust maxsize to the whole structure bitsize.
796 But we can subtract any constant offset seen so far,
797 because that would get us out of the structure otherwise. */
798 if (maxsize != -1 && csize && host_integerp (csize, 1))
799 maxsize = TREE_INT_CST_LOW (csize) - bit_offset;
807 case ARRAY_RANGE_REF:
809 tree index = TREE_OPERAND (exp, 1);
810 tree low_bound, unit_size;
812 /* If the resulting bit-offset is constant, track it. */
813 if (TREE_CODE (index) == INTEGER_CST
814 && host_integerp (index, 0)
815 && (low_bound = array_ref_low_bound (exp),
816 host_integerp (low_bound, 0))
817 && (unit_size = array_ref_element_size (exp),
818 host_integerp (unit_size, 1)))
820 HOST_WIDE_INT hindex = TREE_INT_CST_LOW (index);
822 hindex -= TREE_INT_CST_LOW (low_bound);
823 hindex *= TREE_INT_CST_LOW (unit_size);
824 hindex *= BITS_PER_UNIT;
825 bit_offset += hindex;
827 /* An array ref with a constant index up in the structure
828 hierarchy will constrain the size of any variable array ref
829 lower in the access hierarchy. */
830 seen_variable_array_ref = false;
/* Variable index: widen maxsize to the whole containing array.  */
834 tree asize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)));
835 /* We need to adjust maxsize to the whole array bitsize.
836 But we can subtract any constant offset seen so far,
837 because that would get us outside of the array otherwise. */
838 if (maxsize != -1 && asize && host_integerp (asize, 1))
839 maxsize = TREE_INT_CST_LOW (asize) - bit_offset;
843 /* Remember that we have seen an array ref with a variable
845 seen_variable_array_ref = true;
/* IMAGPART_EXPR (presumably): skip over the real part.  */
854 bit_offset += bitsize;
857 case VIEW_CONVERT_EXPR:
/* MEM_REF (case label elided): fold the constant byte offset, scaled
   to bits, into BIT_OFFSET and continue from the pointed-to decl.  */
861 /* Hand back the decl for MEM[&decl, off]. */
862 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
864 if (integer_zerop (TREE_OPERAND (exp, 1)))
865 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
868 double_int off = mem_ref_offset (exp);
869 off = double_int_lshift (off,
871 ? 3 : exact_log2 (BITS_PER_UNIT),
872 HOST_BITS_PER_DOUBLE_INT, true);
873 off = double_int_add (off, shwi_to_double_int (bit_offset));
874 if (double_int_fits_in_shwi_p (off))
876 bit_offset = double_int_to_shwi (off);
877 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
/* TARGET_MEM_REF (case label elided): same idea, but punt on variable
   index operands.  */
884 /* Hand back the decl for MEM[&decl, off]. */
885 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR)
887 /* Via the variable index or index2 we can reach the
889 if (TMR_INDEX (exp) || TMR_INDEX2 (exp))
891 exp = TREE_OPERAND (TMR_BASE (exp), 0);
896 if (integer_zerop (TMR_OFFSET (exp)))
897 exp = TREE_OPERAND (TMR_BASE (exp), 0);
900 double_int off = mem_ref_offset (exp);
901 off = double_int_lshift (off,
903 ? 3 : exact_log2 (BITS_PER_UNIT),
904 HOST_BITS_PER_DOUBLE_INT, true);
905 off = double_int_add (off, shwi_to_double_int (bit_offset));
906 if (double_int_fits_in_shwi_p (off))
908 bit_offset = double_int_to_shwi (off);
909 exp = TREE_OPERAND (TMR_BASE (exp), 0);
/* Default peeling step: descend into the next inner reference.  */
919 exp = TREE_OPERAND (exp, 0);
923 /* We need to deal with variable arrays ending structures such as
924 struct { int length; int a[1]; } x; x.a[d]
925 struct { struct { int a; int b; } a[1]; } x; x.a[d].a
926 struct { struct { int a[1]; } a[1]; } x; x.a[0][d], x.a[d][0]
927 struct { int len; union { int a[1]; struct X x; } u; } x; x.u.a[d]
928 where we do not know maxsize for variable index accesses to
929 the array. The simplest way to conservatively deal with this
930 is to punt in the case that offset + maxsize reaches the
931 base type boundary. This needs to include possible trailing padding
932 that is there for alignment purposes. */
934 if (seen_variable_array_ref
936 && (!host_integerp (TYPE_SIZE (base_type), 1)
937 || (bit_offset + maxsize
938 == (signed) TREE_INT_CST_LOW (TYPE_SIZE (base_type)))))
941 /* In case of a decl or constant base object we can do better. */
945 /* If maxsize is unknown adjust it according to the size of the
948 && host_integerp (DECL_SIZE (exp), 1))
949 maxsize = TREE_INT_CST_LOW (DECL_SIZE (exp)) - bit_offset;
951 else if (CONSTANT_CLASS_P (exp))
953 /* If maxsize is unknown adjust it according to the size of the
954 base type constant. */
956 && host_integerp (TYPE_SIZE (TREE_TYPE (exp)), 1))
957 maxsize = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))) - bit_offset;
960 /* ??? Due to negative offsets in ARRAY_REF we can end up with
961 negative bit_offset here. We might want to store a zero offset
963 *poffset = bit_offset;
965 *pmax_size = maxsize;
970 /* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that
971 denotes the starting address of the memory access EXP.
972 Returns NULL_TREE if the offset is not constant or any component
973 is not BITS_PER_UNIT-aligned. */
/* NOTE(review): truncated paste -- case labels, the while(1) peeling
   loop, braces, and the "return NULL_TREE" failure paths are among the
   elided lines.  The code below is kept verbatim.  */
976 get_addr_base_and_unit_offset (tree exp, HOST_WIDE_INT *poffset)
978 HOST_WIDE_INT byte_offset = 0;
980 /* Compute cumulative byte-offset for nested component-refs and array-refs,
981 and find the ultimate containing object. */
984 switch (TREE_CODE (exp))
/* COMPONENT_REF (case label elided): the field offset must be a constant
   whole number of bytes, otherwise fail.  */
991 tree field = TREE_OPERAND (exp, 1);
992 tree this_offset = component_ref_field_offset (exp);
993 HOST_WIDE_INT hthis_offset;
996 || TREE_CODE (this_offset) != INTEGER_CST
997 || (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
1001 hthis_offset = TREE_INT_CST_LOW (this_offset);
1002 hthis_offset += (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
1004 byte_offset += hthis_offset;
1009 case ARRAY_RANGE_REF:
1011 tree index = TREE_OPERAND (exp, 1);
1012 tree low_bound, unit_size;
1014 /* If the resulting bit-offset is constant, track it. */
1015 if (TREE_CODE (index) == INTEGER_CST
1016 && (low_bound = array_ref_low_bound (exp),
1017 TREE_CODE (low_bound) == INTEGER_CST)
1018 && (unit_size = array_ref_element_size (exp),
1019 TREE_CODE (unit_size) == INTEGER_CST))
1021 HOST_WIDE_INT hindex = TREE_INT_CST_LOW (index);
1023 hindex -= TREE_INT_CST_LOW (low_bound);
1024 hindex *= TREE_INT_CST_LOW (unit_size);
1025 byte_offset += hindex;
/* IMAGPART_EXPR (presumably): step over the real part in bytes.  */
1036 byte_offset += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (exp)));
1039 case VIEW_CONVERT_EXPR:
/* MEM_REF (case label elided): add the constant byte offset and continue
   from the pointed-to decl.  */
1043 /* Hand back the decl for MEM[&decl, off]. */
1044 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
1046 if (!integer_zerop (TREE_OPERAND (exp, 1)))
1048 double_int off = mem_ref_offset (exp);
1049 gcc_assert (off.high == -1 || off.high == 0);
1050 byte_offset += double_int_to_shwi (off);
1052 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1056 case TARGET_MEM_REF:
1057 /* Hand back the decl for MEM[&decl, off]. */
1058 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR)
/* Variable index operands make the offset non-constant: fail.  */
1060 if (TMR_INDEX (exp) || TMR_INDEX2 (exp))
1062 if (!integer_zerop (TMR_OFFSET (exp)))
1064 double_int off = mem_ref_offset (exp);
1065 gcc_assert (off.high == -1 || off.high == 0);
1066 byte_offset += double_int_to_shwi (off);
1068 exp = TREE_OPERAND (TMR_BASE (exp), 0);
/* Default peeling step: descend into the next inner reference.  */
1076 exp = TREE_OPERAND (exp, 0);
1080 *poffset = byte_offset;
1084 /* Returns true if STMT references an SSA_NAME that has
1085 SSA_NAME_OCCURS_IN_ABNORMAL_PHI set, otherwise false. */
1088 stmt_references_abnormal_ssa_name (gimple stmt)
1091 use_operand_p use_p;
1093 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
1095 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (use_p)))