/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "basic-block.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "ssaexpand.h"
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;
static rtx expand_debug_expr (tree);

/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)
              && gimple_block (stmt) != TREE_BLOCK (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));
  if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
    TREE_BLOCK (t) = gimple_block (stmt);

  return t;
}
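/* For illustration (not part of the pass): for a statement like
   "a_1 = b_2 + c_3", which has a GIMPLE_BINARY_RHS, the function above
   rebuilds the right-hand side as

     build2 (PLUS_EXPR, TREE_TYPE (a_1), b_2, c_3);

   i.e. a GENERIC expression that the RTL expanders can consume.  */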
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif
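/* If X is an SSA_NAME, return the variable it is based on; otherwise
   return X itself.  */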
#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}
/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC ((size_t)-1)
/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset + frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;
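/* For example (illustration only): with STARTING_FRAME_OFFSET of 8 and
   a preferred boundary of 16 bytes, frame_phase would be 8, and an
   in-frame offset OFF is fully aligned exactly when
   (OFF + 8) % 16 == 0.  */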
/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Compute the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */
static unsigned int
align_local_variable (tree decl)
{
  unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
  DECL_ALIGN (decl) = align;
  return align / BITS_PER_UNIT;
}
/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */
static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
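/* A worked example of the rounding arithmetic above (illustration only):
   with FRAME_GROWS_DOWNWARD, frame_phase == 0, align == 16 and size == 20,
   starting from frame_offset == -8 we compute

     new_frame_offset = -8 - 20;   // -28
     new_frame_offset &= -16;      // -32, the next 16-byte-aligned slot

   "x &= -align" rounds toward minus infinity, and adding align - 1 first
   makes the same mask round up, assuming ALIGN is a power of two.  */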
/* Accumulate DECL into STACK_VARS.  */
static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  v = &stack_vars[stack_vars_num];

  v->decl = decl;
  v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (v->size == 0)
    v->size = 1;
  v->alignb = align_local_variable (SSAVAR (decl));
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}
/* Make the decls associated with luid's X and Y conflict.  */
static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (NULL);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (NULL);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with luid's X and Y conflict.  */
static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}
/* Returns true if TYPE is or contains a union type.  */
static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
        return true;

  return false;
}

/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then do add a conflict for these variables
   in the interference graph.  We also need to make sure to add conflicts
   for union containing structures.  Else RTL alias analysis comes along
   and due to type based aliasing rules decides that for two overlapping
   union temporaries { short s; int i; } accesses to the same mem through
   different types may not alias and happily reorders stores across
   life-time boundaries of the temporaries (See PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */
static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
        {
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
          if (aggr_i != aggr_j
              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case when
                 -fstrict-aliasing is used.  */
              || (contains_union && flag_strict_aliasing))
            add_stack_var_conflict (i, j);
        }
    }
}
/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */
static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  HOST_WIDE_INT sizea = stack_vars[ia].size;
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing.  */
  if (sizea > sizeb)
    return -1;
  if (sizea < sizeb)
    return 1;

  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna > alignb)
    return -1;
  if (aligna < alignb)
    return 1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return -1;
  if (uida > uidb)
    return 1;
  return 0;
}
/* If the points-to solution *PI points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */
static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert (visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}
/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */
static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          /* We should never end up partitioning SSA names (though they
             may end up on the stack).  Neither should we allocate stack
             space to something that is unused and thus unreferenced, except
             for -O0 where we are preserving even unreferenced variables.  */
          gcc_assert (DECL_P (decl)
                      && (!optimize
                          || referenced_var_lookup (cfun, DECL_UID (decl))));
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                         (void *)(size_t) uid)) = name;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (NULL);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}
/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */
static void
union_stack_vars (size_t a, size_t b)
{
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}
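/* Illustration only: after union_stack_vars (a, b), partition A is a
   singly linked list threaded through the "next" fields:

     stack_vars[a].next == b, stack_vars[b].next == <old a.next>,

   and every member's "representative" field names A, so
   "for (j = a; j != EOC; j = stack_vars[j].next)" visits the whole
   partition, as done by expand_stack_vars below.  */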
/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size in descending order.
        For each object A {
          S = size(A)
          Look for the largest non-conflicting object B with size <= S.
          UNION (A, B)
        }
*/
static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;

      /* Ignore objects that aren't partition representatives.  If we
         see a var that is not a partition representative, it must
         have been merged earlier.  */
      if (stack_vars[i].representative != i)
        continue;

      for (sj = si + 1; sj < n; ++sj)
        {
          size_t j = stack_vars_sorted[sj];
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            continue;

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j);
        }
    }

  update_alias_info_with_stack_vars ();
}
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */
static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
        }
      fputc ('\n', dump_file);
    }
}
/* Assign rtl to DECL at BASE + OFFSET.  */
static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
                         HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (base, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
        offset -= frame_phase;
      align = offset & -offset;
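      /* "offset & -offset" isolates the lowest set bit, i.e. the largest
         power of two dividing OFFSET: e.g. an offset of 24 bytes yields
         24 & -24 == 8, so the slot is known to be 8-byte aligned.  */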
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
        align = base_align;

      /* One would think that we could assert that we're not decreasing
         alignment here, but (at least) the i386 port does exactly this
         via the MINIMUM_ALIGNMENT hook.  */

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}
/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */
static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
        {
          unsigned alignb;

          i = stack_vars_sorted[si];
          alignb = stack_vars[i].alignb;

          /* Stop when we get to the first decl with "small" alignment.  */
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Skip variables that aren't partition representatives.  */
          if (stack_vars[i].representative != i)
            continue;

          /* Skip variables that have already had rtl assigned.  See also
             add_stack_var where we perpetrate this pc_rtx hack.  */
          decl = stack_vars[i].decl;
          if ((TREE_CODE (decl) == SSA_NAME
               ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
               : DECL_RTL (decl)) != pc_rtx)
            continue;

          large_size += alignb - 1;
          large_size &= -(HOST_WIDE_INT)alignb;
          large_size += stack_vars[i].size;
        }

      /* If there were any, allocate space.  */
      if (large_size > 0)
        large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
                                                   large_align, true);
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if ((TREE_CODE (decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
           : DECL_RTL (decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (decl))
        continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
          base = virtual_stack_vars_rtx;
          base_align = crtl->max_used_stack_slot_alignment;
        }
      else
        {
          /* Large alignment is only processed in the last pass.  */
          if (pred)
            continue;
          gcc_assert (large_base != NULL);

          large_alloc += alignb - 1;
          large_alloc &= -(HOST_WIDE_INT)alignb;
          offset = large_alloc;
          large_alloc += stack_vars[i].size;

          base = large_base;
          base_align = large_align;
        }

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          expand_one_stack_var_at (stack_vars[j].decl,
                                   base, base_align,
                                   offset);
        }
    }

  gcc_assert (large_alloc == large_size);
}
/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */
static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset;
  unsigned byte_align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  byte_align = align_local_variable (SSAVAR (var));

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
                           crtl->max_used_stack_slot_alignment, offset);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */
static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */
static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, get_pointer_alignment (var));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */
static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}
/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */
static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return stack usage this variable is supposed to take.  */
static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
    {
      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
               || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
        /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
           or variables which were assigned a stack slot already by
           expand_one_stack_var_at - in the latter case DECL_ALIGN has been
           changed from the offset chosen to it.  */
        align = crtl->stack_alignment_estimated;
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
         it, which means that in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = POINTER_SIZE;
    }

  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after stack
         realign decision is made.  */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
    {
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */
static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t)
        && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
            || !DECL_NONSHAREABLE (t)))
      expand_one_var (t, toplevel, true);

  this_sv_num = stack_vars_num;

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;

      for (i = old_sv_num; i < new_sv_num; ++i)
        for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;)
          add_stack_var_conflict (i, j);
    }
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */
static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
        || !DECL_NONSHAREABLE (t))
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}
/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8
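/* For example (assuming the default --param ssp-buffer-size=8): a type
   "struct { char buf[64]; int n; }" classifies as
   SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY | SPCT_HAS_AGGREGATE,
   while "char buf[4]" alone is SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY.  */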
static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}

/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */
static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}
/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */
static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */
static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */
static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}
/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  tree t;
  unsigned ix;

  /* Set TREE_USED on all variables in the local_decls.  */
  FOR_EACH_LOCAL_DECL (cfun, ix, t)
    TREE_USED (t) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  size_t i, n = stack_vars_num;
  for (i = 0; i < n; i++)
    BITMAP_FREE (stack_vars[i].conflicts);
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_alloc = stack_vars_num = 0;
}
/* Make a fair guess for the size of the stack frame of the function
   in NODE.  This doesn't have to be exact, the result is only used in
   the inline heuristics.  So we don't want to run the full stack var
   packing algorithm (which is quadratic in the number of stack vars).
   Instead, we calculate the total size of all stack vars.  This turns
   out to be a pretty fair estimate -- packing of stack vars doesn't
   happen very often.  */
HOST_WIDE_INT
estimated_stack_frame_size (struct cgraph_node *node)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree var;
  tree old_cur_fun_decl = current_function_decl;
  referenced_var_iterator rvi;
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

  current_function_decl = node->decl;
  push_cfun (fn);

  gcc_checking_assert (gimple_referenced_vars (fn));
  FOR_EACH_REFERENCED_VAR (fn, var, rvi)
    size += expand_one_var (var, true, false);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
      fini_vars_expansion ();
    }
  pop_cfun ();
  current_function_decl = old_cur_fun_decl;
  return size;
}
/* Expand all variables used in the function.  */
static void
expand_used_vars (void)
{
  tree var, outer_block = DECL_INITIAL (current_function_decl);
  VEC(tree,heap) *maybe_local_decls = NULL;
  unsigned i;
  unsigned len;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }
  init_vars_expansion ();

  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = partition_to_var (SA.map, i);

      gcc_assert (is_gimple_reg (var));
      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        expand_one_var (var, true, true);
      else
        {
          /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
             contain the default def (representing the parm or result itself)
             we don't do anything here.  But those which don't contain the
             default def (representing a temporary based on the parm/result)
             we need to allocate space just like for normal VAR_DECLs.  */
          if (!bitmap_bit_p (SA.partition_has_default_def, i))
            {
              expand_one_var (var, true, true);
              gcc_assert (SA.partition_to_pseudo[i]);
            }
        }
    }

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */

  len = VEC_length (tree, cfun->local_decls);
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      bool expand_now = false;

      /* Expanded above already.  */
      if (is_gimple_reg (var))
        {
          TREE_USED (var) = 0;
          goto next;
        }
      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* If the variable is not associated with any block, then it
         was created by the optimizers, and could be live anywhere
         in the function.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
        {
          rtx rtl = DECL_RTL_IF_SET (var);

          /* Keep artificial non-ignored vars in cfun->local_decls
             chain until instantiate_decls.  */
          if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
            add_local_decl (cfun, var);
          else if (rtl == NULL_RTX)
            /* If rtl isn't set yet, which can happen e.g. with
               -fstack-protector, retry before returning from this
               function.  */
            VEC_safe_push (tree, heap, maybe_local_decls, var);
        }
    }

  /* We duplicated some of the decls in CFUN->LOCAL_DECLS.

     +-----------------+-----------------+
     | ...processed... | ...duplicates...|
     +-----------------+-----------------+
                       ^
                       +-- LEN points here.

     We just want the duplicates, as those are the artificial
     non-ignored vars that we want to keep until instantiate_decls.
     Move them down and truncate the array.  */
  if (!VEC_empty (tree, cfun->local_decls))
    VEC_block_remove (tree, cfun->local_decls, 0, len);
  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Due to the way alias sets work, no variables with non-conflicting
         alias sets may be assigned the same address.  Add conflicts to
         reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
          && (cfun->calls_alloca || has_protected_decls)))
    create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == 2)
            expand_stack_vars (stack_protect_decl_phase_2);
        }

      expand_stack_vars (NULL);

      fini_vars_expansion ();
    }

  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation hasn't assigned
     rtl to them.  */
  FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
    {
      rtx rtl = DECL_RTL_IF_SET (var);

      /* Keep artificial non-ignored vars in cfun->local_decls
         chain until instantiate_decls.  */
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
        add_local_decl (cfun, var);
    }
  VEC_free (tree, heap, maybe_local_decls);

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
    }
}
/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */
static void
maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
                         TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}

/* Maps the blocks that do not contain tree labels to rtx labels.  */

static struct pointer_map_t *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */
static rtx
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;
  gimple lab_stmt;
  void **elt;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  elt = pointer_map_contains (lab_rtx_for_bb, bb);
  if (elt)
    return (rtx) *elt;

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      lab_stmt = gsi_stmt (gsi);
      if (gimple_code (lab_stmt) != GIMPLE_LABEL)
        break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
        break;

      return label_rtx (lab);
    }

  elt = pointer_map_insert (lab_rtx_for_bb, bb);
  *elt = gen_label_rtx ();
  return (rtx) *elt;
}
/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  LAST is the
   last instruction before the just emitted jump sequence.  */
static void
maybe_cleanup_end_of_block (edge e, rtx last)
{
  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))
    {
      rtx insn;
      remove_edge (e);
      /* Now, we have a single successor block, if we have insns to
         insert on the remaining edge we potentially will insert
         it at the end of this block (if the dest block isn't feasible)
         in order to avoid splitting the edge.  This insertion will take
         place in front of the last jump.  But we might have emitted
         multiple jumps (conditional and one unconditional) to the
         same destination.  Inserting in front of the last one then
         is a problem.  See PR 40021.  We fix this by deleting all
         jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
         confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != last;)
        {
          insn = PREV_INSN (insn);
          if (JUMP_P (NEXT_INSN (insn)))
            {
              if (!any_condjump_p (NEXT_INSN (insn)))
                {
                  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
                  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
                }
              delete_insn (NEXT_INSN (insn));
            }
        }
    }
}
/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */
static basic_block
expand_gimple_cond (basic_block bb, gimple stmt)
{
  basic_block new_bb, dest;
  edge new_edge;
  edge true_edge;
  edge false_edge;
  rtx last2, last;
  enum tree_code code;
  tree op0, op1;

  code = gimple_cond_code (stmt);
  op0 = gimple_cond_lhs (stmt);
  op1 = gimple_cond_rhs (stmt);
  /* We're sometimes presented with such code:
       D.123_1 = x < y;
       if (D.123_1 != 0)
         ...
     This would expand to two comparisons which then later might
     be cleaned up by combine.  But some pattern matchers like if-conversion
     work better when there's only one compare, so make up for this
     here as special exception if TER would have made the same change.  */
  if (gimple_cond_single_var_p (stmt)
      && SA.values
      && TREE_CODE (op0) == SSA_NAME
      && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
    {
      gimple second = SSA_NAME_DEF_STMT (op0);
      if (gimple_code (second) == GIMPLE_ASSIGN)
        {
          enum tree_code code2 = gimple_assign_rhs_code (second);
          if (TREE_CODE_CLASS (code2) == tcc_comparison)
            {
              code = code2;
              op0 = gimple_assign_rhs1 (second);
              op1 = gimple_assign_rhs2 (second);
            }
          /* If jumps are cheap turn some more codes into
             jumpy sequences.  */
          else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
            {
              if ((code2 == BIT_AND_EXPR
                   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
                   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
                  || code2 == TRUTH_AND_EXPR)
                {
                  code = TRUTH_ANDIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
                }
              else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
                {
                  code = TRUTH_ORIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
                }
            }
        }
    }

  last2 = last = get_last_insn ();

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  set_curr_insn_source_location (gimple_location (stmt));
  set_curr_insn_block (gimple_block (stmt));

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     two-way jump that needs to be decomposed into two basic blocks.  */
  if (false_edge->dest == bb->next_bb)
    {
      jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
                true_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (true_edge->goto_locus)
        {
          set_curr_insn_source_location (true_edge->goto_locus);
          set_curr_insn_block (true_edge->goto_block);
          true_edge->goto_locus = curr_insn_locator ();
        }
      true_edge->goto_block = NULL;
      false_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (false_edge, last);
      return NULL;
    }
  if (true_edge->dest == bb->next_bb)
    {
      jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
                   false_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (false_edge->goto_locus)
        {
          set_curr_insn_source_location (false_edge->goto_locus);
          set_curr_insn_block (false_edge->goto_block);
          false_edge->goto_locus = curr_insn_locator ();
        }
      false_edge->goto_block = NULL;
      true_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (true_edge, last);
      return NULL;
    }

  jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
            true_edge->probability);
  last = get_last_insn ();
  if (false_edge->goto_locus)
    {
      set_curr_insn_source_location (false_edge->goto_locus);
      set_curr_insn_block (false_edge->goto_block);
      false_edge->goto_locus = curr_insn_locator ();
    }
  false_edge->goto_block = NULL;
  emit_jump (label_rtx_for_bb (false_edge->dest));

  BB_END (bb) = last;
  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count;
  new_bb->frequency = EDGE_FREQUENCY (false_edge);
  new_edge = make_edge (new_bb, dest, 0);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = new_bb->count;
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  if (true_edge->goto_locus)
    {
      set_curr_insn_source_location (true_edge->goto_locus);
      set_curr_insn_block (true_edge->goto_block);
      true_edge->goto_locus = curr_insn_locator ();
    }
  true_edge->goto_block = NULL;

  return new_bb;
}
/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
   statement STMT.  */
static void
expand_call_stmt (gimple stmt)
{
  tree exp, decl, lhs;
  bool builtin_p;
  size_t i;

  if (gimple_call_internal_p (stmt))
    {
      expand_internal_call (stmt);
      return;
    }

  exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);

  CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
  decl = gimple_call_fndecl (stmt);
  builtin_p = decl && DECL_BUILT_IN (decl);

  /* If this is not a builtin function, the function type through which the
     call is made may be different from the type of the function.  */
  if (!builtin_p)
    CALL_EXPR_FN (exp)
      = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
                      CALL_EXPR_FN (exp));

  TREE_TYPE (exp) = gimple_call_return_type (stmt);
  CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);

  for (i = 0; i < gimple_call_num_args (stmt); i++)
    {
      tree arg = gimple_call_arg (stmt, i);
      gimple def;
      /* TER addresses into arguments of builtin functions so we have a
         chance to infer more correct alignment information.  See PR39954.  */
      if (builtin_p
          && TREE_CODE (arg) == SSA_NAME
          && (def = get_gimple_for_ssa_name (arg))
          && gimple_assign_rhs_code (def) == ADDR_EXPR)
        arg = gimple_assign_rhs1 (def);
      CALL_EXPR_ARG (exp, i) = arg;
    }

  if (gimple_has_side_effects (stmt))
    TREE_SIDE_EFFECTS (exp) = 1;

  if (gimple_call_nothrow_p (stmt))
    TREE_NOTHROW (exp) = 1;

  CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
  CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
  if (decl
      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
          || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
  else
    CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
  CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
  CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
  SET_EXPR_LOCATION (exp, gimple_location (stmt));
  TREE_BLOCK (exp) = gimple_block (stmt);

  /* Ensure RTL is created for debug args.  */
  if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
    {
      VEC(tree, gc) **debug_args = decl_debug_args_lookup (decl);
      unsigned int ix;
      tree dtemp;

      if (debug_args)
        for (ix = 1; VEC_iterate (tree, *debug_args, ix, dtemp); ix += 2)
          {
            gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
            expand_debug_expr (dtemp);
          }
    }

  lhs = gimple_call_lhs (stmt);
  if (lhs)
    expand_assignment (lhs, exp, false);
  else
    expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
}
/* A subroutine of expand_gimple_stmt, expanding one gimple statement
   STMT that doesn't require special handling for outgoing edges.  That
   is no tailcalls and no GIMPLE_COND.  */
static void
expand_gimple_stmt_1 (gimple stmt)
{
  tree op0;

  set_curr_insn_source_location (gimple_location (stmt));
  set_curr_insn_block (gimple_block (stmt));

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      op0 = gimple_goto_dest (stmt);
      if (TREE_CODE (op0) == LABEL_DECL)
        expand_goto (op0);
      else
        expand_computed_goto (op0);
      break;
    case GIMPLE_LABEL:
      expand_label (gimple_label_label (stmt));
      break;
    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
      break;

    case GIMPLE_ASM:
      expand_asm_stmt (stmt);
      break;
    case GIMPLE_CALL:
      expand_call_stmt (stmt);
      break;

    case GIMPLE_RETURN:
      op0 = gimple_return_retval (stmt);

      if (op0 && op0 != error_mark_node)
        {
          tree result = DECL_RESULT (current_function_decl);

          /* If we are not returning the current function's RESULT_DECL,
             build an assignment to it.  */
          if (op0 != result)
            {
              /* I believe that a function's RESULT_DECL is unique.  */
              gcc_assert (TREE_CODE (op0) != RESULT_DECL);

              /* ??? We'd like to use simply expand_assignment here,
                 but this fails if the value is of BLKmode but the return
                 decl is a register.  expand_return has special handling
                 for this combination, which eventually should move
                 to common code.  See comments there.  Until then, let's
                 build a modify expression :-/  */
              op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
                            result, op0);
            }
        }
      if (!op0)
        expand_null_return ();
      else
        expand_return (op0);
      break;

    case GIMPLE_ASSIGN:
      {
        tree lhs = gimple_assign_lhs (stmt);

        /* Tree expand used to fiddle with |= and &= of two bitfield
           COMPONENT_REFs here.  This can't happen with gimple, the LHS
           of binary assigns must be a gimple reg.  */

        if (TREE_CODE (lhs) != SSA_NAME
            || get_gimple_rhs_class (gimple_expr_code (stmt))
               == GIMPLE_SINGLE_RHS)
          {
            tree rhs = gimple_assign_rhs1 (stmt);
            gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
                        == GIMPLE_SINGLE_RHS);
            if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
              SET_EXPR_LOCATION (rhs, gimple_location (stmt));
            expand_assignment (lhs, rhs,
                               gimple_assign_nontemporal_move_p (stmt));
          }
        else
          {
            rtx target, temp;
            bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
            struct separate_ops ops;
            bool promoted = false;

            target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
            if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
              promoted = true;

            ops.code = gimple_assign_rhs_code (stmt);
            ops.type = TREE_TYPE (lhs);
            switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
              {
              case GIMPLE_TERNARY_RHS:
                ops.op2 = gimple_assign_rhs3 (stmt);
                /* Fallthru */
              case GIMPLE_BINARY_RHS:
                ops.op1 = gimple_assign_rhs2 (stmt);
                /* Fallthru */
              case GIMPLE_UNARY_RHS:
                ops.op0 = gimple_assign_rhs1 (stmt);
                break;
              default:
                gcc_unreachable ();
              }
            ops.location = gimple_location (stmt);

            /* If we want to use a nontemporal store, force the value to
               register first.  If we store into a promoted register,
               don't directly expand to target.  */
            temp = nontemporal || promoted ? NULL_RTX : target;
            temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
                                       EXPAND_NORMAL);

            if (temp == target)
              ;
            else if (promoted)
              {
                int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
                /* If TEMP is a VOIDmode constant, use convert_modes to make
                   sure that we properly convert it.  */
                if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
                  {
                    temp = convert_modes (GET_MODE (target),
                                          TYPE_MODE (ops.type),
                                          temp, unsignedp);
                    temp = convert_modes (GET_MODE (SUBREG_REG (target)),
                                          GET_MODE (target), temp, unsignedp);
                  }

                convert_move (SUBREG_REG (target), temp, unsignedp);
              }
            else if (nontemporal && emit_storent_insn (target, temp))
              ;
            else
              {
                temp = force_operand (temp, target);
                if (temp != target)
                  emit_move_insn (target, temp);
              }
          }
      }
      break;

    default:
      gcc_unreachable ();
    }
}
2051 /* Expand one gimple statement STMT and return the last RTL instruction
2052 before any of the newly generated ones.
2054 In addition to generating the necessary RTL instructions this also
2055 sets REG_EH_REGION notes if necessary and sets the current source
2056 location for diagnostics. */
2059 expand_gimple_stmt (gimple stmt)
2061 location_t saved_location = input_location;
2062 rtx last = get_last_insn ();
2067 /* We need to save and restore the current source location so that errors
2068 discovered during expansion are emitted with the right location. But
2069 it would be better if the diagnostic routines used the source location
2070 embedded in the tree nodes rather than globals. */
2071 if (gimple_has_location (stmt))
2072 input_location = gimple_location (stmt);
2074 expand_gimple_stmt_1 (stmt);
2076 /* Free any temporaries used to evaluate this statement. */
2079 input_location = saved_location;
2081 /* Mark all insns that may trap. */
2082 lp_nr = lookup_stmt_eh_lp (stmt);
2086 for (insn = next_real_insn (last); insn;
2087 insn = next_real_insn (insn))
2089 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2090 /* If we want exceptions for non-call insns, any
2091 may_trap_p instruction may throw. */
2092 && GET_CODE (PATTERN (insn)) != CLOBBER
2093 && GET_CODE (PATTERN (insn)) != USE
2094 && insn_could_throw_p (insn))
2095 make_reg_eh_region_note (insn, 0, lp_nr);
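/* For example, if STMT sits in EH landing-pad region LP_NR and expands
to a memory load plus an add, only the insns for which
insn_could_throw_p holds (typically the load) receive the
REG_EH_REGION note; CLOBBER and USE patterns are skipped. This is an
illustrative reading of the loop above, not an exhaustive account. */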
2102 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2103 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2104 generated a tail call (something that might be denied by the ABI
2105 rules governing the call; see calls.c).
2107 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2108 can still reach the rest of BB. The case here is __builtin_sqrt,
2109 where the NaN result goes through the external function (with a
2110 tailcall) and the normal result happens via a sqrt instruction. */
2113 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2121 last2 = last = expand_gimple_stmt (stmt);
2123 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2124 if (CALL_P (last) && SIBLING_CALL_P (last))
2127 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2129 *can_fallthru = true;
2133 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2134 Any instructions emitted here are about to be deleted. */
2135 do_pending_stack_adjust ();
2137 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
/* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
EH or abnormal edges, we shouldn't have created a tail call in
the first place. So it seems to me we should just be removing
all edges here, or redirecting the existing fallthru edge to
the exit block. */
2147 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2149 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2151 if (e->dest != EXIT_BLOCK_PTR)
2153 e->dest->count -= e->count;
2154 e->dest->frequency -= EDGE_FREQUENCY (e);
2155 if (e->dest->count < 0)
2157 if (e->dest->frequency < 0)
2158 e->dest->frequency = 0;
2161 probability += e->probability;
2168 /* This is somewhat ugly: the call_expr expander often emits instructions
2169 after the sibcall (to perform the function return). These confuse the
2170 find_many_sub_basic_blocks code, so we need to get rid of these. */
2171 last = NEXT_INSN (last);
2172 gcc_assert (BARRIER_P (last));
2174 *can_fallthru = false;
2175 while (NEXT_INSN (last))
/* For instance, the sqrt builtin expander expands an if-statement
with the sibcall in the then-branch and a label for the else-branch. */
2179 if (LABEL_P (NEXT_INSN (last)))
2181 *can_fallthru = true;
2184 delete_insn (NEXT_INSN (last));
2187 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2188 e->probability += probability;
2191 update_bb_for_insn (bb);
2193 if (NEXT_INSN (last))
2195 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2198 if (BARRIER_P (last))
2199 BB_END (bb) = PREV_INSN (last);
2202 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
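/* A sketch of the conditional tail call shape described at the top of
this function (assumed, simplified): expanding __builtin_sqrt may yield

     insn: r = sqrt-style instruction (x)   ; normal result
     jump_insn: if ordinary (x) goto L
     call_insn: sibcall sqrt                ; NaN path via libcall
   L:

so a sibcall coexists with a label through which the rest of the block
remains reachable, and *CAN_FALLTHRU is set. */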
2207 /* Return the difference between the floor and the truncated result of
2208 a signed division by OP1 with remainder MOD. */
2210 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2212 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2213 return gen_rtx_IF_THEN_ELSE
2214 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2215 gen_rtx_IF_THEN_ELSE
2216 (mode, gen_rtx_LT (BImode,
2217 gen_rtx_DIV (mode, op1, mod),
2219 constm1_rtx, const0_rtx),
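/* Worked example (illustrative): with op0 = -7 and op1 = 2,
truncation gives div = -3 and mod = -1. Since mod != 0 and
op1 / mod = 2 / -1 = -2 < 0, the adjustment is -1, and
div + adj = -4 == floor (-7 / 2). */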
2223 /* Return the difference between the ceil and the truncated result of
2224 a signed division by OP1 with remainder MOD. */
2226 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2228 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2229 return gen_rtx_IF_THEN_ELSE
2230 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2231 gen_rtx_IF_THEN_ELSE
2232 (mode, gen_rtx_GT (BImode,
2233 gen_rtx_DIV (mode, op1, mod),
2235 const1_rtx, const0_rtx),
2239 /* Return the difference between the ceil and the truncated result of
2240 an unsigned division by OP1 with remainder MOD. */
2242 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2244 /* (mod != 0 ? 1 : 0) */
2245 return gen_rtx_IF_THEN_ELSE
2246 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2247 const1_rtx, const0_rtx);
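/* Worked example (illustrative): for 7 / 2 truncation gives div = 3
and mod = 1. Signed: op1 / mod = 2 > 0, so ceil_sdiv_adjust yields 1
and div + adj = 4 == ceil (7 / 2). Unsigned: mod != 0 already implies
an adjustment of 1. */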
2250 /* Return the difference between the rounded and the truncated result
2251 of a signed division by OP1 with remainder MOD. Halfway cases are
2252 rounded away from zero, rather than to the nearest even number. */
2254 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2256 /* (abs (mod) >= abs (op1) - abs (mod)
? (op1 / mod > 0 ? 1 : -1)
: 0) */
2259 return gen_rtx_IF_THEN_ELSE
2260 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2261 gen_rtx_MINUS (mode,
2262 gen_rtx_ABS (mode, op1),
2263 gen_rtx_ABS (mode, mod))),
2264 gen_rtx_IF_THEN_ELSE
2265 (mode, gen_rtx_GT (BImode,
2266 gen_rtx_DIV (mode, op1, mod),
2268 const1_rtx, constm1_rtx),
/* Return the difference between the rounded and the truncated result
of an unsigned division by OP1 with remainder MOD. Halfway cases
are rounded away from zero, rather than to the nearest even
number. */
2277 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2279 /* (mod >= op1 - mod ? 1 : 0) */
2280 return gen_rtx_IF_THEN_ELSE
2281 (mode, gen_rtx_GE (BImode, mod,
2282 gen_rtx_MINUS (mode, op1, mod)),
2283 const1_rtx, const0_rtx);
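/* Worked example (illustrative): for unsigned 7 / 2, mod = 1 and
op1 - mod = 1, so mod >= op1 - mod holds, the adjustment is 1 and
3 + 1 = 4, i.e. 3.5 rounded away from zero. For signed -7 / 2,
abs (mod) = 1 >= abs (op1) - abs (mod) = 1 and op1 / mod = -2 is
not > 0, so round_sdiv_adjust yields -1 and -3 + -1 = -4. */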
/* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
insns. */
2290 convert_debug_memory_address (enum machine_mode mode, rtx x,
2293 enum machine_mode xmode = GET_MODE (x);
2295 #ifndef POINTERS_EXTEND_UNSIGNED
2296 gcc_assert (mode == Pmode
2297 || mode == targetm.addr_space.address_mode (as));
2298 gcc_assert (xmode == mode || xmode == VOIDmode);
2301 enum machine_mode address_mode = targetm.addr_space.address_mode (as);
2302 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
2304 gcc_assert (mode == address_mode || mode == pointer_mode);
2306 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2309 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
2310 x = simplify_gen_subreg (mode, x, xmode,
2311 subreg_lowpart_offset
2313 else if (POINTERS_EXTEND_UNSIGNED > 0)
2314 x = gen_rtx_ZERO_EXTEND (mode, x);
2315 else if (!POINTERS_EXTEND_UNSIGNED)
2316 x = gen_rtx_SIGN_EXTEND (mode, x);
2319 switch (GET_CODE (x))
2322 if ((SUBREG_PROMOTED_VAR_P (x)
2323 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
2324 || (GET_CODE (SUBREG_REG (x)) == PLUS
2325 && REG_P (XEXP (SUBREG_REG (x), 0))
2326 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
2327 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
2328 && GET_MODE (SUBREG_REG (x)) == mode)
2329 return SUBREG_REG (x);
2332 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
2333 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
2336 temp = shallow_copy_rtx (x);
2337 PUT_MODE (temp, mode);
2340 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2342 temp = gen_rtx_CONST (mode, temp);
2346 if (CONST_INT_P (XEXP (x, 1)))
2348 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2350 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
/* Don't know how to express ptr_extend as an operation in debug info. */
2359 #endif /* POINTERS_EXTEND_UNSIGNED */
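/* Illustrative case (assuming a target with SImode ptr_mode and DImode
Pmode where POINTERS_EXTEND_UNSIGNED > 0): an SImode debug address X
becomes (zero_extend:DI X), while the SUBREG/LABEL_REF/CONST/PLUS
cases above rewrite the expression in place so that no insns ever
need to be emitted for it. */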
2364 /* Return an RTX equivalent to the value of the parameter DECL. */
2367 expand_debug_parm_decl (tree decl)
2369 rtx incoming = DECL_INCOMING_RTL (decl);
2372 && GET_MODE (incoming) != BLKmode
2373 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
2374 || (MEM_P (incoming)
2375 && REG_P (XEXP (incoming, 0))
2376 && HARD_REGISTER_P (XEXP (incoming, 0)))))
2378 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
2380 #ifdef HAVE_window_save
2381 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
2382 If the target machine has an explicit window save instruction, the
2383 actual entry value is the corresponding OUTGOING_REGNO instead. */
2384 if (REG_P (incoming)
2385 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
2387 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
2388 OUTGOING_REGNO (REGNO (incoming)), 0);
2389 else if (MEM_P (incoming))
2391 rtx reg = XEXP (incoming, 0);
2392 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
2394 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
2395 incoming = replace_equiv_address_nv (incoming, reg);
2400 ENTRY_VALUE_EXP (rtl) = incoming;
2405 && GET_MODE (incoming) != BLKmode
2406 && !TREE_ADDRESSABLE (decl)
2408 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
2409 || (GET_CODE (XEXP (incoming, 0)) == PLUS
2410 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
2411 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
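/* The ENTRY_VALUE built above is intended to become an entry-value
operation (DW_OP_GNU_entry_value-style) in the debug info, letting a
debugger recover e.g. an argument register that the function body has
since clobbered. This describes the intent, not the exact DWARF
emitted. */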
2417 /* Return an RTX equivalent to the value of the tree expression EXP. */
2420 expand_debug_expr (tree exp)
2422 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2423 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2424 enum machine_mode inner_mode = VOIDmode;
2425 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2428 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2430 case tcc_expression:
2431 switch (TREE_CODE (exp))
2435 case WIDEN_MULT_PLUS_EXPR:
2436 case WIDEN_MULT_MINUS_EXPR:
2440 case TRUTH_ANDIF_EXPR:
2441 case TRUTH_ORIF_EXPR:
2442 case TRUTH_AND_EXPR:
2444 case TRUTH_XOR_EXPR:
2447 case TRUTH_NOT_EXPR:
2456 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2463 case tcc_comparison:
2464 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2471 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2472 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2482 case tcc_exceptional:
2483 case tcc_declaration:
2489 switch (TREE_CODE (exp))
2492 if (!lookup_constant_def (exp))
2494 if (strlen (TREE_STRING_POINTER (exp)) + 1
2495 != (size_t) TREE_STRING_LENGTH (exp))
2497 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2498 op0 = gen_rtx_MEM (BLKmode, op0);
2499 set_mem_attributes (op0, exp, 0);
2502 /* Fall through... */
2507 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2511 gcc_assert (COMPLEX_MODE_P (mode));
2512 op0 = expand_debug_expr (TREE_REALPART (exp));
2513 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2514 return gen_rtx_CONCAT (mode, op0, op1);
2516 case DEBUG_EXPR_DECL:
2517 op0 = DECL_RTL_IF_SET (exp);
2522 op0 = gen_rtx_DEBUG_EXPR (mode);
2523 DEBUG_EXPR_TREE_DECL (op0) = exp;
2524 SET_DECL_RTL (exp, op0);
2534 op0 = DECL_RTL_IF_SET (exp);
2536 /* This decl was probably optimized away. */
2539 if (TREE_CODE (exp) != VAR_DECL
2540 || DECL_EXTERNAL (exp)
2541 || !TREE_STATIC (exp)
2543 || DECL_HARD_REGISTER (exp)
2544 || DECL_IN_CONSTANT_POOL (exp)
2545 || mode == VOIDmode)
2548 op0 = make_decl_rtl_for_debug (exp);
2550 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2551 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2555 op0 = copy_rtx (op0);
2557 if (GET_MODE (op0) == BLKmode
/* If op0 is not BLKmode, but mode is BLKmode, the address
adjustment below would ICE. While it is likely a FE bug,
try to be robust here. See PR43166. */
2562 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2564 gcc_assert (MEM_P (op0));
2565 op0 = adjust_address_nv (op0, mode, 0);
2576 inner_mode = GET_MODE (op0);
2578 if (mode == inner_mode)
2581 if (inner_mode == VOIDmode)
2583 if (TREE_CODE (exp) == SSA_NAME)
2584 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2586 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2587 if (mode == inner_mode)
2591 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2593 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2594 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2595 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2596 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2598 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2600 else if (FLOAT_MODE_P (mode))
2602 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2603 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2604 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2606 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2608 else if (FLOAT_MODE_P (inner_mode))
2611 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2613 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2615 else if (CONSTANT_P (op0)
2616 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
2617 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2618 subreg_lowpart_offset (mode,
2620 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2621 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2623 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
2625 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
2631 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2633 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
2634 TREE_OPERAND (exp, 0),
2635 TREE_OPERAND (exp, 1));
2637 return expand_debug_expr (newexp);
2641 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2645 if (TREE_CODE (exp) == MEM_REF)
2647 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
2648 || (GET_CODE (op0) == PLUS
2649 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
2650 /* (mem (debug_implicit_ptr)) might confuse aliasing.
2651 Instead just use get_inner_reference. */
2654 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2655 if (!op1 || !CONST_INT_P (op1))
2658 op0 = plus_constant (op0, INTVAL (op1));
2661 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2662 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2664 as = ADDR_SPACE_GENERIC;
2666 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2668 if (op0 == NULL_RTX)
2671 op0 = gen_rtx_MEM (mode, op0);
2672 set_mem_attributes (op0, exp, 0);
2673 if (TREE_CODE (exp) == MEM_REF
2674 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2675 set_mem_expr (op0, NULL_TREE);
2676 set_mem_addr_space (op0, as);
2680 case TARGET_MEM_REF:
2681 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2682 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
2685 op0 = expand_debug_expr
2686 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2690 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2691 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2693 as = ADDR_SPACE_GENERIC;
2695 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2697 if (op0 == NULL_RTX)
2700 op0 = gen_rtx_MEM (mode, op0);
2702 set_mem_attributes (op0, exp, 0);
2703 set_mem_addr_space (op0, as);
2709 case ARRAY_RANGE_REF:
2714 case VIEW_CONVERT_EXPR:
2716 enum machine_mode mode1;
2717 HOST_WIDE_INT bitsize, bitpos;
2720 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2721 &mode1, &unsignedp, &volatilep, false);
2727 orig_op0 = op0 = expand_debug_expr (tem);
2734 enum machine_mode addrmode, offmode;
2739 op0 = XEXP (op0, 0);
2740 addrmode = GET_MODE (op0);
2741 if (addrmode == VOIDmode)
2744 op1 = expand_debug_expr (offset);
2748 offmode = GET_MODE (op1);
2749 if (offmode == VOIDmode)
2750 offmode = TYPE_MODE (TREE_TYPE (offset));
2752 if (addrmode != offmode)
2753 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2754 subreg_lowpart_offset (addrmode,
/* Don't use offset_address here, we don't need a
recognizable address, and we don't want to generate code. */
2760 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
2766 if (mode1 == VOIDmode)
2768 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
2769 if (bitpos >= BITS_PER_UNIT)
2771 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2772 bitpos %= BITS_PER_UNIT;
2774 else if (bitpos < 0)
2777 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2778 op0 = adjust_address_nv (op0, mode1, units);
2779 bitpos += units * BITS_PER_UNIT;
2781 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2782 op0 = adjust_address_nv (op0, mode, 0);
2783 else if (GET_MODE (op0) != mode1)
2784 op0 = adjust_address_nv (op0, mode1, 0);
2786 op0 = copy_rtx (op0);
2787 if (op0 == orig_op0)
2788 op0 = shallow_copy_rtx (op0);
2789 set_mem_attributes (op0, exp, 0);
2792 if (bitpos == 0 && mode == GET_MODE (op0))
2798 if (GET_MODE (op0) == BLKmode)
2801 if ((bitpos % BITS_PER_UNIT) == 0
2802 && bitsize == GET_MODE_BITSIZE (mode1))
2804 enum machine_mode opmode = GET_MODE (op0);
2806 if (opmode == VOIDmode)
2807 opmode = TYPE_MODE (TREE_TYPE (tem));
/* This condition may hold if we're expanding the address
right past the end of an array that turned out not to
be addressable (i.e., the address was only computed in
debug stmts). The gen_subreg below would rightfully
crash, and the address doesn't really exist, so just
drop it. */
2815 if (bitpos >= GET_MODE_BITSIZE (opmode))
2818 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
2819 return simplify_gen_subreg (mode, op0, opmode,
2820 bitpos / BITS_PER_UNIT);
2823 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
2824 && TYPE_UNSIGNED (TREE_TYPE (exp))
2826 : ZERO_EXTRACT, mode,
2827 GET_MODE (op0) != VOIDmode
2829 : TYPE_MODE (TREE_TYPE (tem)),
2830 op0, GEN_INT (bitsize), GEN_INT (bitpos));
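/* E.g. a 5-bit bit-field at bit offset 3 is read back for debug
purposes as an extraction RTX along the lines of
(zero_extract:SI op0 (const_int 5) (const_int 3)) -- or sign_extract,
depending on the signedness test above (shape shown for illustration
only). */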
2834 return simplify_gen_unary (ABS, mode, op0, mode);
2837 return simplify_gen_unary (NEG, mode, op0, mode);
2840 return simplify_gen_unary (NOT, mode, op0, mode);
2843 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
2845 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
2848 case FIX_TRUNC_EXPR:
2849 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
2852 case POINTER_PLUS_EXPR:
/* For the rare target where pointers are not the same size as
size_t, we need to check for mismatched modes and correct
the addition. */
2857 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
2858 && GET_MODE (op0) != GET_MODE (op1))
2860 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
2861 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
2864 /* We always sign-extend, regardless of the signedness of
2865 the operand, because the operand is always unsigned
2866 here even if the original C expression is signed. */
2867 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
2872 return simplify_gen_binary (PLUS, mode, op0, op1);
2875 return simplify_gen_binary (MINUS, mode, op0, op1);
2878 return simplify_gen_binary (MULT, mode, op0, op1);
2881 case TRUNC_DIV_EXPR:
2882 case EXACT_DIV_EXPR:
2884 return simplify_gen_binary (UDIV, mode, op0, op1);
2886 return simplify_gen_binary (DIV, mode, op0, op1);
2888 case TRUNC_MOD_EXPR:
2889 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
2891 case FLOOR_DIV_EXPR:
2893 return simplify_gen_binary (UDIV, mode, op0, op1);
2896 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
2897 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
2898 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2899 return simplify_gen_binary (PLUS, mode, div, adj);
2902 case FLOOR_MOD_EXPR:
2904 return simplify_gen_binary (UMOD, mode, op0, op1);
2907 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
2908 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2909 adj = simplify_gen_unary (NEG, mode,
2910 simplify_gen_binary (MULT, mode, adj, op1),
2912 return simplify_gen_binary (PLUS, mode, mod, adj);
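/* Worked example (illustrative): for signed -7 and 2, the truncating
mod is -1 and floor_sdiv_adjust yields -1, so
adj = -(-1 * 2) = 2 and mod + adj = 1, which matches
-7 - 2 * floor (-7 / 2) = 1. */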
2918 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
2919 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
2920 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2921 return simplify_gen_binary (PLUS, mode, div, adj);
2925 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
2926 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
2927 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2928 return simplify_gen_binary (PLUS, mode, div, adj);
2934 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
2935 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2936 adj = simplify_gen_unary (NEG, mode,
2937 simplify_gen_binary (MULT, mode, adj, op1),
2939 return simplify_gen_binary (PLUS, mode, mod, adj);
2943 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
2944 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2945 adj = simplify_gen_unary (NEG, mode,
2946 simplify_gen_binary (MULT, mode, adj, op1),
2948 return simplify_gen_binary (PLUS, mode, mod, adj);
2951 case ROUND_DIV_EXPR:
2954 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
2955 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
2956 rtx adj = round_udiv_adjust (mode, mod, op1);
2957 return simplify_gen_binary (PLUS, mode, div, adj);
2961 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
2962 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
2963 rtx adj = round_sdiv_adjust (mode, mod, op1);
2964 return simplify_gen_binary (PLUS, mode, div, adj);
2967 case ROUND_MOD_EXPR:
2970 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
2971 rtx adj = round_udiv_adjust (mode, mod, op1);
2972 adj = simplify_gen_unary (NEG, mode,
2973 simplify_gen_binary (MULT, mode, adj, op1),
2975 return simplify_gen_binary (PLUS, mode, mod, adj);
2979 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
2980 rtx adj = round_sdiv_adjust (mode, mod, op1);
2981 adj = simplify_gen_unary (NEG, mode,
2982 simplify_gen_binary (MULT, mode, adj, op1),
2984 return simplify_gen_binary (PLUS, mode, mod, adj);
2988 return simplify_gen_binary (ASHIFT, mode, op0, op1);
2992 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
2994 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
2997 return simplify_gen_binary (ROTATE, mode, op0, op1);
3000 return simplify_gen_binary (ROTATERT, mode, op0, op1);
3003 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
3006 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
3009 case TRUTH_AND_EXPR:
3010 return simplify_gen_binary (AND, mode, op0, op1);
3014 return simplify_gen_binary (IOR, mode, op0, op1);
3017 case TRUTH_XOR_EXPR:
3018 return simplify_gen_binary (XOR, mode, op0, op1);
3020 case TRUTH_ANDIF_EXPR:
3021 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
3023 case TRUTH_ORIF_EXPR:
3024 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
3026 case TRUTH_NOT_EXPR:
3027 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
3030 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
3034 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
3038 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
3042 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
3046 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
3049 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
3051 case UNORDERED_EXPR:
3052 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
3055 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
3058 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
3061 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
3064 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
3067 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
3070 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
3073 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
3076 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
3079 gcc_assert (COMPLEX_MODE_P (mode));
3080 if (GET_MODE (op0) == VOIDmode)
3081 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
3082 if (GET_MODE (op1) == VOIDmode)
3083 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
3084 return gen_rtx_CONCAT (mode, op0, op1);
3087 if (GET_CODE (op0) == CONCAT)
3088 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
3089 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
3091 GET_MODE_INNER (mode)));
3094 enum machine_mode imode = GET_MODE_INNER (mode);
3099 re = adjust_address_nv (op0, imode, 0);
3100 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
3104 enum machine_mode ifmode = int_mode_for_mode (mode);
3105 enum machine_mode ihmode = int_mode_for_mode (imode);
3107 if (ifmode == BLKmode || ihmode == BLKmode)
3109 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
3112 re = gen_rtx_SUBREG (ifmode, re, 0);
3113 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3114 if (imode != ihmode)
3115 re = gen_rtx_SUBREG (imode, re, 0);
3116 im = copy_rtx (op0);
3118 im = gen_rtx_SUBREG (ifmode, im, 0);
3119 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3120 if (imode != ihmode)
3121 im = gen_rtx_SUBREG (imode, im, 0);
3123 im = gen_rtx_NEG (imode, im);
3124 return gen_rtx_CONCAT (mode, re, im);
3128 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3129 if (!op0 || !MEM_P (op0))
3131 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3132 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3133 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
3134 && !TREE_ADDRESSABLE (TREE_OPERAND (exp, 0)))
3135 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3137 if (handled_component_p (TREE_OPERAND (exp, 0)))
3139 HOST_WIDE_INT bitoffset, bitsize, maxsize;
3141 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3142 &bitoffset, &bitsize, &maxsize);
3143 if ((TREE_CODE (decl) == VAR_DECL
3144 || TREE_CODE (decl) == PARM_DECL
3145 || TREE_CODE (decl) == RESULT_DECL)
3146 && !TREE_ADDRESSABLE (decl)
3147 && (bitoffset % BITS_PER_UNIT) == 0
3149 && bitsize == maxsize)
3150 return plus_constant (gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl),
3151 bitoffset / BITS_PER_UNIT);
3157 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
3158 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
3163 exp = build_constructor_from_list (TREE_TYPE (exp),
3164 TREE_VECTOR_CST_ELTS (exp));
3168 if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
3173 op0 = gen_rtx_CONCATN
3174 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3176 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3178 op1 = expand_debug_expr (val);
3181 XVECEXP (op0, 0, i) = op1;
3184 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3186 op1 = expand_debug_expr
3187 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
3192 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3193 XVECEXP (op0, 0, i) = op1;
3199 goto flag_unsupported;
3202 /* ??? Maybe handle some builtins? */
3207 gimple g = get_gimple_for_ssa_name (exp);
3210 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3216 int part = var_to_partition (SA.map, exp);
3218 if (part == NO_PARTITION)
/* If this is a reference to an incoming value of a parameter
that is never used in the code, use the PARM_DECL's
DECL_RTL if set. */
3224 if (SSA_NAME_IS_DEFAULT_DEF (exp)
3225 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
3227 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
3230 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
3237 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
3239 op0 = copy_rtx (SA.partition_to_pseudo[part]);
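/* Two illustrative outcomes of the SSA_NAME case: a TERed name such
as x_3 in "x_3 = y_1 * 2" is re-expanded from its defining RHS via
gimple_assign_rhs_to_tree above, while a partitioned name simply
reuses its partition's pseudo (copied so that the mode adjustment
below does not modify the shared RTX). */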
/* Vector stuff. For most of these tree codes there are no
corresponding RTL codes. */
3248 case REALIGN_LOAD_EXPR:
3249 case REDUC_MAX_EXPR:
3250 case REDUC_MIN_EXPR:
3251 case REDUC_PLUS_EXPR:
3253 case VEC_EXTRACT_EVEN_EXPR:
3254 case VEC_EXTRACT_ODD_EXPR:
3255 case VEC_INTERLEAVE_HIGH_EXPR:
3256 case VEC_INTERLEAVE_LOW_EXPR:
3257 case VEC_LSHIFT_EXPR:
3258 case VEC_PACK_FIX_TRUNC_EXPR:
3259 case VEC_PACK_SAT_EXPR:
3260 case VEC_PACK_TRUNC_EXPR:
3261 case VEC_RSHIFT_EXPR:
3262 case VEC_UNPACK_FLOAT_HI_EXPR:
3263 case VEC_UNPACK_FLOAT_LO_EXPR:
3264 case VEC_UNPACK_HI_EXPR:
3265 case VEC_UNPACK_LO_EXPR:
3266 case VEC_WIDEN_MULT_HI_EXPR:
3267 case VEC_WIDEN_MULT_LO_EXPR:
3271 case ADDR_SPACE_CONVERT_EXPR:
3272 case FIXED_CONVERT_EXPR:
3274 case WITH_SIZE_EXPR:
3278 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3279 && SCALAR_INT_MODE_P (mode))
3282 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3284 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3287 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3289 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
3291 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3292 return simplify_gen_binary (PLUS, mode, op0, op2);
3296 case WIDEN_MULT_EXPR:
3297 case WIDEN_MULT_PLUS_EXPR:
3298 case WIDEN_MULT_MINUS_EXPR:
3299 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3300 && SCALAR_INT_MODE_P (mode))
3302 inner_mode = GET_MODE (op0);
3303 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3304 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3306 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3307 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3308 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
3310 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
3311 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3312 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3314 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3315 return simplify_gen_binary (PLUS, mode, op0, op2);
3317 return simplify_gen_binary (MINUS, mode, op2, op0);
3321 case WIDEN_SUM_EXPR:
3322 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3323 && SCALAR_INT_MODE_P (mode))
3326 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3328 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3330 return simplify_gen_binary (PLUS, mode, op0, op1);
3335 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
3339 #ifdef ENABLE_CHECKING
/* Return an RTX equivalent to the source bind value of the tree expression
EXP. */
3352 expand_debug_source_expr (tree exp)
3355 enum machine_mode mode = VOIDmode, inner_mode;
3357 switch (TREE_CODE (exp))
3361 mode = DECL_MODE (exp);
3362 op0 = expand_debug_parm_decl (exp);
/* See if this isn't an argument that has been completely
optimized out. */
3367 if (!DECL_RTL_SET_P (exp)
3368 && !DECL_INCOMING_RTL (exp)
3369 && DECL_ABSTRACT_ORIGIN (current_function_decl))
3372 if (DECL_ABSTRACT_ORIGIN (exp))
3373 aexp = DECL_ABSTRACT_ORIGIN (exp);
3374 if (DECL_CONTEXT (aexp)
3375 == DECL_ABSTRACT_ORIGIN (current_function_decl))
3377 VEC(tree, gc) **debug_args;
3380 #ifdef ENABLE_CHECKING
3382 for (parm = DECL_ARGUMENTS (current_function_decl);
3383 parm; parm = DECL_CHAIN (parm))
3384 gcc_assert (parm != exp
3385 && DECL_ABSTRACT_ORIGIN (parm) != aexp);
3387 debug_args = decl_debug_args_lookup (current_function_decl);
3388 if (debug_args != NULL)
3390 for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl);
3393 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
3403 if (op0 == NULL_RTX)
3406 inner_mode = GET_MODE (op0);
3407 if (mode == inner_mode)
3410 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3412 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3413 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3414 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3415 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3417 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3419 else if (FLOAT_MODE_P (mode))
3421 else if (FLOAT_MODE_P (inner_mode))
3423 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3424 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3426 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3428 else if (CONSTANT_P (op0)
3429 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
3430 op0 = simplify_gen_subreg (mode, op0, inner_mode,
3431 subreg_lowpart_offset (mode, inner_mode));
3432 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3433 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3435 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3440 /* Expand the _LOCs in debug insns. We run this after expanding all
3441 regular insns, so that any variables referenced in the function
3442 will have their DECL_RTLs set. */
3445 expand_debug_locations (void)
3448 rtx last = get_last_insn ();
3449 int save_strict_alias = flag_strict_aliasing;
/* New alias sets while setting up memory attributes cause
-fcompare-debug failures, even though it doesn't bring about any
codegen changes. */
3454 flag_strict_aliasing = 0;
3456 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3457 if (DEBUG_INSN_P (insn))
3459 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3461 enum machine_mode mode;
3463 if (value == NULL_TREE)
3467 if (INSN_VAR_LOCATION_STATUS (insn)
3468 == VAR_INIT_STATUS_UNINITIALIZED)
3469 val = expand_debug_source_expr (value);
3471 val = expand_debug_expr (value);
3472 gcc_assert (last == get_last_insn ());
3476 val = gen_rtx_UNKNOWN_VAR_LOC ();
3479 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3481 gcc_assert (mode == GET_MODE (val)
3482 || (GET_MODE (val) == VOIDmode
3483 && (CONST_INT_P (val)
3484 || GET_CODE (val) == CONST_FIXED
3485 || GET_CODE (val) == CONST_DOUBLE
3486 || GET_CODE (val) == LABEL_REF)));
3489 INSN_VAR_LOCATION_LOC (insn) = val;
3492 flag_strict_aliasing = save_strict_alias;
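/* After this runs, a debug bind such as "# DEBUG i => j_2 + 1"
carries RTL rather than a tree; conceptually something like
(debug_insn (var_location:SI i (plus:SI (reg:SI j) (const_int 1))))
(shape shown for illustration only). */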
3495 /* Expand basic block BB from GIMPLE trees to RTL. */
3498 expand_gimple_basic_block (basic_block bb)
3500 gimple_stmt_iterator gsi;
3509 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3512 /* Note that since we are now transitioning from GIMPLE to RTL, we
3513 cannot use the gsi_*_bb() routines because they expect the basic
3514 block to be in GIMPLE, instead of RTL. Therefore, we need to
3515 access the BB sequence directly. */
3516 stmts = bb_seq (bb);
3517 bb->il.gimple = NULL;
3518 rtl_profile_for_bb (bb);
3519 init_rtl_bb_info (bb);
3520 bb->flags |= BB_RTL;
/* Remove the RETURN_EXPR if we may fall through to the exit
instead. */
3524 gsi = gsi_last (stmts);
3525 if (!gsi_end_p (gsi)
3526 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3528 gimple ret_stmt = gsi_stmt (gsi);
3530 gcc_assert (single_succ_p (bb));
3531 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3533 if (bb->next_bb == EXIT_BLOCK_PTR
3534 && !gimple_return_retval (ret_stmt))
3536 gsi_remove (&gsi, false);
3537 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3541 gsi = gsi_start (stmts);
3542 if (!gsi_end_p (gsi))
3544 stmt = gsi_stmt (gsi);
3545 if (gimple_code (stmt) != GIMPLE_LABEL)
3549 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3553 last = get_last_insn ();
3557 expand_gimple_stmt (stmt);
3562 emit_label ((rtx) *elt);
/* Java emits line number notes at the top of labels.
??? Make this go away once line number notes are obsoleted. */
3566 BB_HEAD (bb) = NEXT_INSN (last);
3567 if (NOTE_P (BB_HEAD (bb)))
3568 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3569 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3571 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3574 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3576 NOTE_BASIC_BLOCK (note) = bb;
3578 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3582 stmt = gsi_stmt (gsi);
3584 /* If this statement is a non-debug one, and we generate debug
3585 insns, then this one might be the last real use of a TERed
3586 SSA_NAME, but where there are still some debug uses further
3587 down. Expanding the current SSA name in such further debug
3588 uses by their RHS might lead to wrong debug info, as coalescing
3589 might make the operands of such RHS be placed into the same
3590 pseudo as something else. Like so:
a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
use(a_1);
a_2 = ...
#DEBUG ... => a_1
As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
If we now would expand a_1 by its RHS (a_0 + 1) in the debug use,
the write to a_2 would actually have clobbered the place which
would be remembered to be a_1.

So, instead of that, we recognize the situation, and generate
debug temporaries at the last real use of TERed SSA names:
a_1 = a_0 + 1;
#DEBUG #D1 => a_1
use(a_1);
a_2 = ...
#DEBUG ... => #D1 */
3608 if (MAY_HAVE_DEBUG_INSNS
3610 && !is_gimple_debug (stmt))
3616 location_t sloc = get_curr_insn_source_location ();
3617 tree sblock = get_curr_insn_block ();
3619 /* Look for SSA names that have their last use here (TERed
3620 names always have only one real use). */
3621 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3622 if ((def = get_gimple_for_ssa_name (op)))
3624 imm_use_iterator imm_iter;
3625 use_operand_p use_p;
3626 bool have_debug_uses = false;
3628 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3630 if (gimple_debug_bind_p (USE_STMT (use_p)))
3632 have_debug_uses = true;
3637 if (have_debug_uses)
/* OP is a TERed SSA name, with DEF its defining
statement, and where OP is used in further debug
instructions. Generate a debug temporary, and
replace all uses of OP in debug insns with that
temporary. */
3645 tree value = gimple_assign_rhs_to_tree (def);
3646 tree vexpr = make_node (DEBUG_EXPR_DECL);
3648 enum machine_mode mode;
3650 set_curr_insn_source_location (gimple_location (def));
3651 set_curr_insn_block (gimple_block (def));
3653 DECL_ARTIFICIAL (vexpr) = 1;
3654 TREE_TYPE (vexpr) = TREE_TYPE (value);
3656 mode = DECL_MODE (value);
3658 mode = TYPE_MODE (TREE_TYPE (value));
3659 DECL_MODE (vexpr) = mode;
3661 val = gen_rtx_VAR_LOCATION
3662 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3664 emit_debug_insn (val);
3666 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3668 if (!gimple_debug_bind_p (debugstmt))
3671 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3672 SET_USE (use_p, vexpr);
3674 update_stmt (debugstmt);
3678 set_curr_insn_source_location (sloc);
3679 set_curr_insn_block (sblock);
3682 currently_expanding_gimple_stmt = stmt;
3684 /* Expand this statement, then evaluate the resulting RTL and
3685 fixup the CFG accordingly. */
3686 if (gimple_code (stmt) == GIMPLE_COND)
3688 new_bb = expand_gimple_cond (bb, stmt);
3692 else if (gimple_debug_bind_p (stmt))
3694 location_t sloc = get_curr_insn_source_location ();
3695 tree sblock = get_curr_insn_block ();
3696 gimple_stmt_iterator nsi = gsi;
3700 tree var = gimple_debug_bind_get_var (stmt);
3703 enum machine_mode mode;
3705 if (gimple_debug_bind_has_value_p (stmt))
3706 value = gimple_debug_bind_get_value (stmt);
3710 last = get_last_insn ();
3712 set_curr_insn_source_location (gimple_location (stmt));
3713 set_curr_insn_block (gimple_block (stmt));
3716 mode = DECL_MODE (var);
3718 mode = TYPE_MODE (TREE_TYPE (var));
3720 val = gen_rtx_VAR_LOCATION
3721 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3723 emit_debug_insn (val);
3725 if (dump_file && (dump_flags & TDF_DETAILS))
/* We can't dump the insn with a TREE where an RTX
is expected. */
3729 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
3730 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3731 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
3734 /* In order not to generate too many debug temporaries,
3735 we delink all uses of debug statements we already expanded.
3736 Therefore debug statements between definition and real
3737 use of TERed SSA names will continue to use the SSA name,
3738 and not be replaced with debug temps. */
3739 delink_stmt_imm_use (stmt);
3743 if (gsi_end_p (nsi))
3745 stmt = gsi_stmt (nsi);
3746 if (!gimple_debug_bind_p (stmt))
3750 set_curr_insn_source_location (sloc);
3751 set_curr_insn_block (sblock);
3753 else if (gimple_debug_source_bind_p (stmt))
3755 location_t sloc = get_curr_insn_source_location ();
3756 tree sblock = get_curr_insn_block ();
3757 tree var = gimple_debug_source_bind_get_var (stmt);
3758 tree value = gimple_debug_source_bind_get_value (stmt);
3760 enum machine_mode mode;
3762 last = get_last_insn ();
3764 set_curr_insn_source_location (gimple_location (stmt));
3765 set_curr_insn_block (gimple_block (stmt));
3767 mode = DECL_MODE (var);
3769 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
3770 VAR_INIT_STATUS_UNINITIALIZED);
3772 emit_debug_insn (val);
3774 if (dump_file && (dump_flags & TDF_DETAILS))
/* We can't dump the insn with a TREE where an RTX
is expected. */
3778 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
3779 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3780 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
3783 set_curr_insn_source_location (sloc);
3784 set_curr_insn_block (sblock);
3788 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
3791 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
3802 def_operand_p def_p;
3803 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
3807 /* Ignore this stmt if it is in the list of
3808 replaceable expressions. */
3810 && bitmap_bit_p (SA.values,
3811 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
3814 last = expand_gimple_stmt (stmt);
3815 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3820 currently_expanding_gimple_stmt = NULL;
3822 /* Expand implicit goto and convert goto_locus. */
3823 FOR_EACH_EDGE (e, ei, bb->succs)
3825 if (e->goto_locus && e->goto_block)
3827 set_curr_insn_source_location (e->goto_locus);
3828 set_curr_insn_block (e->goto_block);
3829 e->goto_locus = curr_insn_locator ();
3831 e->goto_block = NULL;
3832 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
3834 emit_jump (label_rtx_for_bb (e->dest));
3835 e->flags &= ~EDGE_FALLTHRU;
/* Expanded RTL can create a jump in the last instruction of the block.
This jump might later be assumed to be a jump to the successor and
break edge insertion. We need to insert a dummy move to prevent this.
PR41440. */
3842 if (single_succ_p (bb)
3843 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
3844 && (last = get_last_insn ())
3847 rtx dummy = gen_reg_rtx (SImode);
3848 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
3851 do_pending_stack_adjust ();
3853 /* Find the block tail. The last insn in the block is the insn
3854 before a barrier and/or table jump insn. */
3855 last = get_last_insn ();
3856 if (BARRIER_P (last))
3857 last = PREV_INSN (last);
3858 if (JUMP_TABLE_DATA_P (last))
3859 last = PREV_INSN (PREV_INSN (last));
3862 update_bb_for_insn (bb);
3868 /* Create a basic block for initialization code. */
3871 construct_init_block (void)
3873 basic_block init_block, first_block;
3877 /* Multiple entry points not supported yet. */
3878 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
3879 init_rtl_bb_info (ENTRY_BLOCK_PTR);
3880 init_rtl_bb_info (EXIT_BLOCK_PTR);
3881 ENTRY_BLOCK_PTR->flags |= BB_RTL;
3882 EXIT_BLOCK_PTR->flags |= BB_RTL;
3884 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
/* When the entry edge points to the first basic block, we don't need a
jump; otherwise we have to jump to the proper target. */
3888 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
3890 tree label = gimple_block_label (e->dest);
3892 emit_jump (label_rtx (label));
3896 flags = EDGE_FALLTHRU;
3898 init_block = create_basic_block (NEXT_INSN (get_insns ()),
3901 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
3902 init_block->count = ENTRY_BLOCK_PTR->count;
3905 first_block = e->dest;
3906 redirect_edge_succ (e, init_block);
3907 e = make_edge (init_block, first_block, flags);
3910 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3911 e->probability = REG_BR_PROB_BASE;
3912 e->count = ENTRY_BLOCK_PTR->count;
3914 update_bb_for_insn (init_block);
3918 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
3919 found in the block tree. */
3922 set_block_levels (tree block, int level)
3926 BLOCK_NUMBER (block) = level;
3927 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
3928 block = BLOCK_CHAIN (block);
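/* E.g. for a body "{ int a; { int b; } { int c; } }" the outermost
block gets BLOCK_NUMBER 0 and both inner blocks get 1; sibling scopes
share a depth, which is enough to find a common parent later (an
illustrative reading, not a testcase). */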
3932 /* Create a block containing landing pads and similar stuff. */
3935 construct_exit_block (void)
3937 rtx head = get_last_insn ();
3939 basic_block exit_block;
3943 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
3945 rtl_profile_for_bb (EXIT_BLOCK_PTR);
3947 /* Make sure the locus is set to the end of the function, so that
3948 epilogue line numbers and warnings are set properly. */
3949 if (cfun->function_end_locus != UNKNOWN_LOCATION)
3950 input_location = cfun->function_end_locus;
3952 /* The following insns belong to the top scope. */
3953 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3955 /* Generate rtl for function exit. */
3956 expand_function_end ();
3958 end = get_last_insn ();
/* While emitting the function end we could move the end of the last
basic block. */
3963 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
3964 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
3965 head = NEXT_INSN (head);
3966 exit_block = create_basic_block (NEXT_INSN (head), end,
3967 EXIT_BLOCK_PTR->prev_bb);
3968 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
3969 exit_block->count = EXIT_BLOCK_PTR->count;
3972 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
3974 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
3975 if (!(e->flags & EDGE_ABNORMAL))
3976 redirect_edge_succ (e, exit_block);
3981 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3982 e->probability = REG_BR_PROB_BASE;
3983 e->count = EXIT_BLOCK_PTR->count;
3984 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
3987 e->count -= e2->count;
3988 exit_block->count -= e2->count;
3989 exit_block->frequency -= EDGE_FREQUENCY (e2);
3993 if (exit_block->count < 0)
3994 exit_block->count = 0;
3995 if (exit_block->frequency < 0)
3996 exit_block->frequency = 0;
3997 update_bb_for_insn (exit_block);
/* Helper function for discover_nonconstant_array_refs.
Look for ARRAY_REF nodes with non-constant indexes and mark them
addressable. */
4005 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
4006 void *data ATTRIBUTE_UNUSED)
4010 if (IS_TYPE_OR_DECL_P (t))
4012 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4014 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4015 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
4016 && (!TREE_OPERAND (t, 2)
4017 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4018 || (TREE_CODE (t) == COMPONENT_REF
4019 && (!TREE_OPERAND (t,2)
4020 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4021 || TREE_CODE (t) == BIT_FIELD_REF
4022 || TREE_CODE (t) == REALPART_EXPR
4023 || TREE_CODE (t) == IMAGPART_EXPR
4024 || TREE_CODE (t) == VIEW_CONVERT_EXPR
4025 || CONVERT_EXPR_P (t))
4026 t = TREE_OPERAND (t, 0);
4028 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4030 t = get_base_address (t);
4032 && DECL_MODE (t) != BLKmode)
4033 TREE_ADDRESSABLE (t) = 1;
/* RTL expansion is not able to compile array references with variable
offsets for arrays stored in a single register. Discover such
expressions and mark variables as addressable to avoid this
scenario. */
4048 discover_nonconstant_array_refs (void)
4051 gimple_stmt_iterator gsi;
4054 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4056 gimple stmt = gsi_stmt (gsi);
4057 if (!is_gimple_debug (stmt))
4058 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
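/* Hypothetical input this guards against:

     short a[2];
     ... = a[i];   /- I not constant -/

A could otherwise be allocated to a single register, which RTL
expansion cannot index at a variable offset; marking A addressable
forces it into memory instead. */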
/* This function sets crtl->args.internal_arg_pointer to a virtual
register if DRAP is needed. The local register allocator will replace
virtual_incoming_args_rtx with the virtual register. */
4067 expand_stack_alignment (void)
4070 unsigned int preferred_stack_boundary;
4072 if (! SUPPORTS_STACK_ALIGNMENT)
4075 if (cfun->calls_alloca
4076 || cfun->has_nonlocal_label
4077 || crtl->has_nonlocal_goto)
4078 crtl->need_drap = true;
4080 /* Call update_stack_boundary here again to update incoming stack
4081 boundary. It may set incoming stack alignment to a different
4082 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
4083 use the minimum incoming stack alignment to check if it is OK
4084 to perform sibcall optimization since sibcall optimization will
4085 only align the outgoing stack to incoming stack boundary. */
4086 if (targetm.calls.update_stack_boundary)
4087 targetm.calls.update_stack_boundary ();
4089 /* The incoming stack frame has to be aligned at least at
4090 parm_stack_boundary. */
4091 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
/* Update crtl->stack_alignment_estimated and use it later to align
stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
exceptions since callgraph doesn't collect incoming stack alignment
in this case. */
4097 if (cfun->can_throw_non_call_exceptions
4098 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
4099 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
4101 preferred_stack_boundary = crtl->preferred_stack_boundary;
4102 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
4103 crtl->stack_alignment_estimated = preferred_stack_boundary;
4104 if (preferred_stack_boundary > crtl->stack_alignment_needed)
4105 crtl->stack_alignment_needed = preferred_stack_boundary;
4107 gcc_assert (crtl->stack_alignment_needed
4108 <= crtl->stack_alignment_estimated);
4110 crtl->stack_realign_needed
4111 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
4112 crtl->stack_realign_tried = crtl->stack_realign_needed;
4114 crtl->stack_realign_processed = true;
/* Target has to redefine TARGET_GET_DRAP_RTX to support stack
realign. */
4118 gcc_assert (targetm.calls.get_drap_rtx != NULL);
4119 drap_rtx = targetm.calls.get_drap_rtx ();
4121 /* stack_realign_drap and drap_rtx must match. */
4122 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
4124 /* Do nothing if NULL is returned, which means DRAP is not needed. */
4125 if (NULL != drap_rtx)
4127 crtl->args.internal_arg_pointer = drap_rtx;
/* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
needed. */
4131 fixup_tail_calls ();
4135 /* Translate the intermediate representation contained in the CFG
4136 from GIMPLE trees to RTL.
4138 We do conversion per basic block and preserve/update the tree CFG.
4139 This implies we have to do some magic as the CFG can simultaneously
4140 consist of basic blocks containing RTL and GIMPLE trees. This can
confuse the CFG hooks, so be careful to not manipulate the CFG during
expansion. */
4145 gimple_expand_cfg (void)
4147 basic_block bb, init_block;
4154 timevar_push (TV_OUT_OF_SSA);
4155 rewrite_out_of_ssa (&SA);
4156 timevar_pop (TV_OUT_OF_SSA);
4157 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
4160 /* Some backends want to know that we are expanding to RTL. */
4161 currently_expanding_to_rtl = 1;
4163 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
4165 insn_locators_alloc ();
4166 if (!DECL_IS_BUILTIN (current_function_decl))
4168 /* Eventually, all FEs should explicitly set function_start_locus. */
4169 if (cfun->function_start_locus == UNKNOWN_LOCATION)
4170 set_curr_insn_source_location
4171 (DECL_SOURCE_LOCATION (current_function_decl));
4173 set_curr_insn_source_location (cfun->function_start_locus);
4176 set_curr_insn_source_location (UNKNOWN_LOCATION);
4177 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4178 prologue_locator = curr_insn_locator ();
4180 #ifdef INSN_SCHEDULING
4181 init_sched_attrs ();
/* Make sure the first insn is a note even if we don't want linenums.
4185 This makes sure the first insn will never be deleted.
4186 Also, final expects a note to appear there. */
4187 emit_note (NOTE_INSN_DELETED);
4189 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
4190 discover_nonconstant_array_refs ();
4192 targetm.expand_to_rtl_hook ();
4193 crtl->stack_alignment_needed = STACK_BOUNDARY;
4194 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
4195 crtl->stack_alignment_estimated = 0;
4196 crtl->preferred_stack_boundary = STACK_BOUNDARY;
4197 cfun->cfg->max_jumptable_ents = 0;
/* Resolve the function section. Some targets, like ARM EABI, rely on
knowledge of the function section at expansion time to predict distance
of calls. */
4201 resolve_unique_section (current_function_decl, 0, flag_function_sections);
4203 /* Expand the variables recorded during gimple lowering. */
4204 timevar_push (TV_VAR_EXPAND);
4207 expand_used_vars ();
4209 var_seq = get_insns ();
4211 timevar_pop (TV_VAR_EXPAND);
4213 /* Honor stack protection warnings. */
4214 if (warn_stack_protect)
4216 if (cfun->calls_alloca)
4217 warning (OPT_Wstack_protector,
4218 "stack protector not protecting local variables: "
4219 "variable length buffer");
4220 if (has_short_buffer && !crtl->stack_protect_guard)
4221 warning (OPT_Wstack_protector,
4222 "stack protector not protecting function: "
4223 "all local arrays are less than %d bytes long",
4224 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
4227 /* Set up parameters and prepare for return, for the function. */
4228 expand_function_start (current_function_decl);
4230 /* If we emitted any instructions for setting up the variables,
4231 emit them before the FUNCTION_START note. */
4234 emit_insn_before (var_seq, parm_birth_insn);
4236 /* In expand_function_end we'll insert the alloca save/restore
before parm_birth_insn. We've just inserted an alloca call.
4238 Adjust the pointer to match. */
4239 parm_birth_insn = var_seq;
/* Now that we also have the parameter RTXs, copy them over to our
partition representation. */
4244 for (i = 0; i < SA.map->num_partitions; i++)
4246 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
4248 if (TREE_CODE (var) != VAR_DECL
4249 && !SA.partition_to_pseudo[i])
4250 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
4251 gcc_assert (SA.partition_to_pseudo[i]);
4253 /* If this decl was marked as living in multiple places, reset
4254 this now to NULL. */
4255 if (DECL_RTL_IF_SET (var) == pc_rtx)
4256 SET_DECL_RTL (var, NULL);
4258 /* Some RTL parts really want to look at DECL_RTL(x) when x
4259 was a decl marked in REG_ATTR or MEM_ATTR. We could use
SET_DECL_RTL here making this available, but that would mean
selecting one of the potentially many RTLs for one DECL. Instead
4262 of doing that we simply reset the MEM_EXPR of the RTL in question,
4263 then nobody can get at it and hence nobody can call DECL_RTL on it. */
4264 if (!DECL_RTL_SET_P (var))
4266 if (MEM_P (SA.partition_to_pseudo[i]))
4267 set_mem_expr (SA.partition_to_pseudo[i], NULL);
/* If we have a class containing differently aligned pointers
we need to merge those into the corresponding RTL pointer
alignment. */
4276 tree name = ssa_name (i);
4281 || !POINTER_TYPE_P (TREE_TYPE (name))
/* We might have generated new SSA names in
update_alias_info_with_stack_vars. They will have a NULL
defining statement, and won't be part of the partitioning,
so ignore them. */
4286 || !SSA_NAME_DEF_STMT (name))
4288 part = var_to_partition (SA.map, name);
4289 if (part == NO_PARTITION)
4291 r = SA.partition_to_pseudo[part];
4293 mark_reg_pointer (r, get_pointer_alignment (name));
4296 /* If this function is `main', emit a call to `__main'
4297 to run global initializers, etc. */
4298 if (DECL_NAME (current_function_decl)
4299 && MAIN_NAME_P (DECL_NAME (current_function_decl))
4300 && DECL_FILE_SCOPE_P (current_function_decl))
4301 expand_main_function ();
4303 /* Initialize the stack_protect_guard field. This must happen after the
4304 call to __main (if any) so that the external decl is initialized. */
4305 if (crtl->stack_protect_guard)
4306 stack_protect_prologue ();
4308 expand_phi_nodes (&SA);
4310 /* Register rtl specific functions for cfg. */
4311 rtl_register_cfg_hooks ();
4313 init_block = construct_init_block ();
4315 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
4316 remaining edges later. */
4317 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
4318 e->flags &= ~EDGE_EXECUTABLE;
4320 lab_rtx_for_bb = pointer_map_create ();
4321 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
4322 bb = expand_gimple_basic_block (bb);
4324 if (MAY_HAVE_DEBUG_INSNS)
4325 expand_debug_locations ();
4327 execute_free_datastructures ();
4328 timevar_push (TV_OUT_OF_SSA);
4329 finish_out_of_ssa (&SA);
4330 timevar_pop (TV_OUT_OF_SSA);
4332 timevar_push (TV_POST_EXPAND);
4333 /* We are no longer in SSA form. */
4334 cfun->gimple_df->in_ssa_p = false;
/* Expansion is used by optimization passes too; set maybe_hot_insn_p
conservatively to true until they are all profile aware. */
4338 pointer_map_destroy (lab_rtx_for_bb);
4341 construct_exit_block ();
4342 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4343 insn_locators_finalize ();
4345 /* Zap the tree EH table. */
4346 set_eh_throw_stmt_table (cfun, NULL);
/* We need JUMP_LABEL to be set in order to redirect jumps, and hence
to split edges, which edge insertions might require. */
4350 rebuild_jump_labels (get_insns ());
4352 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
4356 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4360 rebuild_jump_labels_chain (e->insns.r);
4361 /* Avoid putting insns before parm_birth_insn. */
4362 if (e->src == ENTRY_BLOCK_PTR
4363 && single_succ_p (ENTRY_BLOCK_PTR)
4366 rtx insns = e->insns.r;
4367 e->insns.r = NULL_RTX;
4368 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
4371 commit_one_edge_insertion (e);
4378 /* We're done expanding trees to RTL. */
4379 currently_expanding_to_rtl = 0;
4381 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
4385 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4387 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
4388 e->flags &= ~EDGE_EXECUTABLE;
4390 /* At the moment not all abnormal edges match the RTL
4391 representation. It is safe to remove them here as
4392 find_many_sub_basic_blocks will rediscover them.
4393 In the future we should get this fixed properly. */
4394 if ((e->flags & EDGE_ABNORMAL)
4395 && !(e->flags & EDGE_SIBCALL))
4402 blocks = sbitmap_alloc (last_basic_block);
4403 sbitmap_ones (blocks);
4404 find_many_sub_basic_blocks (blocks);
4405 sbitmap_free (blocks);
4406 purge_all_dead_edges ();
4410 expand_stack_alignment ();
4412 #ifdef ENABLE_CHECKING
4413 verify_flow_info ();
4416 /* There's no need to defer outputting this function any more; we
4417 know we want to output it. */
4418 DECL_DEFER_OUTPUT (current_function_decl) = 0;
4420 /* Now that we're done expanding trees to RTL, we shouldn't have any
4421 more CONCATs anywhere. */
4422 generating_concat_p = 0;
4427 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
4428 /* And the pass manager will dump RTL for us. */
4431 /* If we're emitting a nested function, make sure its parent gets
4432 emitted as well. Doing otherwise confuses debug info. */
4435 for (parent = DECL_CONTEXT (current_function_decl);
4436 parent != NULL_TREE;
4437 parent = get_containing_scope (parent))
4438 if (TREE_CODE (parent) == FUNCTION_DECL)
4439 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
4442 /* We are now committed to emitting code for this function. Do any
preparation, such as emitting abstract debug info for the inline function
before it gets mangled by optimization. */
4445 if (cgraph_function_possibly_inlined_p (current_function_decl))
4446 (*debug_hooks->outlining_inline_function) (current_function_decl);
4448 TREE_ASM_WRITTEN (current_function_decl) = 1;
4450 /* After expanding, the return labels are no longer needed. */
4451 return_label = NULL;
4452 naked_return_label = NULL;
4453 /* Tag the blocks with a depth number so that change_scope can find
4454 the common parent easily. */
4455 set_block_levels (DECL_INITIAL (cfun->decl), 0);
4456 default_rtl_profile ();
4457 timevar_pop (TV_POST_EXPAND);
4461 struct rtl_opt_pass pass_expand =
4465 "expand", /* name */
4467 gimple_expand_cfg, /* execute */
4470 0, /* static_pass_number */
4471 TV_EXPAND, /* tv_id */
4472 PROP_ssa | PROP_gimple_leh | PROP_cfg
4473 | PROP_gimple_lcx, /* properties_required */
4474 PROP_rtl, /* properties_provided */
4475 PROP_ssa | PROP_trees, /* properties_destroyed */
4476 TODO_verify_ssa | TODO_verify_flow
4477 | TODO_verify_stmts, /* todo_flags_start */
4478 TODO_ggc_collect /* todo_flags_finish */