1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
28 #include "basic-block.h"
31 #include "langhooks.h"
32 #include "tree-flow.h"
34 #include "tree-dump.h"
35 #include "tree-pass.h"
38 #include "diagnostic.h"
39 #include "tree-pretty-print.h"
40 #include "gimple-pretty-print.h"
44 #include "tree-inline.h"
45 #include "value-prof.h"
47 #include "ssaexpand.h"
50 #include "insn-attr.h" /* For INSN_SCHEDULING. */
52 /* This variable holds information helping the rewriting of SSA trees
53 into RTL. */
56 /* This variable holds the currently expanded gimple statement for purposes
57 of communicating the profile info to the builtin expanders. */
58 gimple currently_expanding_gimple_stmt;
60 static rtx expand_debug_expr (tree);
62 /* Return an expression tree corresponding to the RHS of GIMPLE
63 statement STMT. */
66 gimple_assign_rhs_to_tree (gimple stmt)
69 enum gimple_rhs_class grhs_class;
71 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
73 if (grhs_class == GIMPLE_TERNARY_RHS)
74 t = build3 (gimple_assign_rhs_code (stmt),
75 TREE_TYPE (gimple_assign_lhs (stmt)),
76 gimple_assign_rhs1 (stmt),
77 gimple_assign_rhs2 (stmt),
78 gimple_assign_rhs3 (stmt));
79 else if (grhs_class == GIMPLE_BINARY_RHS)
80 t = build2 (gimple_assign_rhs_code (stmt),
81 TREE_TYPE (gimple_assign_lhs (stmt)),
82 gimple_assign_rhs1 (stmt),
83 gimple_assign_rhs2 (stmt));
84 else if (grhs_class == GIMPLE_UNARY_RHS)
85 t = build1 (gimple_assign_rhs_code (stmt),
86 TREE_TYPE (gimple_assign_lhs (stmt)),
87 gimple_assign_rhs1 (stmt));
88 else if (grhs_class == GIMPLE_SINGLE_RHS)
90 t = gimple_assign_rhs1 (stmt);
91 /* Avoid modifying this tree in place below. */
92 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
93 && gimple_location (stmt) != EXPR_LOCATION (t))
94 || (gimple_block (stmt)
95 && currently_expanding_to_rtl
97 && gimple_block (stmt) != TREE_BLOCK (t)))
103 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
104 SET_EXPR_LOCATION (t, gimple_location (stmt));
105 if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
106 TREE_BLOCK (t) = gimple_block (stmt);
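/* Editor's illustration, not part of the original source: a
   GIMPLE_BINARY_RHS assignment such as "a_1 = b_2 + c_3" maps to
   build2 (PLUS_EXPR, TREE_TYPE (a_1), b_2, c_3), while a
   GIMPLE_SINGLE_RHS such as "a_1 = b_2" reuses the rhs1 tree directly,
   with the statement's location and block copied onto it as above.  */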
112 #ifndef STACK_ALIGNMENT_NEEDED
113 #define STACK_ALIGNMENT_NEEDED 1
114 #endif
116 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
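/* Editor's note, added: SSAVAR maps an SSA name such as a_1 back to its
   underlying VAR_DECL 'a', and is the identity on plain decls, so the
   allocation code below can treat both uniformly.  */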
118 /* Associate declaration T with storage space X. If T is not an
119 SSA name, this is exactly SET_DECL_RTL; otherwise make the
120 partition of T associated with X. */
122 set_rtl (tree t, rtx x)
124 if (TREE_CODE (t) == SSA_NAME)
126 SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
128 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
129 /* For the benefit of debug information at -O0 (where vartracking
130 doesn't run) record the place also in the base DECL if it's
131 a normal variable (not a parameter). */
132 if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
134 tree var = SSA_NAME_VAR (t);
135 /* If we don't yet have something recorded, just record it now. */
136 if (!DECL_RTL_SET_P (var))
137 SET_DECL_RTL (var, x);
138 /* If we have it set already to "multiple places" don't
139 change this. */
140 else if (DECL_RTL (var) == pc_rtx)
142 /* If we have something recorded and it's not the same place
143 as we want to record now, we have multiple partitions for the
144 same base variable, with different places. We can't just
145 randomly choose one, hence we have to say that we don't know.
146 This only happens with optimization, and there var-tracking
147 will figure out the right thing. */
148 else if (DECL_RTL (var) != x)
149 SET_DECL_RTL (var, pc_rtx);
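/* Editor's illustration, added: if SSA names a_1 and a_2 share the base
   variable 'a' but land in different partitions with different places,
   the first set_rtl call records a_1's place in DECL_RTL (a); when a_2's
   place differs, DECL_RTL (a) becomes pc_rtx ("multiple places") and
   var-tracking is left to work out the per-range locations.  */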
156 /* This structure holds data relevant to one variable that will be
157 placed in a stack slot. */
163 /* Initially, the size of the variable. Later, the size of the partition,
164 if this variable becomes its partition's representative. */
167 /* The *byte* alignment required for this variable. Or, as with the
168 size, the alignment for this partition. */
171 /* The partition representative. */
172 size_t representative;
174 /* The next stack variable in the partition, or EOC. */
177 /* The indices of conflicting stack variables. */
181 #define EOC ((size_t)-1)
183 /* We have an array of such objects while deciding allocation. */
184 static struct stack_var *stack_vars;
185 static size_t stack_vars_alloc;
186 static size_t stack_vars_num;
188 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
189 is non-decreasing. */
190 static size_t *stack_vars_sorted;
192 /* The phase of the stack frame. This is the known misalignment of
193 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
194 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
195 static int frame_phase;
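/* Editor's worked example with hypothetical values: if
   PREFERRED_STACK_BOUNDARY is 128 bits (16 bytes) and
   STARTING_FRAME_OFFSET is 8, then off = 8 % 16 = 8 and
   frame_phase = 16 - 8 = 8, so (frame_offset + 8) % 16 == 0 holds for
   every offset handed out by alloc_stack_frame_space.  */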
197 /* Used during expand_used_vars to remember if we saw any decls for
198 which we'd like to enable stack smashing protection. */
199 static bool has_protected_decls;
201 /* Used during expand_used_vars. Remember if we saw a character buffer
202 smaller than our cutoff threshold. Used for -Wstack-protector. */
203 static bool has_short_buffer;
205 /* Compute the byte alignment to use for DECL. Ignore alignment
206 we can't satisfy with the expected alignment of the stack boundary. */
209 align_local_variable (tree decl)
211 unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
212 DECL_ALIGN (decl) = align;
213 return align / BITS_PER_UNIT;
216 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
217 Return the frame offset. */
220 alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
222 HOST_WIDE_INT offset, new_frame_offset;
224 new_frame_offset = frame_offset;
225 if (FRAME_GROWS_DOWNWARD)
227 new_frame_offset -= size + frame_phase;
228 new_frame_offset &= -align;
229 new_frame_offset += frame_phase;
230 offset = new_frame_offset;
234 new_frame_offset -= frame_phase;
235 new_frame_offset += align - 1;
236 new_frame_offset &= -align;
237 new_frame_offset += frame_phase;
238 offset = new_frame_offset;
239 new_frame_offset += size;
241 frame_offset = new_frame_offset;
243 if (frame_offset_overflow (frame_offset, cfun->decl))
244 frame_offset = offset = 0;
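/* Editor's numeric sketch, values hypothetical: with FRAME_GROWS_DOWNWARD,
   frame_offset == 0, frame_phase == 0, SIZE == 12 and ALIGN == 8, we get
   new_frame_offset = -12, then -12 & -8 == -16, so the function returns
   -16 and leaves frame_offset at -16.  Growing upward instead,
   (0 + 7) & -8 == 0, so it returns offset 0 and frame_offset becomes 12.  */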
249 /* Accumulate DECL into STACK_VARS. */
252 add_stack_var (tree decl)
256 if (stack_vars_num >= stack_vars_alloc)
258 if (stack_vars_alloc)
259 stack_vars_alloc = stack_vars_alloc * 3 / 2;
261 stack_vars_alloc = 32;
263 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
265 v = &stack_vars[stack_vars_num];
268 v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
269 /* Ensure that all variables have size, so that &a != &b for any two
270 variables that are simultaneously live. */
273 v->alignb = align_local_variable (SSAVAR (decl));
275 /* All variables are initially in their own partition. */
276 v->representative = stack_vars_num;
279 /* All variables initially conflict with no other. */
282 /* Ensure that this decl doesn't get put onto the list twice. */
283 set_rtl (decl, pc_rtx);
288 /* Make the decls associated with luid's X and Y conflict. */
291 add_stack_var_conflict (size_t x, size_t y)
293 struct stack_var *a = &stack_vars[x];
294 struct stack_var *b = &stack_vars[y];
296 a->conflicts = BITMAP_ALLOC (NULL);
298 b->conflicts = BITMAP_ALLOC (NULL);
299 bitmap_set_bit (a->conflicts, y);
300 bitmap_set_bit (b->conflicts, x);
303 /* Check whether the decls associated with luid's X and Y conflict. */
306 stack_var_conflict_p (size_t x, size_t y)
308 struct stack_var *a = &stack_vars[x];
309 struct stack_var *b = &stack_vars[y];
310 if (!a->conflicts || !b->conflicts)
312 return bitmap_bit_p (a->conflicts, y);
315 /* Returns true if TYPE is or contains a union type. */
318 aggregate_contains_union_type (tree type)
322 if (TREE_CODE (type) == UNION_TYPE
323 || TREE_CODE (type) == QUAL_UNION_TYPE)
325 if (TREE_CODE (type) == ARRAY_TYPE)
326 return aggregate_contains_union_type (TREE_TYPE (type));
327 if (TREE_CODE (type) != RECORD_TYPE)
330 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
331 if (TREE_CODE (field) == FIELD_DECL)
332 if (aggregate_contains_union_type (TREE_TYPE (field)))
338 /* A subroutine of expand_used_vars. If two variables X and Y have alias
339 sets that do not conflict, then do add a conflict for these variables
340 in the interference graph. We also need to make sure to add conflicts
341 for union-containing structures. Otherwise RTL alias analysis comes along
342 and, due to type-based aliasing rules, decides that for two overlapping
343 union temporaries { short s; int i; } accesses to the same mem through
344 different types may not alias, and happily reorders stores across
345 life-time boundaries of the temporaries (see PR25654).
346 We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P. */
349 add_alias_set_conflicts (void)
351 size_t i, j, n = stack_vars_num;
353 for (i = 0; i < n; ++i)
355 tree type_i = TREE_TYPE (stack_vars[i].decl);
356 bool aggr_i = AGGREGATE_TYPE_P (type_i);
359 contains_union = aggregate_contains_union_type (type_i);
360 for (j = 0; j < i; ++j)
362 tree type_j = TREE_TYPE (stack_vars[j].decl);
363 bool aggr_j = AGGREGATE_TYPE_P (type_j);
365 /* Either the objects conflict by means of type based
366 aliasing rules, or we need to add a conflict. */
367 || !objects_must_conflict_p (type_i, type_j)
368 /* In case the types do not conflict ensure that access
369 to elements will conflict. In case of unions we have
370 to be careful as type based aliasing rules may say
371 access to the same memory does not conflict. So play
372 safe and add a conflict in this case when
373 -fstrict-aliasing is used. */
374 || (contains_union && flag_strict_aliasing))
375 add_stack_var_conflict (i, j);
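/* Editor's concrete case, restating the comment above: two temporaries of
   type union { short s; int i; } whose lifetimes do not overlap may share
   a stack slot; without the conflict added here, type-based alias analysis
   could let RTL passes reorder a store through the 'i' field past a load
   through the 's' field of the other temporary (PR25654).  */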
380 /* A subroutine of partition_stack_vars. A comparison function for qsort,
381 sorting an array of indices by the properties of the object. */
384 stack_var_cmp (const void *a, const void *b)
386 size_t ia = *(const size_t *)a;
387 size_t ib = *(const size_t *)b;
388 unsigned int aligna = stack_vars[ia].alignb;
389 unsigned int alignb = stack_vars[ib].alignb;
390 HOST_WIDE_INT sizea = stack_vars[ia].size;
391 HOST_WIDE_INT sizeb = stack_vars[ib].size;
392 tree decla = stack_vars[ia].decl;
393 tree declb = stack_vars[ib].decl;
395 unsigned int uida, uidb;
397 /* Primary compare on "large" alignment. Large comes first. */
398 largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
399 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
400 if (largea != largeb)
401 return (int)largeb - (int)largea;
403 /* Secondary compare on size, decreasing. */
409 /* Tertiary compare on true alignment, decreasing. */
415 /* Final compare on ID for sort stability, increasing.
416 Two SSA names are compared by their version, SSA names come before
417 non-SSA names, and two normal decls are compared by their DECL_UID. */
418 if (TREE_CODE (decla) == SSA_NAME)
420 if (TREE_CODE (declb) == SSA_NAME)
421 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
425 else if (TREE_CODE (declb) == SSA_NAME)
428 uida = DECL_UID (decla), uidb = DECL_UID (declb);
437 /* If the points-to solution *PI points to variables that are in a partition
438 together with other variables, add all partition members to the pointed-to
439 set. */
442 add_partitioned_vars_to_ptset (struct pt_solution *pt,
443 struct pointer_map_t *decls_to_partitions,
444 struct pointer_set_t *visited, bitmap temp)
452 /* The pointed-to vars bitmap is shared, it is enough to
453 visit it once. */
454 || pointer_set_insert(visited, pt->vars))
459 /* By using a temporary bitmap to store all members of the partitions
460 we have to add, we make sure to visit each of the partitions only
461 once. */
462 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
464 || !bitmap_bit_p (temp, i))
465 && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
466 (void *)(size_t) i)))
467 bitmap_ior_into (temp, *part);
468 if (!bitmap_empty_p (temp))
469 bitmap_ior_into (pt->vars, temp);
472 /* Update points-to sets based on partition info, so we can use them on RTL.
473 The bitmaps representing stack partitions will be saved until expand,
474 where partitioned decls used as bases in memory expressions will be
475 rewritten. */
478 update_alias_info_with_stack_vars (void)
480 struct pointer_map_t *decls_to_partitions = NULL;
482 tree var = NULL_TREE;
484 for (i = 0; i < stack_vars_num; i++)
488 struct ptr_info_def *pi;
490 /* Not interested in partitions with a single variable. */
491 if (stack_vars[i].representative != i
492 || stack_vars[i].next == EOC)
495 if (!decls_to_partitions)
497 decls_to_partitions = pointer_map_create ();
498 cfun->gimple_df->decls_to_pointers = pointer_map_create ();
501 /* Create an SSA_NAME that points to the partition for use
502 as base during alias-oracle queries on RTL for bases that
503 have been partitioned. */
504 if (var == NULL_TREE)
505 var = create_tmp_var (ptr_type_node, NULL);
506 name = make_ssa_name (var, NULL);
508 /* Create bitmaps representing partitions. They will be used for
509 points-to sets later, so use GGC alloc. */
510 part = BITMAP_GGC_ALLOC ();
511 for (j = i; j != EOC; j = stack_vars[j].next)
513 tree decl = stack_vars[j].decl;
514 unsigned int uid = DECL_PT_UID (decl);
515 /* We should never end up partitioning SSA names (though they
516 may end up on the stack). Neither should we allocate stack
517 space to something that is unused and thus unreferenced, except
518 for -O0 where we are preserving even unreferenced variables. */
519 gcc_assert (DECL_P (decl)
521 || referenced_var_lookup (cfun, DECL_UID (decl))));
522 bitmap_set_bit (part, uid);
523 *((bitmap *) pointer_map_insert (decls_to_partitions,
524 (void *)(size_t) uid)) = part;
525 *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
529 /* Make the SSA name point to all partition members. */
530 pi = get_ptr_info (name);
531 pt_solution_set (&pi->pt, part, false, false);
534 /* Make all points-to sets that contain one member of a partition
535 contain all members of the partition. */
536 if (decls_to_partitions)
539 struct pointer_set_t *visited = pointer_set_create ();
540 bitmap temp = BITMAP_ALLOC (NULL);
542 for (i = 1; i < num_ssa_names; i++)
544 tree name = ssa_name (i);
545 struct ptr_info_def *pi;
548 && POINTER_TYPE_P (TREE_TYPE (name))
549 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
550 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
554 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
555 decls_to_partitions, visited, temp);
557 pointer_set_destroy (visited);
558 pointer_map_destroy (decls_to_partitions);
563 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
564 partitioning algorithm. Partitions A and B are known to be non-conflicting.
565 Merge them into a single partition A. */
568 union_stack_vars (size_t a, size_t b)
570 struct stack_var *vb = &stack_vars[b];
574 gcc_assert (stack_vars[b].next == EOC);
575 /* Add B to A's partition. */
576 stack_vars[b].next = stack_vars[a].next;
577 stack_vars[b].representative = a;
578 stack_vars[a].next = b;
580 /* Update the required alignment of partition A to account for B. */
581 if (stack_vars[a].alignb < stack_vars[b].alignb)
582 stack_vars[a].alignb = stack_vars[b].alignb;
584 /* Update the interference graph and merge the conflicts. */
587 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
588 add_stack_var_conflict (a, stack_vars[u].representative);
589 BITMAP_FREE (vb->conflicts);
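/* Editor's sketch of the representation: partitions are singly linked
   through the 'next' field and named by their representative index.
   Merging a singleton B = {b} into A with union_stack_vars splices b in
   after a, giving {a -> b -> ... -> EOC}, sets b's representative to a,
   and widens A's alignment and conflict set to cover B.  */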
593 /* A subroutine of expand_used_vars. Binpack the variables into
594 partitions constrained by the interference graph. The overall
595 algorithm used is as follows:
597 Sort the objects by size in descending order.
598 For each object A {
599 S = size(A)
600 O = 0
601 loop {
602 Look for the largest non-conflicting object B with size <= S.
603 UNION (A, B)
604 }
605 }
606 */
609 partition_stack_vars (void)
611 size_t si, sj, n = stack_vars_num;
613 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
614 for (si = 0; si < n; ++si)
615 stack_vars_sorted[si] = si;
620 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
622 for (si = 0; si < n; ++si)
624 size_t i = stack_vars_sorted[si];
625 unsigned int ialign = stack_vars[i].alignb;
627 /* Ignore objects that aren't partition representatives. If we
628 see a var that is not a partition representative, it must
629 have been merged earlier. */
630 if (stack_vars[i].representative != i)
633 for (sj = si + 1; sj < n; ++sj)
635 size_t j = stack_vars_sorted[sj];
636 unsigned int jalign = stack_vars[j].alignb;
638 /* Ignore objects that aren't partition representatives. */
639 if (stack_vars[j].representative != j)
642 /* Ignore conflicting objects. */
643 if (stack_var_conflict_p (i, j))
646 /* Do not mix objects of "small" (supported) alignment
647 and "large" (unsupported) alignment. */
648 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
649 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
652 /* UNION the objects; J joins I's partition. */
653 union_stack_vars (i, j);
657 update_alias_info_with_stack_vars ();
660 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
663 dump_stack_var_partition (void)
665 size_t si, i, j, n = stack_vars_num;
667 for (si = 0; si < n; ++si)
669 i = stack_vars_sorted[si];
671 /* Skip variables that aren't partition representatives, for now. */
672 if (stack_vars[i].representative != i)
675 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
676 " align %u\n", (unsigned long) i, stack_vars[i].size,
677 stack_vars[i].alignb);
679 for (j = i; j != EOC; j = stack_vars[j].next)
681 fputc ('\t', dump_file);
682 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
684 fputc ('\n', dump_file);
688 /* Assign rtl to DECL at BASE + OFFSET. */
691 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
692 HOST_WIDE_INT offset)
697 /* If this fails, we've overflowed the stack frame. Error nicely? */
698 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
700 x = plus_constant (base, offset);
701 x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);
703 if (TREE_CODE (decl) != SSA_NAME)
705 /* Set alignment we actually gave this decl if it isn't an SSA name.
706 If it is, we generate stack slots only accidentally, so it isn't as
707 important; we'll simply use the alignment that is already set. */
708 if (base == virtual_stack_vars_rtx)
709 offset -= frame_phase;
710 align = offset & -offset;
711 align *= BITS_PER_UNIT;
712 if (align == 0 || align > base_align)
715 /* One would think that we could assert that we're not decreasing
716 alignment here, but (at least) the i386 port does exactly this
717 via the MINIMUM_ALIGNMENT hook. */
719 DECL_ALIGN (decl) = align;
720 DECL_USER_ALIGN (decl) = 0;
723 set_mem_attributes (x, SSAVAR (decl), true);
727 /* A subroutine of expand_used_vars. Give each partition representative
728 a unique location within the stack frame. Update each partition member
729 with that location. */
732 expand_stack_vars (bool (*pred) (tree))
734 size_t si, i, j, n = stack_vars_num;
735 HOST_WIDE_INT large_size = 0, large_alloc = 0;
736 rtx large_base = NULL;
737 unsigned large_align = 0;
740 /* Determine if there are any variables requiring "large" alignment.
741 Since these are dynamically allocated, we only process them when
742 no predicate is involved.
743 large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
744 if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
746 /* Find the total size of these variables. */
747 for (si = 0; si < n; ++si)
751 i = stack_vars_sorted[si];
752 alignb = stack_vars[i].alignb;
754 /* Stop when we get to the first decl with "small" alignment. */
755 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
758 /* Skip variables that aren't partition representatives. */
759 if (stack_vars[i].representative != i)
762 /* Skip variables that have already had rtl assigned. See also
763 add_stack_var where we perpetrate this pc_rtx hack. */
764 decl = stack_vars[i].decl;
765 if ((TREE_CODE (decl) == SSA_NAME
766 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
767 : DECL_RTL (decl)) != pc_rtx)
770 large_size += alignb - 1;
771 large_size &= -(HOST_WIDE_INT)alignb;
772 large_size += stack_vars[i].size;
775 /* If there were any, allocate space. */
777 large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
781 for (si = 0; si < n; ++si)
784 unsigned base_align, alignb;
785 HOST_WIDE_INT offset;
787 i = stack_vars_sorted[si];
789 /* Skip variables that aren't partition representatives, for now. */
790 if (stack_vars[i].representative != i)
793 /* Skip variables that have already had rtl assigned. See also
794 add_stack_var where we perpetrate this pc_rtx hack. */
795 decl = stack_vars[i].decl;
796 if ((TREE_CODE (decl) == SSA_NAME
797 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
798 : DECL_RTL (decl)) != pc_rtx)
801 /* Check the predicate to see whether this variable should be
802 allocated in this pass. */
803 if (pred && !pred (decl))
806 alignb = stack_vars[i].alignb;
807 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
809 offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
810 base = virtual_stack_vars_rtx;
811 base_align = crtl->max_used_stack_slot_alignment;
815 /* Large alignment is only processed in the last pass. */
818 gcc_assert (large_base != NULL);
820 large_alloc += alignb - 1;
821 large_alloc &= -(HOST_WIDE_INT)alignb;
822 offset = large_alloc;
823 large_alloc += stack_vars[i].size;
826 base_align = large_align;
829 /* Create rtl for each variable based on its location within the
830 partition. */
831 for (j = i; j != EOC; j = stack_vars[j].next)
833 expand_one_stack_var_at (stack_vars[j].decl,
839 gcc_assert (large_alloc == large_size);
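/* Editor's usage note: expand_used_vars drives this in phases.  With
   stack protection enabled it first calls
   expand_stack_vars (stack_protect_decl_phase_1), possibly phase 2 under
   -fstack-protector-all, and finally expand_stack_vars (NULL) for the
   remaining variables; only that last pred-less pass lays out the
   dynamically allocated "large"-alignment partitions.  */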
842 /* Take into account all sizes of partitions and reset DECL_RTLs. */
844 account_stack_vars (void)
846 size_t si, j, i, n = stack_vars_num;
847 HOST_WIDE_INT size = 0;
849 for (si = 0; si < n; ++si)
851 i = stack_vars_sorted[si];
853 /* Skip variables that aren't partition representatives, for now. */
854 if (stack_vars[i].representative != i)
857 size += stack_vars[i].size;
858 for (j = i; j != EOC; j = stack_vars[j].next)
859 set_rtl (stack_vars[j].decl, NULL);
864 /* A subroutine of expand_one_var. Called to immediately assign rtl
865 to a variable to be allocated in the stack frame. */
868 expand_one_stack_var (tree var)
870 HOST_WIDE_INT size, offset;
873 size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
874 byte_align = align_local_variable (SSAVAR (var));
876 /* We handle highly aligned variables in expand_stack_vars. */
877 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
879 offset = alloc_stack_frame_space (size, byte_align);
881 expand_one_stack_var_at (var, virtual_stack_vars_rtx,
882 crtl->max_used_stack_slot_alignment, offset);
885 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
886 that will reside in a hard register. */
889 expand_one_hard_reg_var (tree var)
891 rest_of_decl_compilation (var, 0, 0);
894 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
895 that will reside in a pseudo register. */
898 expand_one_register_var (tree var)
900 tree decl = SSAVAR (var);
901 tree type = TREE_TYPE (decl);
902 enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
903 rtx x = gen_reg_rtx (reg_mode);
907 /* Note if the object is a user variable. */
908 if (!DECL_ARTIFICIAL (decl))
911 if (POINTER_TYPE_P (type))
912 mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
915 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
916 has some associated error, e.g. its type is error-mark. We just need
917 to pick something that won't crash the rest of the compiler. */
920 expand_one_error_var (tree var)
922 enum machine_mode mode = DECL_MODE (var);
926 x = gen_rtx_MEM (BLKmode, const0_rtx);
927 else if (mode == VOIDmode)
930 x = gen_reg_rtx (mode);
932 SET_DECL_RTL (var, x);
935 /* A subroutine of expand_one_var. VAR is a variable that will be
936 allocated to the local stack frame. Return true if we wish to
937 add VAR to STACK_VARS so that it will be coalesced with other
938 variables. Return false to allocate VAR immediately.
940 This function is used to reduce the number of variables considered
941 for coalescing, which reduces the size of the quadratic problem. */
944 defer_stack_allocation (tree var, bool toplevel)
946 /* If stack protection is enabled, *all* stack variables must be deferred,
947 so that we can re-order the strings to the top of the frame. */
948 if (flag_stack_protect)
951 /* We handle "large" alignment via dynamic allocation. We want to handle
952 this extra complication in only one place, so defer them. */
953 if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
956 /* Variables in the outermost scope automatically conflict with
957 every other variable. The only reason to want to defer them
958 at all is that, after sorting, we can more efficiently pack
959 small variables in the stack frame. Continue to defer at -O2. */
960 if (toplevel && optimize < 2)
963 /* Without optimization, *most* variables are allocated from the
964 stack, which makes the quadratic problem large exactly when we
965 want compilation to proceed as quickly as possible. On the
966 other hand, we don't want the function's stack frame size to
967 get completely out of hand. So we avoid adding scalars and
968 "small" aggregates to the list at all. */
969 if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
975 /* A subroutine of expand_used_vars. Expand one variable according to
976 its flavor. Variables to be placed on the stack are not actually
977 expanded yet, merely recorded.
978 When REALLY_EXPAND is false, only add stack values to be allocated.
979 Return the stack usage this variable is supposed to take. */
983 expand_one_var (tree var, bool toplevel, bool really_expand)
985 unsigned int align = BITS_PER_UNIT;
990 if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
992 /* Because we don't know if VAR will be in register or on stack,
993 we conservatively assume it will be on stack even if VAR is
994 eventually put into register after RA pass. For non-automatic
995 variables, which won't be on stack, we collect alignment of
996 type and ignore user specified alignment. */
997 if (TREE_STATIC (var) || DECL_EXTERNAL (var))
998 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
999 TYPE_MODE (TREE_TYPE (var)),
1000 TYPE_ALIGN (TREE_TYPE (var)));
1001 else if (DECL_HAS_VALUE_EXPR_P (var)
1002 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1003 /* Don't consider debug-only variables with DECL_HAS_VALUE_EXPR_P set
1004 or variables which were assigned a stack slot already by
1005 expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1006 adjusted to match the offset chosen for it. */
1007 align = crtl->stack_alignment_estimated;
1009 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1011 /* If the variable alignment is very large we'll dynamically allocate
1012 it, which means that the in-frame portion is just a pointer. */
1013 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1014 align = POINTER_SIZE;
1017 if (SUPPORTS_STACK_ALIGNMENT
1018 && crtl->stack_alignment_estimated < align)
1020 /* stack_alignment_estimated shouldn't change after the stack
1021 realign decision has been made. */
1022 gcc_assert(!crtl->stack_realign_processed);
1023 crtl->stack_alignment_estimated = align;
1026 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1027 So here we only make sure stack_alignment_needed >= align. */
1028 if (crtl->stack_alignment_needed < align)
1029 crtl->stack_alignment_needed = align;
1030 if (crtl->max_used_stack_slot_alignment < align)
1031 crtl->max_used_stack_slot_alignment = align;
1033 if (TREE_CODE (origvar) == SSA_NAME)
1035 gcc_assert (TREE_CODE (var) != VAR_DECL
1036 || (!DECL_EXTERNAL (var)
1037 && !DECL_HAS_VALUE_EXPR_P (var)
1038 && !TREE_STATIC (var)
1039 && TREE_TYPE (var) != error_mark_node
1040 && !DECL_HARD_REGISTER (var)
1043 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
1045 else if (DECL_EXTERNAL (var))
1047 else if (DECL_HAS_VALUE_EXPR_P (var))
1049 else if (TREE_STATIC (var))
1051 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1053 else if (TREE_TYPE (var) == error_mark_node)
1056 expand_one_error_var (var);
1058 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1061 expand_one_hard_reg_var (var);
1063 else if (use_register_for_decl (var))
1066 expand_one_register_var (origvar);
1068 else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
1072 error ("size of variable %q+D is too large", var);
1073 expand_one_error_var (var);
1076 else if (defer_stack_allocation (var, toplevel))
1077 add_stack_var (origvar);
1081 expand_one_stack_var (origvar);
1082 return tree_low_cst (DECL_SIZE_UNIT (var), 1);
1087 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1088 expanding variables. Those variables that can be put into registers
1089 are allocated pseudos; those that can't are put on the stack.
1091 TOPLEVEL is true if this is the outermost BLOCK. */
1094 expand_used_vars_for_block (tree block, bool toplevel)
1096 size_t i, j, old_sv_num, this_sv_num, new_sv_num;
1099 old_sv_num = toplevel ? 0 : stack_vars_num;
1101 /* Expand all variables at this level. */
1102 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1104 && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1105 || !DECL_NONSHAREABLE (t)))
1106 expand_one_var (t, toplevel, true);
1108 this_sv_num = stack_vars_num;
1110 /* Expand all variables at containing levels. */
1111 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1112 expand_used_vars_for_block (t, false);
1114 /* Since we do not track exact variable lifetimes (which is not even
1115 possible for variables whose address escapes), we mirror the block
1116 tree in the interference graph. Here we cause all variables at this
1117 level, and all sublevels, to conflict. */
1118 if (old_sv_num < this_sv_num)
1120 new_sv_num = stack_vars_num;
1122 for (i = old_sv_num; i < new_sv_num; ++i)
1123 for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;)
1124 add_stack_var_conflict (i, j);
1128 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1129 and clear TREE_USED on all local variables. */
1132 clear_tree_used (tree block)
1136 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1137 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1138 if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1139 || !DECL_NONSHAREABLE (t))
1142 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1143 clear_tree_used (t);
1146 /* Examine TYPE and determine a bit mask of the following features. */
1148 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1149 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1150 #define SPCT_HAS_ARRAY 4
1151 #define SPCT_HAS_AGGREGATE 8
1154 stack_protect_classify_type (tree type)
1156 unsigned int ret = 0;
1159 switch (TREE_CODE (type))
1162 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1163 if (t == char_type_node
1164 || t == signed_char_type_node
1165 || t == unsigned_char_type_node)
1167 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1168 unsigned HOST_WIDE_INT len;
1170 if (!TYPE_SIZE_UNIT (type)
1171 || !host_integerp (TYPE_SIZE_UNIT (type), 1))
1174 len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
1177 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1179 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1182 ret = SPCT_HAS_ARRAY;
1186 case QUAL_UNION_TYPE:
1188 ret = SPCT_HAS_AGGREGATE;
1189 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1190 if (TREE_CODE (t) == FIELD_DECL)
1191 ret |= stack_protect_classify_type (TREE_TYPE (t));
1201 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1202 part of the local stack frame. Remember if we ever return nonzero for
1203 any variable in this function. The return value is the phase number in
1204 which the variable should be allocated. */
1207 stack_protect_decl_phase (tree decl)
1209 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1212 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1213 has_short_buffer = true;
1215 if (flag_stack_protect == 2)
1217 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1218 && !(bits & SPCT_HAS_AGGREGATE))
1220 else if (bits & SPCT_HAS_ARRAY)
1224 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1227 has_protected_decls = true;
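/* Editor's examples, assuming the default --param ssp-buffer-size=8:
   under -fstack-protector, char buf[64] classifies as a large character
   array and is placed in phase 1, while char buf[4] merely sets
   has_short_buffer and stays in phase 0; under -fstack-protector-all
   (flag_stack_protect == 2), non-character arrays fall into phase 2.  */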
1232 /* Two helper routines that check for phase 1 and phase 2. These are used
1233 as callbacks for expand_stack_vars. */
1236 stack_protect_decl_phase_1 (tree decl)
1238 return stack_protect_decl_phase (decl) == 1;
1242 stack_protect_decl_phase_2 (tree decl)
1244 return stack_protect_decl_phase (decl) == 2;
1247 /* Ensure that variables in different stack protection phases conflict
1248 so that they are not merged and share the same stack slot. */
1251 add_stack_protection_conflicts (void)
1253 size_t i, j, n = stack_vars_num;
1254 unsigned char *phase;
1256 phase = XNEWVEC (unsigned char, n);
1257 for (i = 0; i < n; ++i)
1258 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1260 for (i = 0; i < n; ++i)
1262 unsigned char ph_i = phase[i];
1263 for (j = 0; j < i; ++j)
1264 if (ph_i != phase[j])
1265 add_stack_var_conflict (i, j);
1271 /* Create a decl for the guard at the top of the stack frame. */
1274 create_stack_guard (void)
1276 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1277 VAR_DECL, NULL, ptr_type_node);
1278 TREE_THIS_VOLATILE (guard) = 1;
1279 TREE_USED (guard) = 1;
1280 expand_one_stack_var (guard);
1281 crtl->stack_protect_guard = guard;
1284 /* Prepare for expanding variables. */
1286 init_vars_expansion (void)
1290 /* Set TREE_USED on all variables in the local_decls. */
1291 FOR_EACH_LOCAL_DECL (cfun, ix, t)
1294 /* Clear TREE_USED on all variables associated with a block scope. */
1295 clear_tree_used (DECL_INITIAL (current_function_decl));
1297 /* Initialize local stack smashing state. */
1298 has_protected_decls = false;
1299 has_short_buffer = false;
1302 /* Free up stack variable graph data. */
1304 fini_vars_expansion (void)
1306 size_t i, n = stack_vars_num;
1307 for (i = 0; i < n; i++)
1308 BITMAP_FREE (stack_vars[i].conflicts);
1309 XDELETEVEC (stack_vars);
1310 XDELETEVEC (stack_vars_sorted);
1312 stack_vars_alloc = stack_vars_num = 0;
1315 /* Make a fair guess for the size of the stack frame of the function
1316 in NODE. This doesn't have to be exact, the result is only used in
1317 the inline heuristics. So we don't want to run the full stack var
1318 packing algorithm (which is quadratic in the number of stack vars).
1319 Instead, we calculate the total size of all stack vars. This turns
1320 out to be a pretty fair estimate -- packing of stack vars doesn't
1321 happen very often. */
1324 estimated_stack_frame_size (struct cgraph_node *node)
1326 HOST_WIDE_INT size = 0;
1329 tree old_cur_fun_decl = current_function_decl;
1330 referenced_var_iterator rvi;
1331 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
1333 current_function_decl = node->decl;
1336 gcc_checking_assert (gimple_referenced_vars (fn));
1337 FOR_EACH_REFERENCED_VAR (fn, var, rvi)
1338 size += expand_one_var (var, true, false);
1340 if (stack_vars_num > 0)
1342 /* Fake sorting the stack vars for account_stack_vars (). */
1343 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1344 for (i = 0; i < stack_vars_num; ++i)
1345 stack_vars_sorted[i] = i;
1346 size += account_stack_vars ();
1347 fini_vars_expansion ();
1350 current_function_decl = old_cur_fun_decl;
1354 /* Expand all variables used in the function. */
1357 expand_used_vars (void)
1359 tree var, outer_block = DECL_INITIAL (current_function_decl);
1360 VEC(tree,heap) *maybe_local_decls = NULL;
1364 /* Compute the phase of the stack frame for this function. */
1366 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1367 int off = STARTING_FRAME_OFFSET % align;
1368 frame_phase = off ? align - off : 0;
1371 init_vars_expansion ();
1373 for (i = 0; i < SA.map->num_partitions; i++)
1375 tree var = partition_to_var (SA.map, i);
1377 gcc_assert (is_gimple_reg (var));
1378 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1379 expand_one_var (var, true, true);
1382 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1383 contain the default def (representing the parm or result itself)
1384 we don't do anything here. But those which don't contain the
1385 default def (representing a temporary based on the parm/result)
1386 we need to allocate space just like for normal VAR_DECLs. */
1387 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1389 expand_one_var (var, true, true);
1390 gcc_assert (SA.partition_to_pseudo[i]);
1395 /* At this point all variables on the local_decls with TREE_USED
1396 set are not associated with any block scope. Lay them out. */
1398 len = VEC_length (tree, cfun->local_decls);
1399 FOR_EACH_LOCAL_DECL (cfun, i, var)
1401 bool expand_now = false;
1403 /* Expanded above already. */
1404 if (is_gimple_reg (var))
1406 TREE_USED (var) = 0;
1409 /* We didn't set a block for static or extern because it's hard
1410 to tell the difference between a global variable (re)declared
1411 in a local scope, and one that's really declared there to
1412 begin with. And it doesn't really matter much, since we're
1413 not giving them stack space. Expand them now. */
1414 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1417 /* If the variable is not associated with any block, then it
1418 was created by the optimizers, and could be live anywhere
1419 in the function. */
1420 else if (TREE_USED (var))
1423 /* Finally, mark all variables on the list as used. We'll use
1424 this in a moment when we expand those associated with scopes. */
1425 TREE_USED (var) = 1;
1428 expand_one_var (var, true, true);
1431 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1433 rtx rtl = DECL_RTL_IF_SET (var);
1435 /* Keep artificial non-ignored vars in cfun->local_decls
1436 chain until instantiate_decls. */
1437 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1438 add_local_decl (cfun, var);
1439 else if (rtl == NULL_RTX)
1440 /* If rtl isn't set yet, which can happen e.g. with
1441 -fstack-protector, retry before returning from this
1442 function. */
1443 VEC_safe_push (tree, heap, maybe_local_decls, var);
1447 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1449 +-----------------+-----------------+
1450 | ...processed... | ...duplicates...|
1451 +-----------------+-----------------+
1453 +-- LEN points here.
1455 We just want the duplicates, as those are the artificial
1456 non-ignored vars that we want to keep until instantiate_decls.
1457 Move them down and truncate the array. */
1458 if (!VEC_empty (tree, cfun->local_decls))
1459 VEC_block_remove (tree, cfun->local_decls, 0, len);
1461 /* At this point, all variables within the block tree with TREE_USED
1462 set are actually used by the optimized function. Lay them out. */
1463 expand_used_vars_for_block (outer_block, true);
1465 if (stack_vars_num > 0)
1467 /* Due to the way alias sets work, no variables with non-conflicting
1468 alias sets may be assigned the same address. Add conflicts to
1469 reflect this. */
1470 add_alias_set_conflicts ();
1472 /* If stack protection is enabled, we don't share space between
1473 vulnerable data and non-vulnerable data. */
1474 if (flag_stack_protect)
1475 add_stack_protection_conflicts ();
1477 /* Now that we have collected all stack variables, and have computed a
1478 minimal interference graph, attempt to save some stack space. */
1479 partition_stack_vars ();
1481 dump_stack_var_partition ();
1484 /* There are several conditions under which we should create a
1485 stack guard: protect-all, alloca used, protected decls present. */
1486 if (flag_stack_protect == 2
1487 || (flag_stack_protect
1488 && (cfun->calls_alloca || has_protected_decls)))
1489 create_stack_guard ();
1491 /* Assign rtl to each variable based on these partitions. */
1492 if (stack_vars_num > 0)
1494 /* Reorder decls to be protected by iterating over the variables
1495 array multiple times, and allocating out of each phase in turn. */
1496 /* ??? We could probably integrate this into the qsort we did
1497 earlier, such that we naturally see these variables first,
1498 and thus naturally allocate things in the right order. */
1499 if (has_protected_decls)
1501 /* Phase 1 contains only character arrays. */
1502 expand_stack_vars (stack_protect_decl_phase_1);
1504 /* Phase 2 contains other kinds of arrays. */
1505 if (flag_stack_protect == 2)
1506 expand_stack_vars (stack_protect_decl_phase_2);
1509 expand_stack_vars (NULL);
1511 fini_vars_expansion ();
1514 /* If there were any artificial non-ignored vars without rtl
1515 found earlier, see if deferred stack allocation hasn't assigned
1516 rtl to them. */
1517 FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
1519 rtx rtl = DECL_RTL_IF_SET (var);
1521 /* Keep artificial non-ignored vars in cfun->local_decls
1522 chain until instantiate_decls. */
1523 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1524 add_local_decl (cfun, var);
1526 VEC_free (tree, heap, maybe_local_decls);
1528 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1529 if (STACK_ALIGNMENT_NEEDED)
1531 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1532 if (!FRAME_GROWS_DOWNWARD)
1533 frame_offset += align - 1;
1534 frame_offset &= -align;
1539 /* If we need to produce a detailed dump, print the tree representation
1540 for STMT to the dump file. SINCE is the last RTX after which the RTL
1541 generated for STMT should have been appended. */
1544 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
1546 if (dump_file && (dump_flags & TDF_DETAILS))
1548 fprintf (dump_file, "\n;; ");
1549 print_gimple_stmt (dump_file, stmt, 0,
1550 TDF_SLIM | (dump_flags & TDF_LINENO));
1551 fprintf (dump_file, "\n");
1553 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1557 /* Maps the blocks that do not contain tree labels to rtx labels. */
1559 static struct pointer_map_t *lab_rtx_for_bb;
1561 /* Returns the label_rtx expression for a label starting basic block BB. */
1564 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
1566 gimple_stmt_iterator gsi;
1571 if (bb->flags & BB_RTL)
1572 return block_label (bb);
1574 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1578 /* Find the tree label if it is present. */
1580 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1582 lab_stmt = gsi_stmt (gsi);
1583 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
1586 lab = gimple_label_label (lab_stmt);
1587 if (DECL_NONLOCAL (lab))
1590 return label_rtx (lab);
1593 elt = pointer_map_insert (lab_rtx_for_bb, bb);
1594 *elt = gen_label_rtx ();
1599 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1600 of a basic block where we just expanded the conditional at the end,
1601 possibly clean up the CFG and instruction sequence. LAST is the
1602 last instruction before the just emitted jump sequence. */
1605 maybe_cleanup_end_of_block (edge e, rtx last)
1607 /* Special case: when jumpif decides that the condition is
1608 trivial it emits an unconditional jump (and the necessary
1609 barrier). But we still have two edges, the fallthru one is
1610 wrong. purge_dead_edges would clean this up later. Unfortunately
1611 we have to insert insns (and split edges) before
1612 find_many_sub_basic_blocks and hence before purge_dead_edges.
1613 But splitting edges might create new blocks which depend on the
1614 fact that if there are two edges there's no barrier. So the
1615 barrier would get lost and verify_flow_info would ICE. Instead
1616 of auditing all edge splitters to care for the barrier (which
1617 normally isn't there in a cleaned CFG), fix it here. */
1618 if (BARRIER_P (get_last_insn ()))
1622 /* Now, we have a single successor block, if we have insns to
1623 insert on the remaining edge we potentially will insert
1624 it at the end of this block (if the dest block isn't feasible)
1625 in order to avoid splitting the edge. This insertion will take
1626 place in front of the last jump. But we might have emitted
1627 multiple jumps (conditional and one unconditional) to the
1628 same destination. Inserting in front of the last one then
1629 is a problem. See PR 40021. We fix this by deleting all
1630 jumps except the last unconditional one. */
1631 insn = PREV_INSN (get_last_insn ());
1632 /* Make sure we have an unconditional jump. Otherwise we're
1633 confused. */
1634 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
1635 for (insn = PREV_INSN (insn); insn != last;)
1637 insn = PREV_INSN (insn);
1638 if (JUMP_P (NEXT_INSN (insn)))
1640 if (!any_condjump_p (NEXT_INSN (insn)))
1642 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
1643 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
1645 delete_insn (NEXT_INSN (insn));
1651 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
1652 Returns a new basic block if we've terminated the current basic
1653 block and created a new one. */
1656 expand_gimple_cond (basic_block bb, gimple stmt)
1658 basic_block new_bb, dest;
1663 enum tree_code code;
1666 code = gimple_cond_code (stmt);
1667 op0 = gimple_cond_lhs (stmt);
1668 op1 = gimple_cond_rhs (stmt);
1669 /* We're sometimes presented with such code:
1670 D.123_1 = x < y;
1671 if (D.123_1 != 0)
1672 ...
1673 This would expand to two comparisons which then later might
1674 be cleaned up by combine. But some pattern matchers like if-conversion
1675 work better when there's only one compare, so make up for this
1676 here as a special exception if TER would have made the same change. */
1677 if (gimple_cond_single_var_p (stmt)
1679 && TREE_CODE (op0) == SSA_NAME
1680 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
1682 gimple second = SSA_NAME_DEF_STMT (op0);
1683 if (gimple_code (second) == GIMPLE_ASSIGN)
1685 enum tree_code code2 = gimple_assign_rhs_code (second);
1686 if (TREE_CODE_CLASS (code2) == tcc_comparison)
1689 op0 = gimple_assign_rhs1 (second);
1690 op1 = gimple_assign_rhs2 (second);
1692 /* If jumps are cheap turn some more codes into
1693 jumpy sequences. */
1694 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
1696 if ((code2 == BIT_AND_EXPR
1697 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
1698 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
1699 || code2 == TRUTH_AND_EXPR)
1701 code = TRUTH_ANDIF_EXPR;
1702 op0 = gimple_assign_rhs1 (second);
1703 op1 = gimple_assign_rhs2 (second);
1705 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
1707 code = TRUTH_ORIF_EXPR;
1708 op0 = gimple_assign_rhs1 (second);
1709 op1 = gimple_assign_rhs2 (second);
1715 last2 = last = get_last_insn ();
1717 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1718 set_curr_insn_source_location (gimple_location (stmt));
1719 set_curr_insn_block (gimple_block (stmt));
1721 /* These flags have no purpose in RTL land. */
1722 true_edge->flags &= ~EDGE_TRUE_VALUE;
1723 false_edge->flags &= ~EDGE_FALSE_VALUE;
1725 /* We can either have a pure conditional jump with one fallthru edge or
1726 two-way jump that needs to be decomposed into two basic blocks. */
1727 if (false_edge->dest == bb->next_bb)
1729 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1730 true_edge->probability);
1731 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1732 if (true_edge->goto_locus)
1734 set_curr_insn_source_location (true_edge->goto_locus);
1735 set_curr_insn_block (true_edge->goto_block);
1736 true_edge->goto_locus = curr_insn_locator ();
1738 true_edge->goto_block = NULL;
1739 false_edge->flags |= EDGE_FALLTHRU;
1740 maybe_cleanup_end_of_block (false_edge, last);
1743 if (true_edge->dest == bb->next_bb)
1745 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
1746 false_edge->probability);
1747 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1748 if (false_edge->goto_locus)
1750 set_curr_insn_source_location (false_edge->goto_locus);
1751 set_curr_insn_block (false_edge->goto_block);
1752 false_edge->goto_locus = curr_insn_locator ();
1754 false_edge->goto_block = NULL;
1755 true_edge->flags |= EDGE_FALLTHRU;
1756 maybe_cleanup_end_of_block (true_edge, last);
1760 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1761 true_edge->probability);
1762 last = get_last_insn ();
1763 if (false_edge->goto_locus)
1765 set_curr_insn_source_location (false_edge->goto_locus);
1766 set_curr_insn_block (false_edge->goto_block);
1767 false_edge->goto_locus = curr_insn_locator ();
1769 false_edge->goto_block = NULL;
1770 emit_jump (label_rtx_for_bb (false_edge->dest));
1773 if (BARRIER_P (BB_END (bb)))
1774 BB_END (bb) = PREV_INSN (BB_END (bb));
1775 update_bb_for_insn (bb);
1777 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1778 dest = false_edge->dest;
1779 redirect_edge_succ (false_edge, new_bb);
1780 false_edge->flags |= EDGE_FALLTHRU;
1781 new_bb->count = false_edge->count;
1782 new_bb->frequency = EDGE_FREQUENCY (false_edge);
1783 new_edge = make_edge (new_bb, dest, 0);
1784 new_edge->probability = REG_BR_PROB_BASE;
1785 new_edge->count = new_bb->count;
1786 if (BARRIER_P (BB_END (new_bb)))
1787 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1788 update_bb_for_insn (new_bb);
1790 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
1792 if (true_edge->goto_locus)
1794 set_curr_insn_source_location (true_edge->goto_locus);
1795 set_curr_insn_block (true_edge->goto_block);
1796 true_edge->goto_locus = curr_insn_locator ();
1798 true_edge->goto_block = NULL;
1803 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
1804 statement STMT. */
1807 expand_call_stmt (gimple stmt)
1809 tree exp, decl, lhs;
1813 if (gimple_call_internal_p (stmt))
1815 expand_internal_call (stmt);
1819 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
1821 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
1822 decl = gimple_call_fndecl (stmt);
1823 builtin_p = decl && DECL_BUILT_IN (decl);
1825 /* If this is not a builtin function, the function type through which the
1826 call is made may be different from the type of the function. */
1829 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
1830 CALL_EXPR_FN (exp));
1832 TREE_TYPE (exp) = gimple_call_return_type (stmt);
1833 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
1835 for (i = 0; i < gimple_call_num_args (stmt); i++)
1837 tree arg = gimple_call_arg (stmt, i);
1839 /* TER addresses into arguments of builtin functions so we have a
1840 chance to infer more correct alignment information. See PR39954. */
1842 && TREE_CODE (arg) == SSA_NAME
1843 && (def = get_gimple_for_ssa_name (arg))
1844 && gimple_assign_rhs_code (def) == ADDR_EXPR)
1845 arg = gimple_assign_rhs1 (def);
1846 CALL_EXPR_ARG (exp, i) = arg;
1849 if (gimple_has_side_effects (stmt))
1850 TREE_SIDE_EFFECTS (exp) = 1;
1852 if (gimple_call_nothrow_p (stmt))
1853 TREE_NOTHROW (exp) = 1;
1855 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
1856 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
1858 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1859 && DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA)
1860 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
1862 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
1863 CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
1864 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
1865 SET_EXPR_LOCATION (exp, gimple_location (stmt));
1866 TREE_BLOCK (exp) = gimple_block (stmt);
1868 /* Ensure RTL is created for debug args. */
1869 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
1871 VEC(tree, gc) **debug_args = decl_debug_args_lookup (decl);
1876 for (ix = 1; VEC_iterate (tree, *debug_args, ix, dtemp); ix += 2)
1878 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
1879 expand_debug_expr (dtemp);
1883 lhs = gimple_call_lhs (stmt);
1885 expand_assignment (lhs, exp, false);
1887 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
1890 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
1891 STMT that doesn't require special handling for outgoing edges. That
1892 is, no tailcalls and no GIMPLE_COND. */
1895 expand_gimple_stmt_1 (gimple stmt)
1899 set_curr_insn_source_location (gimple_location (stmt));
1900 set_curr_insn_block (gimple_block (stmt));
1902 switch (gimple_code (stmt))
1905 op0 = gimple_goto_dest (stmt);
1906 if (TREE_CODE (op0) == LABEL_DECL)
1909 expand_computed_goto (op0);
1912 expand_label (gimple_label_label (stmt));
1915 case GIMPLE_PREDICT:
1921 expand_asm_stmt (stmt);
1924 expand_call_stmt (stmt);
1928 op0 = gimple_return_retval (stmt);
1930 if (op0 && op0 != error_mark_node)
1932 tree result = DECL_RESULT (current_function_decl);
1934 /* If we are not returning the current function's RESULT_DECL,
1935 build an assignment to it. */
1938 /* I believe that a function's RESULT_DECL is unique. */
1939 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
1941 /* ??? We'd like to use simply expand_assignment here,
1942 but this fails if the value is of BLKmode but the return
1943 decl is a register. expand_return has special handling
1944 for this combination, which eventually should move
1945 to common code. See comments there. Until then, let's
1946 build a modify expression :-/ */
1947 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
1952 expand_null_return ();
1954 expand_return (op0);
1959 tree lhs = gimple_assign_lhs (stmt);
1961 /* Tree expand used to fiddle with |= and &= of two bitfield
1962 COMPONENT_REFs here. This can't happen with gimple, the LHS
1963 of binary assigns must be a gimple reg. */
1965 if (TREE_CODE (lhs) != SSA_NAME
1966 || get_gimple_rhs_class (gimple_expr_code (stmt))
1967 == GIMPLE_SINGLE_RHS)
1969 tree rhs = gimple_assign_rhs1 (stmt);
1970 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
1971 == GIMPLE_SINGLE_RHS);
1972 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
1973 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
1974 expand_assignment (lhs, rhs,
1975 gimple_assign_nontemporal_move_p (stmt));
1980 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
1981 struct separate_ops ops;
1982 bool promoted = false;
1984 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
1985 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
1988 ops.code = gimple_assign_rhs_code (stmt);
1989 ops.type = TREE_TYPE (lhs);
1990 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
1992 case GIMPLE_TERNARY_RHS:
1993 ops.op2 = gimple_assign_rhs3 (stmt);
1995 case GIMPLE_BINARY_RHS:
1996 ops.op1 = gimple_assign_rhs2 (stmt);
1998 case GIMPLE_UNARY_RHS:
1999 ops.op0 = gimple_assign_rhs1 (stmt);
2004 ops.location = gimple_location (stmt);
2006 /* If we want to use a nontemporal store, force the value to
2007 register first. If we store into a promoted register,
2008 don't directly expand to target. */
2009 temp = nontemporal || promoted ? NULL_RTX : target;
2010 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
2017 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
2018 /* If TEMP is a VOIDmode constant, use convert_modes to make
2019 sure that we properly convert it. */
2020 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2022 temp = convert_modes (GET_MODE (target),
2023 TYPE_MODE (ops.type),
2025 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2026 GET_MODE (target), temp, unsignedp);
2029 convert_move (SUBREG_REG (target), temp, unsignedp);
2031 else if (nontemporal && emit_storent_insn (target, temp))
2035 temp = force_operand (temp, target);
2037 emit_move_insn (target, temp);
2048 /* Expand one gimple statement STMT and return the last RTL instruction
2049 before any of the newly generated ones.
2051 In addition to generating the necessary RTL instructions this also
2052 sets REG_EH_REGION notes if necessary and sets the current source
2053 location for diagnostics. */
2056 expand_gimple_stmt (gimple stmt)
2058 location_t saved_location = input_location;
2059 rtx last = get_last_insn ();
2064 /* We need to save and restore the current source location so that errors
2065 discovered during expansion are emitted with the right location. But
2066 it would be better if the diagnostic routines used the source location
2067 embedded in the tree nodes rather than globals. */
2068 if (gimple_has_location (stmt))
2069 input_location = gimple_location (stmt);
2071 expand_gimple_stmt_1 (stmt);
2073 /* Free any temporaries used to evaluate this statement. */
2076 input_location = saved_location;
2078 /* Mark all insns that may trap. */
2079 lp_nr = lookup_stmt_eh_lp (stmt);
2083 for (insn = next_real_insn (last); insn;
2084 insn = next_real_insn (insn))
2086 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2087 /* If we want exceptions for non-call insns, any
2088 may_trap_p instruction may throw. */
2089 && GET_CODE (PATTERN (insn)) != CLOBBER
2090 && GET_CODE (PATTERN (insn)) != USE
2091 && insn_could_throw_p (insn))
2092 make_reg_eh_region_note (insn, 0, lp_nr);
2099 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2100 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2101 generated a tail call (something that might be denied by the ABI
2102 rules governing the call; see calls.c).
2104 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2105 control can still reach the rest of BB. The case here is __builtin_sqrt,
2106 where the NaN result goes through the external function (with a
2107 tailcall) and the normal result happens via a sqrt instruction. */
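/* For illustration, a sketch of the shape assumed in the comment
   above (not the exact expansion):
     y = sqrt-insn (x)
     if (y != y)     -- result is NaN, errno must be set
       sqrt (x)      -- sibcall to the library routine
     <label>:        -- normal result falls through here
   so the block can end in a conditional sibcall and still fall
   through. */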
2110 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2118 last2 = last = expand_gimple_stmt (stmt);
2120 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2121 if (CALL_P (last) && SIBLING_CALL_P (last))
2124 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2126 *can_fallthru = true;
2130 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2131 Any instructions emitted here are about to be deleted. */
2132 do_pending_stack_adjust ();
2134 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2135 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2136 EH or abnormal edges, we shouldn't have created a tail call in
2137 the first place. So it seems to me we should just be removing
2138 all edges here, or redirecting the existing fallthru edge to the exit block. */
2144 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2146 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2148 if (e->dest != EXIT_BLOCK_PTR)
2150 e->dest->count -= e->count;
2151 e->dest->frequency -= EDGE_FREQUENCY (e);
2152 if (e->dest->count < 0)
2154 if (e->dest->frequency < 0)
2155 e->dest->frequency = 0;
2158 probability += e->probability;
2165 /* This is somewhat ugly: the call_expr expander often emits instructions
2166 after the sibcall (to perform the function return). These confuse the
2167 find_many_sub_basic_blocks code, so we need to get rid of these. */
2168 last = NEXT_INSN (last);
2169 gcc_assert (BARRIER_P (last));
2171 *can_fallthru = false;
2172 while (NEXT_INSN (last))
2174 /* For instance, the sqrt builtin expander expands an `if' with a
2175 sibcall in the `then' arm and a label for the `else' arm. */
2176 if (LABEL_P (NEXT_INSN (last)))
2178 *can_fallthru = true;
2181 delete_insn (NEXT_INSN (last));
2184 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2185 e->probability += probability;
2188 update_bb_for_insn (bb);
2190 if (NEXT_INSN (last))
2192 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2195 if (BARRIER_P (last))
2196 BB_END (bb) = PREV_INSN (last);
2199 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2204 /* Return the difference between the floor and the truncated result of
2205 a signed division by OP1 with remainder MOD. */
2207 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2209 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
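/* E.g. for -7 / 2: trunc gives -3 with mod -1; op1 / mod is
   2 / -1 < 0, so the adjustment is -1 and the floor result is -4. */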
2210 return gen_rtx_IF_THEN_ELSE
2211 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2212 gen_rtx_IF_THEN_ELSE
2213 (mode, gen_rtx_LT (BImode,
2214 gen_rtx_DIV (mode, op1, mod),
2216 constm1_rtx, const0_rtx),
2220 /* Return the difference between the ceil and the truncated result of
2221 a signed division by OP1 with remainder MOD. */
2223 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2225 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
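/* E.g. for 7 / 2: trunc gives 3 with mod 1; op1 / mod is
   2 / 1 > 0, so the adjustment is 1 and the ceil result is 4. */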
2226 return gen_rtx_IF_THEN_ELSE
2227 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2228 gen_rtx_IF_THEN_ELSE
2229 (mode, gen_rtx_GT (BImode,
2230 gen_rtx_DIV (mode, op1, mod),
2232 const1_rtx, const0_rtx),
2236 /* Return the difference between the ceil and the truncated result of
2237 an unsigned division by OP1 with remainder MOD. */
2239 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2241 /* (mod != 0 ? 1 : 0) */
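/* E.g. for 7 / 2: trunc gives 3 with mod 1, so the adjustment
   is 1 and the ceil result is 4. */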
2242 return gen_rtx_IF_THEN_ELSE
2243 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2244 const1_rtx, const0_rtx);
2247 /* Return the difference between the rounded and the truncated result
2248 of a signed division by OP1 with remainder MOD. Halfway cases are
2249 rounded away from zero, rather than to the nearest even number. */
2251 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2253 /* (abs (mod) >= abs (op1) - abs (mod)
2254 ? (op1 / mod > 0 ? 1 : -1) : 0) */
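/* E.g. for 7 / 2: trunc gives 3 with mod 1; abs (mod) equals
   abs (op1) - abs (mod) (1 >= 1), so the halfway value 3.5 is
   adjusted by +1, away from zero, to 4. */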
2256 return gen_rtx_IF_THEN_ELSE
2257 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2258 gen_rtx_MINUS (mode,
2259 gen_rtx_ABS (mode, op1),
2260 gen_rtx_ABS (mode, mod))),
2261 gen_rtx_IF_THEN_ELSE
2262 (mode, gen_rtx_GT (BImode,
2263 gen_rtx_DIV (mode, op1, mod),
2265 const1_rtx, constm1_rtx),
2269 /* Return the difference between the rounded and the truncated result
2270 of an unsigned division by OP1 with remainder MOD. Halfway cases
2271 are rounded away from zero, rather than to the nearest even number. */
2274 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2276 /* (mod >= op1 - mod ? 1 : 0) */
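/* E.g. for 7 / 2: trunc gives 3 with mod 1; 1 >= 2 - 1 holds,
   so the adjustment is 1 and the rounded result is 4. */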
2277 return gen_rtx_IF_THEN_ELSE
2278 (mode, gen_rtx_GE (BImode, mod,
2279 gen_rtx_MINUS (mode, op1, mod)),
2280 const1_rtx, const0_rtx);
2283 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting insns. */
2287 convert_debug_memory_address (enum machine_mode mode, rtx x,
2290 enum machine_mode xmode = GET_MODE (x);
2292 #ifndef POINTERS_EXTEND_UNSIGNED
2293 gcc_assert (mode == Pmode
2294 || mode == targetm.addr_space.address_mode (as));
2295 gcc_assert (xmode == mode || xmode == VOIDmode);
2298 enum machine_mode address_mode = targetm.addr_space.address_mode (as);
2299 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
2301 gcc_assert (mode == address_mode || mode == pointer_mode);
2303 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2306 if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode))
2307 x = simplify_gen_subreg (mode, x, xmode,
2308 subreg_lowpart_offset
2310 else if (POINTERS_EXTEND_UNSIGNED > 0)
2311 x = gen_rtx_ZERO_EXTEND (mode, x);
2312 else if (!POINTERS_EXTEND_UNSIGNED)
2313 x = gen_rtx_SIGN_EXTEND (mode, x);
2316 switch (GET_CODE (x))
2319 if ((SUBREG_PROMOTED_VAR_P (x)
2320 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
2321 || (GET_CODE (SUBREG_REG (x)) == PLUS
2322 && REG_P (XEXP (SUBREG_REG (x), 0))
2323 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
2324 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
2325 && GET_MODE (SUBREG_REG (x)) == mode)
2326 return SUBREG_REG (x);
2329 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
2330 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
2333 temp = shallow_copy_rtx (x);
2334 PUT_MODE (temp, mode);
2337 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2339 temp = gen_rtx_CONST (mode, temp);
2343 if (CONST_INT_P (XEXP (x, 1)))
2345 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2347 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
2353 /* Don't know how to express ptr_extend as an operation in debug info. */
2356 #endif /* POINTERS_EXTEND_UNSIGNED */
2361 /* Return an RTX equivalent to the value of the tree expression EXP. */
2365 expand_debug_expr (tree exp)
2367 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2368 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2369 enum machine_mode inner_mode = VOIDmode;
2370 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2373 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2375 case tcc_expression:
2376 switch (TREE_CODE (exp))
2380 case WIDEN_MULT_PLUS_EXPR:
2381 case WIDEN_MULT_MINUS_EXPR:
2385 case TRUTH_ANDIF_EXPR:
2386 case TRUTH_ORIF_EXPR:
2387 case TRUTH_AND_EXPR:
2389 case TRUTH_XOR_EXPR:
2392 case TRUTH_NOT_EXPR:
2401 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2408 case tcc_comparison:
2409 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2416 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2417 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2427 case tcc_exceptional:
2428 case tcc_declaration:
2434 switch (TREE_CODE (exp))
2437 if (!lookup_constant_def (exp))
2439 if (strlen (TREE_STRING_POINTER (exp)) + 1
2440 != (size_t) TREE_STRING_LENGTH (exp))
2442 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2443 op0 = gen_rtx_MEM (BLKmode, op0);
2444 set_mem_attributes (op0, exp, 0);
2447 /* Fall through... */
2452 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2456 gcc_assert (COMPLEX_MODE_P (mode));
2457 op0 = expand_debug_expr (TREE_REALPART (exp));
2458 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2459 return gen_rtx_CONCAT (mode, op0, op1);
2461 case DEBUG_EXPR_DECL:
2462 op0 = DECL_RTL_IF_SET (exp);
2467 op0 = gen_rtx_DEBUG_EXPR (mode);
2468 DEBUG_EXPR_TREE_DECL (op0) = exp;
2469 SET_DECL_RTL (exp, op0);
2479 op0 = DECL_RTL_IF_SET (exp);
2481 /* This decl was probably optimized away. */
2484 if (TREE_CODE (exp) != VAR_DECL
2485 || DECL_EXTERNAL (exp)
2486 || !TREE_STATIC (exp)
2488 || DECL_HARD_REGISTER (exp)
2489 || DECL_IN_CONSTANT_POOL (exp)
2490 || mode == VOIDmode)
2493 op0 = make_decl_rtl_for_debug (exp);
2495 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2496 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2500 op0 = copy_rtx (op0);
2502 if (GET_MODE (op0) == BLKmode
2503 /* If op0 is not BLKmode but MODE is, the code at adjust_mode
2504 below would ICE. While it is likely a FE bug,
2505 try to be robust here. See PR43166. */
2507 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2509 gcc_assert (MEM_P (op0));
2510 op0 = adjust_address_nv (op0, mode, 0);
2521 inner_mode = GET_MODE (op0);
2523 if (mode == inner_mode)
2526 if (inner_mode == VOIDmode)
2528 if (TREE_CODE (exp) == SSA_NAME)
2529 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2531 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2532 if (mode == inner_mode)
2536 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2538 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2539 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2540 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2541 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2543 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2545 else if (FLOAT_MODE_P (mode))
2547 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2548 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2549 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2551 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2553 else if (FLOAT_MODE_P (inner_mode))
2556 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2558 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2560 else if (CONSTANT_P (op0)
2561 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
2562 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2563 subreg_lowpart_offset (mode,
2565 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2566 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2568 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
2570 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
2576 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2578 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
2579 TREE_OPERAND (exp, 0),
2580 TREE_OPERAND (exp, 1));
2582 return expand_debug_expr (newexp);
2586 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2590 if (TREE_CODE (exp) == MEM_REF)
2592 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
2593 || (GET_CODE (op0) == PLUS
2594 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
2595 /* (mem (debug_implicit_ptr)) might confuse aliasing.
2596 Instead just use get_inner_reference. */
2599 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2600 if (!op1 || !CONST_INT_P (op1))
2603 op0 = plus_constant (op0, INTVAL (op1));
2606 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2607 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2609 as = ADDR_SPACE_GENERIC;
2611 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2613 if (op0 == NULL_RTX)
2616 op0 = gen_rtx_MEM (mode, op0);
2617 set_mem_attributes (op0, exp, 0);
2618 if (TREE_CODE (exp) == MEM_REF
2619 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2620 set_mem_expr (op0, NULL_TREE);
2621 set_mem_addr_space (op0, as);
2625 case TARGET_MEM_REF:
2626 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2627 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
2630 op0 = expand_debug_expr
2631 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2635 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2636 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2638 as = ADDR_SPACE_GENERIC;
2640 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2642 if (op0 == NULL_RTX)
2645 op0 = gen_rtx_MEM (mode, op0);
2647 set_mem_attributes (op0, exp, 0);
2648 set_mem_addr_space (op0, as);
2654 case ARRAY_RANGE_REF:
2659 case VIEW_CONVERT_EXPR:
2661 enum machine_mode mode1;
2662 HOST_WIDE_INT bitsize, bitpos;
2665 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2666 &mode1, &unsignedp, &volatilep, false);
2672 orig_op0 = op0 = expand_debug_expr (tem);
2679 enum machine_mode addrmode, offmode;
2684 op0 = XEXP (op0, 0);
2685 addrmode = GET_MODE (op0);
2686 if (addrmode == VOIDmode)
2689 op1 = expand_debug_expr (offset);
2693 offmode = GET_MODE (op1);
2694 if (offmode == VOIDmode)
2695 offmode = TYPE_MODE (TREE_TYPE (offset));
2697 if (addrmode != offmode)
2698 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2699 subreg_lowpart_offset (addrmode,
2702 /* Don't use offset_address here, we don't need a
2703 recognizable address, and we don't want to generate code. */
2705 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
2711 if (mode1 == VOIDmode)
2713 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
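/* E.g. with BITS_PER_UNIT == 8 and bitpos == 19, the address is
   advanced by 19 / 8 == 2 bytes and bitpos becomes 19 % 8 == 3. */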
2714 if (bitpos >= BITS_PER_UNIT)
2716 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2717 bitpos %= BITS_PER_UNIT;
2719 else if (bitpos < 0)
2722 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2723 op0 = adjust_address_nv (op0, mode1, units);
2724 bitpos += units * BITS_PER_UNIT;
2726 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2727 op0 = adjust_address_nv (op0, mode, 0);
2728 else if (GET_MODE (op0) != mode1)
2729 op0 = adjust_address_nv (op0, mode1, 0);
2731 op0 = copy_rtx (op0);
2732 if (op0 == orig_op0)
2733 op0 = shallow_copy_rtx (op0);
2734 set_mem_attributes (op0, exp, 0);
2737 if (bitpos == 0 && mode == GET_MODE (op0))
2743 if (GET_MODE (op0) == BLKmode)
2746 if ((bitpos % BITS_PER_UNIT) == 0
2747 && bitsize == GET_MODE_BITSIZE (mode1))
2749 enum machine_mode opmode = GET_MODE (op0);
2751 if (opmode == VOIDmode)
2752 opmode = TYPE_MODE (TREE_TYPE (tem));
2754 /* This condition may hold if we're expanding the address
2755 right past the end of an array that turned out not to
2756 be addressable (i.e., the address was only computed in
2757 debug stmts). The gen_subreg below would rightfully
2758 crash, and the address doesn't really exist, so just drop it. */
2760 if (bitpos >= GET_MODE_BITSIZE (opmode))
2763 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
2764 return simplify_gen_subreg (mode, op0, opmode,
2765 bitpos / BITS_PER_UNIT);
2768 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
2769 && TYPE_UNSIGNED (TREE_TYPE (exp))
2771 : ZERO_EXTRACT, mode,
2772 GET_MODE (op0) != VOIDmode
2774 : TYPE_MODE (TREE_TYPE (tem)),
2775 op0, GEN_INT (bitsize), GEN_INT (bitpos));
2779 return simplify_gen_unary (ABS, mode, op0, mode);
2782 return simplify_gen_unary (NEG, mode, op0, mode);
2785 return simplify_gen_unary (NOT, mode, op0, mode);
2788 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
2790 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
2793 case FIX_TRUNC_EXPR:
2794 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
2797 case POINTER_PLUS_EXPR:
2798 /* For the rare target where pointers are not the same size as
2799 size_t, we need to check for mis-matched modes and correct the addition. */
2802 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
2803 && GET_MODE (op0) != GET_MODE (op1))
2805 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
2806 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
2809 /* We always sign-extend, regardless of the signedness of
2810 the operand, because the operand is always unsigned
2811 here even if the original C expression is signed. */
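/* E.g. on a (hypothetical) target with 64-bit pointers and a 32-bit
   sizetype, an offset of (sizetype) -4 is sign-extended here so it
   still subtracts 4 in the wider pointer mode. */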
2812 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
2817 return simplify_gen_binary (PLUS, mode, op0, op1);
2820 return simplify_gen_binary (MINUS, mode, op0, op1);
2823 return simplify_gen_binary (MULT, mode, op0, op1);
2826 case TRUNC_DIV_EXPR:
2827 case EXACT_DIV_EXPR:
2829 return simplify_gen_binary (UDIV, mode, op0, op1);
2831 return simplify_gen_binary (DIV, mode, op0, op1);
2833 case TRUNC_MOD_EXPR:
2834 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
2836 case FLOOR_DIV_EXPR:
2838 return simplify_gen_binary (UDIV, mode, op0, op1);
2841 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
2842 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
2843 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2844 return simplify_gen_binary (PLUS, mode, div, adj);
2847 case FLOOR_MOD_EXPR:
2849 return simplify_gen_binary (UMOD, mode, op0, op1);
2852 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
2853 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2854 adj = simplify_gen_unary (NEG, mode,
2855 simplify_gen_binary (MULT, mode, adj, op1),
2857 return simplify_gen_binary (PLUS, mode, mod, adj);
2863 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
2864 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
2865 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2866 return simplify_gen_binary (PLUS, mode, div, adj);
2870 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
2871 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
2872 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2873 return simplify_gen_binary (PLUS, mode, div, adj);
2879 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
2880 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2881 adj = simplify_gen_unary (NEG, mode,
2882 simplify_gen_binary (MULT, mode, adj, op1),
2884 return simplify_gen_binary (PLUS, mode, mod, adj);
2888 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
2889 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2890 adj = simplify_gen_unary (NEG, mode,
2891 simplify_gen_binary (MULT, mode, adj, op1),
2893 return simplify_gen_binary (PLUS, mode, mod, adj);
2896 case ROUND_DIV_EXPR:
2899 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
2900 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
2901 rtx adj = round_udiv_adjust (mode, mod, op1);
2902 return simplify_gen_binary (PLUS, mode, div, adj);
2906 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
2907 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
2908 rtx adj = round_sdiv_adjust (mode, mod, op1);
2909 return simplify_gen_binary (PLUS, mode, div, adj);
2912 case ROUND_MOD_EXPR:
2915 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
2916 rtx adj = round_udiv_adjust (mode, mod, op1);
2917 adj = simplify_gen_unary (NEG, mode,
2918 simplify_gen_binary (MULT, mode, adj, op1),
2920 return simplify_gen_binary (PLUS, mode, mod, adj);
2924 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
2925 rtx adj = round_sdiv_adjust (mode, mod, op1);
2926 adj = simplify_gen_unary (NEG, mode,
2927 simplify_gen_binary (MULT, mode, adj, op1),
2929 return simplify_gen_binary (PLUS, mode, mod, adj);
2933 return simplify_gen_binary (ASHIFT, mode, op0, op1);
2937 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
2939 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
2942 return simplify_gen_binary (ROTATE, mode, op0, op1);
2945 return simplify_gen_binary (ROTATERT, mode, op0, op1);
2948 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
2951 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
2954 case TRUTH_AND_EXPR:
2955 return simplify_gen_binary (AND, mode, op0, op1);
2959 return simplify_gen_binary (IOR, mode, op0, op1);
2962 case TRUTH_XOR_EXPR:
2963 return simplify_gen_binary (XOR, mode, op0, op1);
2965 case TRUTH_ANDIF_EXPR:
2966 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
2968 case TRUTH_ORIF_EXPR:
2969 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
2971 case TRUTH_NOT_EXPR:
2972 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
2975 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
2979 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
2983 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
2987 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
2991 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
2994 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
2996 case UNORDERED_EXPR:
2997 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
3000 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
3003 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
3006 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
3009 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
3012 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
3015 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
3018 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
3021 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
3024 gcc_assert (COMPLEX_MODE_P (mode));
3025 if (GET_MODE (op0) == VOIDmode)
3026 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
3027 if (GET_MODE (op1) == VOIDmode)
3028 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
3029 return gen_rtx_CONCAT (mode, op0, op1);
3032 if (GET_CODE (op0) == CONCAT)
3033 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
3034 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
3036 GET_MODE_INNER (mode)));
3039 enum machine_mode imode = GET_MODE_INNER (mode);
3044 re = adjust_address_nv (op0, imode, 0);
3045 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
3049 enum machine_mode ifmode = int_mode_for_mode (mode);
3050 enum machine_mode ihmode = int_mode_for_mode (imode);
3052 if (ifmode == BLKmode || ihmode == BLKmode)
3054 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
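/* E.g. for SCmode (two SFmode parts, 64 bits in all), ifmode is
   DImode and ihmode is SImode; the two parts are then 32-bit
   ZERO_EXTRACTs at bit 0 and bit 32. */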
3057 re = gen_rtx_SUBREG (ifmode, re, 0);
3058 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3059 if (imode != ihmode)
3060 re = gen_rtx_SUBREG (imode, re, 0);
3061 im = copy_rtx (op0);
3063 im = gen_rtx_SUBREG (ifmode, im, 0);
3064 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3065 if (imode != ihmode)
3066 im = gen_rtx_SUBREG (imode, im, 0);
3068 im = gen_rtx_NEG (imode, im);
3069 return gen_rtx_CONCAT (mode, re, im);
3073 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3074 if (!op0 || !MEM_P (op0))
3076 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3077 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3078 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
3079 && !TREE_ADDRESSABLE (TREE_OPERAND (exp, 0)))
3080 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3082 if (handled_component_p (TREE_OPERAND (exp, 0)))
3084 HOST_WIDE_INT bitoffset, bitsize, maxsize;
3086 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3087 &bitoffset, &bitsize, &maxsize);
3088 if ((TREE_CODE (decl) == VAR_DECL
3089 || TREE_CODE (decl) == PARM_DECL
3090 || TREE_CODE (decl) == RESULT_DECL)
3091 && !TREE_ADDRESSABLE (decl)
3092 && (bitoffset % BITS_PER_UNIT) == 0
3094 && bitsize == maxsize)
3095 return plus_constant (gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl),
3096 bitoffset / BITS_PER_UNIT);
3102 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
3103 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
3108 exp = build_constructor_from_list (TREE_TYPE (exp),
3109 TREE_VECTOR_CST_ELTS (exp));
3113 if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
3118 op0 = gen_rtx_CONCATN
3119 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3121 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3123 op1 = expand_debug_expr (val);
3126 XVECEXP (op0, 0, i) = op1;
3129 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3131 op1 = expand_debug_expr
3132 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
3137 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3138 XVECEXP (op0, 0, i) = op1;
3144 goto flag_unsupported;
3147 /* ??? Maybe handle some builtins? */
3152 gimple g = get_gimple_for_ssa_name (exp);
3155 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3161 int part = var_to_partition (SA.map, exp);
3163 if (part == NO_PARTITION)
3165 /* If this is a reference to an incoming value of a parameter
3166 that is never used in the code, or whose incoming value is
3167 never used, use the PARM_DECL's DECL_RTL if set. */
3169 if (SSA_NAME_IS_DEFAULT_DEF (exp)
3170 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
3172 rtx incoming = DECL_INCOMING_RTL (SSA_NAME_VAR (exp));
3174 && GET_MODE (incoming) != BLKmode
3175 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
3176 || (MEM_P (incoming)
3177 && REG_P (XEXP (incoming, 0))
3178 && HARD_REGISTER_P (XEXP (incoming, 0)))))
3180 op0 = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
3181 ENTRY_VALUE_EXP (op0) = incoming;
3186 && !TREE_ADDRESSABLE (SSA_NAME_VAR (exp))
3187 && GET_MODE (incoming) != BLKmode
3188 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
3189 || (GET_CODE (XEXP (incoming, 0)) == PLUS
3190 && XEXP (XEXP (incoming, 0), 0)
3191 == virtual_incoming_args_rtx
3192 && CONST_INT_P (XEXP (XEXP (incoming, 0),
3198 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
3206 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
3208 op0 = copy_rtx (SA.partition_to_pseudo[part]);
3216 /* Vector stuff. For most of these codes we don't have RTL equivalents. */
3217 case REALIGN_LOAD_EXPR:
3218 case REDUC_MAX_EXPR:
3219 case REDUC_MIN_EXPR:
3220 case REDUC_PLUS_EXPR:
3222 case VEC_EXTRACT_EVEN_EXPR:
3223 case VEC_EXTRACT_ODD_EXPR:
3224 case VEC_INTERLEAVE_HIGH_EXPR:
3225 case VEC_INTERLEAVE_LOW_EXPR:
3226 case VEC_LSHIFT_EXPR:
3227 case VEC_PACK_FIX_TRUNC_EXPR:
3228 case VEC_PACK_SAT_EXPR:
3229 case VEC_PACK_TRUNC_EXPR:
3230 case VEC_RSHIFT_EXPR:
3231 case VEC_UNPACK_FLOAT_HI_EXPR:
3232 case VEC_UNPACK_FLOAT_LO_EXPR:
3233 case VEC_UNPACK_HI_EXPR:
3234 case VEC_UNPACK_LO_EXPR:
3235 case VEC_WIDEN_MULT_HI_EXPR:
3236 case VEC_WIDEN_MULT_LO_EXPR:
3240 case ADDR_SPACE_CONVERT_EXPR:
3241 case FIXED_CONVERT_EXPR:
3243 case WITH_SIZE_EXPR:
3247 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3248 && SCALAR_INT_MODE_P (mode))
3251 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3253 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3256 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3258 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
3260 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3261 return simplify_gen_binary (PLUS, mode, op0, op2);
3265 case WIDEN_MULT_EXPR:
3266 case WIDEN_MULT_PLUS_EXPR:
3267 case WIDEN_MULT_MINUS_EXPR:
3268 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3269 && SCALAR_INT_MODE_P (mode))
3271 inner_mode = GET_MODE (op0);
3272 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3273 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3275 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3276 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3277 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
3279 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
3280 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3281 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3283 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3284 return simplify_gen_binary (PLUS, mode, op0, op2);
3286 return simplify_gen_binary (MINUS, mode, op2, op0);
3290 case WIDEN_SUM_EXPR:
3291 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3292 && SCALAR_INT_MODE_P (mode))
3295 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3297 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3299 return simplify_gen_binary (PLUS, mode, op0, op1);
3304 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
3308 #ifdef ENABLE_CHECKING
3317 /* Return an RTX equivalent to the source bind value of the tree expression EXP. */
3321 expand_debug_source_expr (tree exp)
3324 enum machine_mode mode = VOIDmode, inner_mode;
3326 switch (TREE_CODE (exp))
3330 rtx incoming = DECL_INCOMING_RTL (exp);
3331 mode = DECL_MODE (exp);
3333 && GET_MODE (incoming) != BLKmode
3334 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
3335 || (MEM_P (incoming)
3336 && REG_P (XEXP (incoming, 0))
3337 && HARD_REGISTER_P (XEXP (incoming, 0)))))
3339 op0 = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
3340 ENTRY_VALUE_EXP (op0) = incoming;
3345 && !TREE_ADDRESSABLE (exp)
3346 && GET_MODE (incoming) != BLKmode
3347 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
3348 || (GET_CODE (XEXP (incoming, 0)) == PLUS
3349 && XEXP (XEXP (incoming, 0), 0)
3350 == virtual_incoming_args_rtx
3351 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
3356 /* See if this isn't an argument that has been completely optimized out. */
3358 if (!DECL_RTL_SET_P (exp)
3359 && incoming == NULL_RTX
3360 && DECL_ABSTRACT_ORIGIN (current_function_decl))
3363 if (DECL_ABSTRACT_ORIGIN (exp))
3364 aexp = DECL_ABSTRACT_ORIGIN (exp);
3365 if (DECL_CONTEXT (aexp)
3366 == DECL_ABSTRACT_ORIGIN (current_function_decl))
3368 VEC(tree, gc) **debug_args;
3371 #ifdef ENABLE_CHECKING
3373 for (parm = DECL_ARGUMENTS (current_function_decl);
3374 parm; parm = DECL_CHAIN (parm))
3375 gcc_assert (parm != exp
3376 && DECL_ABSTRACT_ORIGIN (parm) != aexp);
3378 debug_args = decl_debug_args_lookup (current_function_decl);
3379 if (debug_args != NULL)
3381 for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl);
3384 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
3394 if (op0 == NULL_RTX)
3397 inner_mode = GET_MODE (op0);
3398 if (mode == inner_mode)
3401 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3403 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3404 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3405 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3406 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3408 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3410 else if (FLOAT_MODE_P (mode))
3412 else if (FLOAT_MODE_P (inner_mode))
3414 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3415 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3417 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3419 else if (CONSTANT_P (op0)
3420 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
3421 op0 = simplify_gen_subreg (mode, op0, inner_mode,
3422 subreg_lowpart_offset (mode, inner_mode));
3423 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3424 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3426 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3431 /* Expand the _LOCs in debug insns. We run this after expanding all
3432 regular insns, so that any variables referenced in the function
3433 will have their DECL_RTLs set. */
3436 expand_debug_locations (void)
3439 rtx last = get_last_insn ();
3440 int save_strict_alias = flag_strict_aliasing;
3442 /* New alias sets while setting up memory attributes cause
3443 -fcompare-debug failures, even though it doesn't bring about any codegen changes. */
3445 flag_strict_aliasing = 0;
3447 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3448 if (DEBUG_INSN_P (insn))
3450 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3452 enum machine_mode mode;
3454 if (value == NULL_TREE)
3458 if (INSN_VAR_LOCATION_STATUS (insn)
3459 == VAR_INIT_STATUS_UNINITIALIZED)
3460 val = expand_debug_source_expr (value);
3462 val = expand_debug_expr (value);
3463 gcc_assert (last == get_last_insn ());
3467 val = gen_rtx_UNKNOWN_VAR_LOC ();
3470 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3472 gcc_assert (mode == GET_MODE (val)
3473 || (GET_MODE (val) == VOIDmode
3474 && (CONST_INT_P (val)
3475 || GET_CODE (val) == CONST_FIXED
3476 || GET_CODE (val) == CONST_DOUBLE
3477 || GET_CODE (val) == LABEL_REF)));
3480 INSN_VAR_LOCATION_LOC (insn) = val;
3483 flag_strict_aliasing = save_strict_alias;
3486 /* Expand basic block BB from GIMPLE trees to RTL. */
3489 expand_gimple_basic_block (basic_block bb)
3491 gimple_stmt_iterator gsi;
3500 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3503 /* Note that since we are now transitioning from GIMPLE to RTL, we
3504 cannot use the gsi_*_bb() routines because they expect the basic
3505 block to be in GIMPLE, instead of RTL. Therefore, we need to
3506 access the BB sequence directly. */
3507 stmts = bb_seq (bb);
3508 bb->il.gimple = NULL;
3509 rtl_profile_for_bb (bb);
3510 init_rtl_bb_info (bb);
3511 bb->flags |= BB_RTL;
3513 /* Remove the RETURN_EXPR if we may fall through to the exit instead. */
3515 gsi = gsi_last (stmts);
3516 if (!gsi_end_p (gsi)
3517 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3519 gimple ret_stmt = gsi_stmt (gsi);
3521 gcc_assert (single_succ_p (bb));
3522 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3524 if (bb->next_bb == EXIT_BLOCK_PTR
3525 && !gimple_return_retval (ret_stmt))
3527 gsi_remove (&gsi, false);
3528 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3532 gsi = gsi_start (stmts);
3533 if (!gsi_end_p (gsi))
3535 stmt = gsi_stmt (gsi);
3536 if (gimple_code (stmt) != GIMPLE_LABEL)
3540 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3544 last = get_last_insn ();
3548 expand_gimple_stmt (stmt);
3553 emit_label ((rtx) *elt);
3555 /* Java emits line number notes at the top of labels.
3556 ??? Make this go away once line number notes are obsoleted. */
3557 BB_HEAD (bb) = NEXT_INSN (last);
3558 if (NOTE_P (BB_HEAD (bb)))
3559 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3560 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3562 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3565 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3567 NOTE_BASIC_BLOCK (note) = bb;
3569 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3573 stmt = gsi_stmt (gsi);
3575 /* If this statement is a non-debug one, and we generate debug
3576 insns, then this one might be the last real use of a TERed
3577 SSA_NAME, but where there are still some debug uses further
3578 down. Expanding the current SSA name in such further debug
3579 uses by their RHS might lead to wrong debug info, as coalescing
3580 might make the operands of such RHS be placed into the same
3581 pseudo as something else. Like so:
3582 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
use (a_1);
a_2 = ...
#DEBUG ... => a_1
3586 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3587 If we now were to expand a_1 by its RHS (a_0 + 1) in the debug use,
3588 the write to a_2 would actually have clobbered the place which
would be remembered.
3591 So, instead of that, we recognize the situation, and generate
3592 debug temporaries at the last real use of TERed SSA names:
a_1 = a_0 + 1;
#DEBUG #D1 => a_1
use (a_1);
a_2 = ...
#DEBUG ... => #D1 */
3599 if (MAY_HAVE_DEBUG_INSNS
3601 && !is_gimple_debug (stmt))
3607 location_t sloc = get_curr_insn_source_location ();
3608 tree sblock = get_curr_insn_block ();
3610 /* Look for SSA names that have their last use here (TERed
3611 names always have only one real use). */
3612 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3613 if ((def = get_gimple_for_ssa_name (op)))
3615 imm_use_iterator imm_iter;
3616 use_operand_p use_p;
3617 bool have_debug_uses = false;
3619 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3621 if (gimple_debug_bind_p (USE_STMT (use_p)))
3623 have_debug_uses = true;
3628 if (have_debug_uses)
3630 /* OP is a TERed SSA name, with DEF its defining
3631 statement, and where OP is used in further debug
3632 instructions. Generate a debug temporary, and
3633 replace all uses of OP in debug insns with that
3636 tree value = gimple_assign_rhs_to_tree (def);
3637 tree vexpr = make_node (DEBUG_EXPR_DECL);
3639 enum machine_mode mode;
3641 set_curr_insn_source_location (gimple_location (def));
3642 set_curr_insn_block (gimple_block (def));
3644 DECL_ARTIFICIAL (vexpr) = 1;
3645 TREE_TYPE (vexpr) = TREE_TYPE (value);
3647 mode = DECL_MODE (value);
3649 mode = TYPE_MODE (TREE_TYPE (value));
3650 DECL_MODE (vexpr) = mode;
3652 val = gen_rtx_VAR_LOCATION
3653 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3655 emit_debug_insn (val);
3657 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3659 if (!gimple_debug_bind_p (debugstmt))
3662 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3663 SET_USE (use_p, vexpr);
3665 update_stmt (debugstmt);
3669 set_curr_insn_source_location (sloc);
3670 set_curr_insn_block (sblock);
3673 currently_expanding_gimple_stmt = stmt;
3675 /* Expand this statement, then evaluate the resulting RTL and
3676 fixup the CFG accordingly. */
3677 if (gimple_code (stmt) == GIMPLE_COND)
3679 new_bb = expand_gimple_cond (bb, stmt);
3683 else if (gimple_debug_bind_p (stmt))
3685 location_t sloc = get_curr_insn_source_location ();
3686 tree sblock = get_curr_insn_block ();
3687 gimple_stmt_iterator nsi = gsi;
3691 tree var = gimple_debug_bind_get_var (stmt);
3694 enum machine_mode mode;
3696 if (gimple_debug_bind_has_value_p (stmt))
3697 value = gimple_debug_bind_get_value (stmt);
3701 last = get_last_insn ();
3703 set_curr_insn_source_location (gimple_location (stmt));
3704 set_curr_insn_block (gimple_block (stmt));
3707 mode = DECL_MODE (var);
3709 mode = TYPE_MODE (TREE_TYPE (var));
3711 val = gen_rtx_VAR_LOCATION
3712 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3714 emit_debug_insn (val);
3716 if (dump_file && (dump_flags & TDF_DETAILS))
3718 /* We can't dump the insn with a TREE where an RTX is expected. */
3720 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
3721 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3722 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
3725 /* In order not to generate too many debug temporaries,
3726 we delink all uses of debug statements we already expanded.
3727 Therefore debug statements between definition and real
3728 use of TERed SSA names will continue to use the SSA name,
3729 and not be replaced with debug temps. */
3730 delink_stmt_imm_use (stmt);
3734 if (gsi_end_p (nsi))
3736 stmt = gsi_stmt (nsi);
3737 if (!gimple_debug_bind_p (stmt))
3741 set_curr_insn_source_location (sloc);
3742 set_curr_insn_block (sblock);
3744 else if (gimple_debug_source_bind_p (stmt))
3746 location_t sloc = get_curr_insn_source_location ();
3747 tree sblock = get_curr_insn_block ();
3748 tree var = gimple_debug_source_bind_get_var (stmt);
3749 tree value = gimple_debug_source_bind_get_value (stmt);
3751 enum machine_mode mode;
3753 last = get_last_insn ();
3755 set_curr_insn_source_location (gimple_location (stmt));
3756 set_curr_insn_block (gimple_block (stmt));
3758 mode = DECL_MODE (var);
3760 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
3761 VAR_INIT_STATUS_UNINITIALIZED);
3763 emit_debug_insn (val);
3765 if (dump_file && (dump_flags & TDF_DETAILS))
3767 /* We can't dump the insn with a TREE where an RTX is expected. */
3769 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
3770 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3771 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
3774 set_curr_insn_source_location (sloc);
3775 set_curr_insn_block (sblock);
3779 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
3782 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
3793 def_operand_p def_p;
3794 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
3798 /* Ignore this stmt if it is in the list of
3799 replaceable expressions. */
3801 && bitmap_bit_p (SA.values,
3802 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
3805 last = expand_gimple_stmt (stmt);
3806 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3811 currently_expanding_gimple_stmt = NULL;
3813 /* Expand implicit goto and convert goto_locus. */
3814 FOR_EACH_EDGE (e, ei, bb->succs)
3816 if (e->goto_locus && e->goto_block)
3818 set_curr_insn_source_location (e->goto_locus);
3819 set_curr_insn_block (e->goto_block);
3820 e->goto_locus = curr_insn_locator ();
3822 e->goto_block = NULL;
3823 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
3825 emit_jump (label_rtx_for_bb (e->dest));
3826 e->flags &= ~EDGE_FALLTHRU;
3830 /* Expanded RTL can create a jump in the last instruction of the block.
3831 Such a jump might later be assumed to be a jump to the successor and break edge insertion.
3832 We need to insert a dummy move to prevent this. PR41440. */
3833 if (single_succ_p (bb)
3834 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
3835 && (last = get_last_insn ())
3838 rtx dummy = gen_reg_rtx (SImode);
3839 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
3842 do_pending_stack_adjust ();
3844 /* Find the block tail. The last insn in the block is the insn
3845 before a barrier and/or table jump insn. */
3846 last = get_last_insn ();
3847 if (BARRIER_P (last))
3848 last = PREV_INSN (last);
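/* A tablejump is laid out as the jump insn, then the table's
   CODE_LABEL, then the JUMP_TABLE_DATA insn, so step back over
   both the table and its label. */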
3849 if (JUMP_TABLE_DATA_P (last))
3850 last = PREV_INSN (PREV_INSN (last));
3853 update_bb_for_insn (bb);
3859 /* Create a basic block for initialization code. */
3862 construct_init_block (void)
3864 basic_block init_block, first_block;
3868 /* Multiple entry points not supported yet. */
3869 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
3870 init_rtl_bb_info (ENTRY_BLOCK_PTR);
3871 init_rtl_bb_info (EXIT_BLOCK_PTR);
3872 ENTRY_BLOCK_PTR->flags |= BB_RTL;
3873 EXIT_BLOCK_PTR->flags |= BB_RTL;
3875 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
3877 /* When the entry edge points to the first basic block, we don't need a jump;
3878 otherwise we have to jump to the proper target. */
3879 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
3881 tree label = gimple_block_label (e->dest);
3883 emit_jump (label_rtx (label));
3887 flags = EDGE_FALLTHRU;
3889 init_block = create_basic_block (NEXT_INSN (get_insns ()),
3892 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
3893 init_block->count = ENTRY_BLOCK_PTR->count;
3896 first_block = e->dest;
3897 redirect_edge_succ (e, init_block);
3898 e = make_edge (init_block, first_block, flags);
3901 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3902 e->probability = REG_BR_PROB_BASE;
3903 e->count = ENTRY_BLOCK_PTR->count;
3905 update_bb_for_insn (init_block);
3909 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
3910 found in the block tree. */
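/* E.g. the outermost block (DECL_INITIAL of the function) gets
   depth 0, its subblocks depth 1, and so on; sibling blocks on
   the same BLOCK_CHAIN share a depth. */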
3913 set_block_levels (tree block, int level)
3917 BLOCK_NUMBER (block) = level;
3918 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
3919 block = BLOCK_CHAIN (block);
3923 /* Create a block containing landing pads and similar stuff. */
3926 construct_exit_block (void)
3928 rtx head = get_last_insn ();
3930 basic_block exit_block;
3934 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
3936 rtl_profile_for_bb (EXIT_BLOCK_PTR);
3938 /* Make sure the locus is set to the end of the function, so that
3939 epilogue line numbers and warnings are set properly. */
3940 if (cfun->function_end_locus != UNKNOWN_LOCATION)
3941 input_location = cfun->function_end_locus;
3943 /* The following insns belong to the top scope. */
3944 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3946 /* Generate rtl for function exit. */
3947 expand_function_end ();
3949 end = get_last_insn ();
3952 /* While emitting the function end we could move the end of the last basic block. */
3954 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
3955 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
3956 head = NEXT_INSN (head);
3957 exit_block = create_basic_block (NEXT_INSN (head), end,
3958 EXIT_BLOCK_PTR->prev_bb);
3959 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
3960 exit_block->count = EXIT_BLOCK_PTR->count;
3963 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
3965 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
3966 if (!(e->flags & EDGE_ABNORMAL))
3967 redirect_edge_succ (e, exit_block);
3972 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3973 e->probability = REG_BR_PROB_BASE;
3974 e->count = EXIT_BLOCK_PTR->count;
3975 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
3978 e->count -= e2->count;
3979 exit_block->count -= e2->count;
3980 exit_block->frequency -= EDGE_FREQUENCY (e2);
3984 if (exit_block->count < 0)
3985 exit_block->count = 0;
3986 if (exit_block->frequency < 0)
3987 exit_block->frequency = 0;
3988 update_bb_for_insn (exit_block);
3991 /* Helper function for discover_nonconstant_array_refs.
3992 Look for ARRAY_REF nodes with non-constant indexes and mark them addressable. */
3996 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
3997 void *data ATTRIBUTE_UNUSED)
4001 if (IS_TYPE_OR_DECL_P (t))
4003 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4005 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4006 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
4007 && (!TREE_OPERAND (t, 2)
4008 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4009 || (TREE_CODE (t) == COMPONENT_REF
4010 && (!TREE_OPERAND (t,2)
4011 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4012 || TREE_CODE (t) == BIT_FIELD_REF
4013 || TREE_CODE (t) == REALPART_EXPR
4014 || TREE_CODE (t) == IMAGPART_EXPR
4015 || TREE_CODE (t) == VIEW_CONVERT_EXPR
4016 || CONVERT_EXPR_P (t))
4017 t = TREE_OPERAND (t, 0);
4019 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4021 t = get_base_address (t);
4023 && DECL_MODE (t) != BLKmode)
4024 TREE_ADDRESSABLE (t) = 1;
4033 /* RTL expansion is not able to compile array references with variable
4034 offsets for arrays stored in a single register. Discover such
4035 expressions and mark the variables as addressable to avoid this scenario. */
4039 discover_nonconstant_array_refs (void)
4042 gimple_stmt_iterator gsi;
4045 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4047 gimple stmt = gsi_stmt (gsi);
4048 if (!is_gimple_debug (stmt))
4049 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
4053 /* This function sets crtl->args.internal_arg_pointer to a virtual
4054 register if DRAP is needed. The local register allocator will replace
4055 virtual_incoming_args_rtx with the virtual register. */
4058 expand_stack_alignment (void)
4061 unsigned int preferred_stack_boundary;
4063 if (! SUPPORTS_STACK_ALIGNMENT)
4066 if (cfun->calls_alloca
4067 || cfun->has_nonlocal_label
4068 || crtl->has_nonlocal_goto)
4069 crtl->need_drap = true;
4071 /* Call update_stack_boundary here again to update incoming stack
4072 boundary. It may set incoming stack alignment to a different
4073 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
4074 use the minimum incoming stack alignment to check if it is OK
4075 to perform sibcall optimization since sibcall optimization will
4076 only align the outgoing stack to incoming stack boundary. */
4077 if (targetm.calls.update_stack_boundary)
4078 targetm.calls.update_stack_boundary ();
4080 /* The incoming stack frame has to be aligned at least at
4081 parm_stack_boundary. */
4082 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
4084 /* Update crtl->stack_alignment_estimated and use it later to align
4085 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
4086 exceptions, since the callgraph doesn't collect incoming stack alignment in this case. */
4088 if (cfun->can_throw_non_call_exceptions
4089 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
4090 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
4092 preferred_stack_boundary = crtl->preferred_stack_boundary;
4093 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
4094 crtl->stack_alignment_estimated = preferred_stack_boundary;
4095 if (preferred_stack_boundary > crtl->stack_alignment_needed)
4096 crtl->stack_alignment_needed = preferred_stack_boundary;
4098 gcc_assert (crtl->stack_alignment_needed
4099 <= crtl->stack_alignment_estimated);
4101 crtl->stack_realign_needed
4102 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
4103 crtl->stack_realign_tried = crtl->stack_realign_needed;
4105 crtl->stack_realign_processed = true;
4107 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack realignment. */
4109 gcc_assert (targetm.calls.get_drap_rtx != NULL);
4110 drap_rtx = targetm.calls.get_drap_rtx ();
4112 /* stack_realign_drap and drap_rtx must match. */
4113 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
4115 /* Do nothing if NULL is returned, which means DRAP is not needed. */
4116 if (NULL != drap_rtx)
4118 crtl->args.internal_arg_pointer = drap_rtx;
4120 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is needed. */
4122 fixup_tail_calls ();
4126 /* Translate the intermediate representation contained in the CFG
4127 from GIMPLE trees to RTL.
4129 We do conversion per basic block and preserve/update the tree CFG.
4130 This implies we have to do some magic as the CFG can simultaneously
4131 consist of basic blocks containing RTL and GIMPLE trees. This can
4132 confuse the CFG hooks, so be careful not to manipulate the CFG during the expansion. */
4136 gimple_expand_cfg (void)
4138 basic_block bb, init_block;
4145 timevar_push (TV_OUT_OF_SSA);
4146 rewrite_out_of_ssa (&SA);
4147 timevar_pop (TV_OUT_OF_SSA);
4148 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
4151 /* Some backends want to know that we are expanding to RTL. */
4152 currently_expanding_to_rtl = 1;
4154 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
4156 insn_locators_alloc ();
4157 if (!DECL_IS_BUILTIN (current_function_decl))
4159 /* Eventually, all FEs should explicitly set function_start_locus. */
4160 if (cfun->function_start_locus == UNKNOWN_LOCATION)
4161 set_curr_insn_source_location
4162 (DECL_SOURCE_LOCATION (current_function_decl));
4164 set_curr_insn_source_location (cfun->function_start_locus);
4167 set_curr_insn_source_location (UNKNOWN_LOCATION);
4168 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4169 prologue_locator = curr_insn_locator ();
4171 #ifdef INSN_SCHEDULING
4172 init_sched_attrs ();
4175 /* Make sure the first insn is a note even if we don't want linenums.
4176 This makes sure the first insn will never be deleted.
4177 Also, final expects a note to appear there. */
4178 emit_note (NOTE_INSN_DELETED);
4180 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
4181 discover_nonconstant_array_refs ();
4183 targetm.expand_to_rtl_hook ();
4184 crtl->stack_alignment_needed = STACK_BOUNDARY;
4185 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
4186 crtl->stack_alignment_estimated = 0;
4187 crtl->preferred_stack_boundary = STACK_BOUNDARY;
4188 cfun->cfg->max_jumptable_ents = 0;
4190 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
4191 of the function section at expansion time to predict the distance of calls. */
4192 resolve_unique_section (current_function_decl, 0, flag_function_sections);
4194 /* Expand the variables recorded during gimple lowering. */
4195 timevar_push (TV_VAR_EXPAND);
4198 expand_used_vars ();
4200 var_seq = get_insns ();
4202 timevar_pop (TV_VAR_EXPAND);
4204 /* Honor stack protection warnings. */
4205 if (warn_stack_protect)
4207 if (cfun->calls_alloca)
4208 warning (OPT_Wstack_protector,
4209 "stack protector not protecting local variables: "
4210 "variable length buffer");
4211 if (has_short_buffer && !crtl->stack_protect_guard)
4212 warning (OPT_Wstack_protector,
4213 "stack protector not protecting function: "
4214 "all local arrays are less than %d bytes long",
4215 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
4218 /* Set up parameters and prepare for return, for the function. */
4219 expand_function_start (current_function_decl);
4221 /* If we emitted any instructions for setting up the variables,
4222 emit them before the FUNCTION_START note. */
4225 emit_insn_before (var_seq, parm_birth_insn);
4227 /* In expand_function_end we'll insert the alloca save/restore
4228 before parm_birth_insn. We've just inserted an alloca call.
4229 Adjust the pointer to match. */
4230 parm_birth_insn = var_seq;
4233 /* Now that we also have the parameter RTXs, copy them over to our partitions. */
4235 for (i = 0; i < SA.map->num_partitions; i++)
4237 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
4239 if (TREE_CODE (var) != VAR_DECL
4240 && !SA.partition_to_pseudo[i])
4241 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
4242 gcc_assert (SA.partition_to_pseudo[i]);
4244 /* If this decl was marked as living in multiple places, reset
4245 this now to NULL. */
4246 if (DECL_RTL_IF_SET (var) == pc_rtx)
4247 SET_DECL_RTL (var, NULL);
4249 /* Some RTL parts really want to look at DECL_RTL(x) when x
4250 was a decl marked in REG_ATTR or MEM_ATTR. We could use
4251 SET_DECL_RTL here making this available, but that would mean
4252 to select one of the potentially many RTLs for one DECL. Instead
4253 of doing that we simply reset the MEM_EXPR of the RTL in question,
4254 then nobody can get at it and hence nobody can call DECL_RTL on it. */
4255 if (!DECL_RTL_SET_P (var))
4257 if (MEM_P (SA.partition_to_pseudo[i]))
4258 set_mem_expr (SA.partition_to_pseudo[i], NULL);
4262 /* If this function is `main', emit a call to `__main'
4263 to run global initializers, etc. */
4264 if (DECL_NAME (current_function_decl)
4265 && MAIN_NAME_P (DECL_NAME (current_function_decl))
4266 && DECL_FILE_SCOPE_P (current_function_decl))
4267 expand_main_function ();
4269 /* Initialize the stack_protect_guard field. This must happen after the
4270 call to __main (if any) so that the external decl is initialized. */
4271 if (crtl->stack_protect_guard)
4272 stack_protect_prologue ();
4274 expand_phi_nodes (&SA);
4276 /* Register rtl specific functions for cfg. */
4277 rtl_register_cfg_hooks ();
4279 init_block = construct_init_block ();
4281 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleared from the
4282 remaining edges later. */
4283 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
4284 e->flags &= ~EDGE_EXECUTABLE;
4286 lab_rtx_for_bb = pointer_map_create ();
4287 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
4288 bb = expand_gimple_basic_block (bb);
4290 if (MAY_HAVE_DEBUG_INSNS)
4291 expand_debug_locations ();
4293 execute_free_datastructures ();
4294 timevar_push (TV_OUT_OF_SSA);
4295 finish_out_of_ssa (&SA);
4296 timevar_pop (TV_OUT_OF_SSA);
4298 timevar_push (TV_POST_EXPAND);
4299 /* We are no longer in SSA form. */
4300 cfun->gimple_df->in_ssa_p = false;
4302 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
4303 conservatively to true until they are all profile-aware. */
4304 pointer_map_destroy (lab_rtx_for_bb);
4307 construct_exit_block ();
4308 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4309 insn_locators_finalize ();
4311 /* Zap the tree EH table. */
4312 set_eh_throw_stmt_table (cfun, NULL);
4314 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
4315 to split edges, which edge insertions might do. */
4316 rebuild_jump_labels (get_insns ());
4318 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
4322 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4326 rebuild_jump_labels_chain (e->insns.r);
4327 /* Avoid putting insns before parm_birth_insn. */
4328 if (e->src == ENTRY_BLOCK_PTR
4329 && single_succ_p (ENTRY_BLOCK_PTR)
4332 rtx insns = e->insns.r;
4333 e->insns.r = NULL_RTX;
4334 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
4337 commit_one_edge_insertion (e);
4344 /* We're done expanding trees to RTL. */
4345 currently_expanding_to_rtl = 0;
4347 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
4351 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4353 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
4354 e->flags &= ~EDGE_EXECUTABLE;
4356 /* At the moment not all abnormal edges match the RTL
4357 representation. It is safe to remove them here as
4358 find_many_sub_basic_blocks will rediscover them.
4359 In the future we should get this fixed properly. */
4360 if ((e->flags & EDGE_ABNORMAL)
4361 && !(e->flags & EDGE_SIBCALL))
4368 blocks = sbitmap_alloc (last_basic_block);
4369 sbitmap_ones (blocks);
4370 find_many_sub_basic_blocks (blocks);
4371 sbitmap_free (blocks);
4372 purge_all_dead_edges ();
4376 expand_stack_alignment ();
4378 #ifdef ENABLE_CHECKING
4379 verify_flow_info ();
4382 /* There's no need to defer outputting this function any more; we
4383 know we want to output it. */
4384 DECL_DEFER_OUTPUT (current_function_decl) = 0;
4386 /* Now that we're done expanding trees to RTL, we shouldn't have any
4387 more CONCATs anywhere. */
4388 generating_concat_p = 0;
4393 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
4394 /* And the pass manager will dump RTL for us. */
4397 /* If we're emitting a nested function, make sure its parent gets
4398 emitted as well. Doing otherwise confuses debug info. */
4401 for (parent = DECL_CONTEXT (current_function_decl);
4402 parent != NULL_TREE;
4403 parent = get_containing_scope (parent))
4404 if (TREE_CODE (parent) == FUNCTION_DECL)
4405 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
4408 /* We are now committed to emitting code for this function. Do any
4409 preparation, such as emitting abstract debug info for the inline function,
4410 before it gets mangled by optimization. */
4411 if (cgraph_function_possibly_inlined_p (current_function_decl))
4412 (*debug_hooks->outlining_inline_function) (current_function_decl);
4414 TREE_ASM_WRITTEN (current_function_decl) = 1;
4416 /* After expanding, the return labels are no longer needed. */
4417 return_label = NULL;
4418 naked_return_label = NULL;
4419 /* Tag the blocks with a depth number so that change_scope can find
4420 the common parent easily. */
4421 set_block_levels (DECL_INITIAL (cfun->decl), 0);
4422 default_rtl_profile ();
4423 timevar_pop (TV_POST_EXPAND);
4427 struct rtl_opt_pass pass_expand =
4431 "expand", /* name */
4433 gimple_expand_cfg, /* execute */
4436 0, /* static_pass_number */
4437 TV_EXPAND, /* tv_id */
4438 PROP_ssa | PROP_gimple_leh | PROP_cfg
4439 | PROP_gimple_lcx, /* properties_required */
4440 PROP_rtl, /* properties_provided */
4441 PROP_ssa | PROP_trees, /* properties_destroyed */
4442 TODO_verify_ssa | TODO_verify_flow
4443 | TODO_verify_stmts, /* todo_flags_start */
4444 TODO_ggc_collect /* todo_flags_finish */