1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
5 Major work done by Sebastian Pop <s.pop@laposte.net>,
6 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
26 #include "coretypes.h"
32 #include "tree-iterator.h"
33 #include "tree-inline.h"
34 #include "diagnostic.h"
35 #include "langhooks.h"
36 #include "langhooks-def.h"
37 #include "tree-flow.h"
51 #include "pointer-set.h"
52 #include "splay-tree.h"
57 enum gimplify_omp_var_data
63 GOVD_FIRSTPRIVATE = 16,
64 GOVD_LASTPRIVATE = 32,
67 GOVD_DEBUG_PRIVATE = 256,
68 GOVD_PRIVATE_OUTER_REF = 512,
69 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
70 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LOCAL)
79 ORT_COMBINED_PARALLEL = 3
82 struct gimplify_omp_ctx
84 struct gimplify_omp_ctx *outer_context;
86 struct pointer_set_t *privatized_types;
88 enum omp_clause_default_kind default_kind;
89 enum omp_region_type region_type;
92 static struct gimplify_ctx *gimplify_ctxp;
93 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
96 /* Formal (expression) temporary table handling: Multiple occurrences of
97 the same scalar expression are evaluated into the same temporary. */
99 typedef struct gimple_temp_hash_elt
102 tree temp; /* Value */
105 /* Forward declarations. */
106 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
108 /* Mark X addressable. Unlike the langhook we expect X to be in gimple
109 form and we don't do any syntax checking. */
111 mark_addressable (tree x)
113 while (handled_component_p (x))
114 x = TREE_OPERAND (x, 0);
115 if (TREE_CODE (x) != VAR_DECL
116 && TREE_CODE (x) != PARM_DECL
117 && TREE_CODE (x) != RESULT_DECL)
119 TREE_ADDRESSABLE (x) = 1;
122 /* Return a hash value for a formal temporary table entry. */
125 gimple_tree_hash (const void *p)
127 tree t = ((const elt_t *) p)->val;
128 return iterative_hash_expr (t, 0);
131 /* Compare two formal temporary table entries. */
134 gimple_tree_eq (const void *p1, const void *p2)
136 tree t1 = ((const elt_t *) p1)->val;
137 tree t2 = ((const elt_t *) p2)->val;
138 enum tree_code code = TREE_CODE (t1);
140 if (TREE_CODE (t2) != code
141 || TREE_TYPE (t1) != TREE_TYPE (t2))
144 if (!operand_equal_p (t1, t2, 0))
147 /* Only allow them to compare equal if they also hash equal; otherwise
148 results are nondeterminate, and we fail bootstrap comparison. */
149 gcc_assert (gimple_tree_hash (p1) == gimple_tree_hash (p2));
154 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
155 *SEQ_P is NULL, a new sequence is allocated. This function is
156 similar to gimple_seq_add_stmt, but does not scan the operands.
157 During gimplification, we need to manipulate statement sequences
158 before the def/use vectors have been constructed. */
161 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
163 gimple_stmt_iterator si;
169 *seq_p = gimple_seq_alloc ();
171 si = gsi_last (*seq_p);
173 gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT);
176 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
177 NULL, a new sequence is allocated. This function is
178 similar to gimple_seq_add_seq, but does not scan the operands.
179 During gimplification, we need to manipulate statement sequences
180 before the def/use vectors have been constructed. */
183 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
185 gimple_stmt_iterator si;
191 *dst_p = gimple_seq_alloc ();
193 si = gsi_last (*dst_p);
194 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
197 /* Set up a context for the gimplifier. */
200 push_gimplify_context (struct gimplify_ctx *c)
202 memset (c, '\0', sizeof (*c));
203 c->prev_context = gimplify_ctxp;
207 /* Tear down a context for the gimplifier. If BODY is non-null, then
208 put the temporaries into the outer BIND_EXPR. Otherwise, put them
211 BODY is not a sequence, but the first tuple in a sequence. */
214 pop_gimplify_context (gimple body)
216 struct gimplify_ctx *c = gimplify_ctxp;
218 gcc_assert (c && (c->bind_expr_stack == NULL
219 || VEC_empty (gimple, c->bind_expr_stack)));
220 VEC_free (gimple, heap, c->bind_expr_stack);
221 gimplify_ctxp = c->prev_context;
224 declare_vars (c->temps, body, false);
226 record_vars (c->temps);
229 htab_delete (c->temp_htab);
233 gimple_push_bind_expr (gimple gimple_bind)
235 if (gimplify_ctxp->bind_expr_stack == NULL)
236 gimplify_ctxp->bind_expr_stack = VEC_alloc (gimple, heap, 8);
237 VEC_safe_push (gimple, heap, gimplify_ctxp->bind_expr_stack, gimple_bind);
241 gimple_pop_bind_expr (void)
243 VEC_pop (gimple, gimplify_ctxp->bind_expr_stack);
247 gimple_current_bind_expr (void)
249 return VEC_last (gimple, gimplify_ctxp->bind_expr_stack);
252 /* Return the stack GIMPLE_BINDs created during gimplification. */
255 gimple_bind_expr_stack (void)
257 return gimplify_ctxp->bind_expr_stack;
260 /* Returns true iff there is a COND_EXPR between us and the innermost
261 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
264 gimple_conditional_context (void)
266 return gimplify_ctxp->conditions > 0;
269 /* Note that we've entered a COND_EXPR. */
272 gimple_push_condition (void)
274 #ifdef ENABLE_GIMPLE_CHECKING
275 if (gimplify_ctxp->conditions == 0)
276 gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
278 ++(gimplify_ctxp->conditions);
281 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
282 now, add any conditional cleanups we've seen to the prequeue. */
285 gimple_pop_condition (gimple_seq *pre_p)
287 int conds = --(gimplify_ctxp->conditions);
289 gcc_assert (conds >= 0);
292 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
293 gimplify_ctxp->conditional_cleanups = NULL;
297 /* A stable comparison routine for use with splay trees and DECLs. */
300 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
305 return DECL_UID (a) - DECL_UID (b);
308 /* Create a new omp construct that deals with variable remapping. */
310 static struct gimplify_omp_ctx *
311 new_omp_context (enum omp_region_type region_type)
313 struct gimplify_omp_ctx *c;
315 c = XCNEW (struct gimplify_omp_ctx);
316 c->outer_context = gimplify_omp_ctxp;
317 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
318 c->privatized_types = pointer_set_create ();
319 c->location = input_location;
320 c->region_type = region_type;
321 if (region_type != ORT_TASK)
322 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
324 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
329 /* Destroy an omp construct that deals with variable remapping. */
332 delete_omp_context (struct gimplify_omp_ctx *c)
334 splay_tree_delete (c->variables);
335 pointer_set_destroy (c->privatized_types);
339 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
340 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
342 /* A subroutine of append_to_statement_list{,_force}. T is not NULL. */
345 append_to_statement_list_1 (tree t, tree *list_p)
348 tree_stmt_iterator i;
352 if (t && TREE_CODE (t) == STATEMENT_LIST)
357 *list_p = list = alloc_stmt_list ();
361 tsi_link_after (&i, t, TSI_CONTINUE_LINKING);
364 /* Add T to the end of the list container pointed to by LIST_P.
365 If T is an expression with no effects, it is ignored. */
368 append_to_statement_list (tree t, tree *list_p)
370 if (t && TREE_SIDE_EFFECTS (t))
371 append_to_statement_list_1 (t, list_p);
374 /* Similar, but the statement is always added, regardless of side effects. */
377 append_to_statement_list_force (tree t, tree *list_p)
380 append_to_statement_list_1 (t, list_p);
383 /* Both gimplify the statement T and append it to *SEQ_P. This function
384 behaves exactly as gimplify_stmt, but you don't have to pass T as a
388 gimplify_and_add (tree t, gimple_seq *seq_p)
390 gimplify_stmt (&t, seq_p);
393 /* Gimplify statement T into sequence *SEQ_P, and return the first
394 tuple in the sequence of generated tuples for this statement.
395 Return NULL if gimplifying T produced no tuples. */
398 gimplify_and_return_first (tree t, gimple_seq *seq_p)
400 gimple_stmt_iterator last = gsi_last (*seq_p);
402 gimplify_and_add (t, seq_p);
404 if (!gsi_end_p (last))
407 return gsi_stmt (last);
410 return gimple_seq_first_stmt (*seq_p);
413 /* Strip off a legitimate source ending from the input string NAME of
414 length LEN. Rather than having to know the names used by all of
415 our front ends, we strip off an ending of a period followed by
416 up to five characters. (Java uses ".class".) */
419 remove_suffix (char *name, int len)
423 for (i = 2; i < 8 && len > i; i++)
425 if (name[len - i] == '.')
427 name[len - i] = '\0';
433 /* Create a new temporary name with PREFIX. Returns an identifier. */
435 static GTY(()) unsigned int tmp_var_id_num;
438 create_tmp_var_name (const char *prefix)
444 char *preftmp = ASTRDUP (prefix);
446 remove_suffix (preftmp, strlen (preftmp));
450 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
451 return get_identifier (tmp_name);
455 /* Create a new temporary variable declaration of type TYPE.
456 Does NOT push it into the current binding. */
459 create_tmp_var_raw (tree type, const char *prefix)
464 /* Make the type of the variable writable. */
465 new_type = build_type_variant (type, 0, 0);
466 TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type);
468 tmp_var = build_decl (input_location,
469 VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
472 /* The variable was declared by the compiler. */
473 DECL_ARTIFICIAL (tmp_var) = 1;
474 /* And we don't want debug info for it. */
475 DECL_IGNORED_P (tmp_var) = 1;
477 /* Make the variable writable. */
478 TREE_READONLY (tmp_var) = 0;
480 DECL_EXTERNAL (tmp_var) = 0;
481 TREE_STATIC (tmp_var) = 0;
482 TREE_USED (tmp_var) = 1;
487 /* Create a new temporary variable declaration of type TYPE. DOES push the
488 variable into the current binding. Further, assume that this is called
489 only from gimplification or optimization, at which point the creation of
490 certain types are bugs. */
493 create_tmp_var (tree type, const char *prefix)
497 /* We don't allow types that are addressable (meaning we can't make copies),
498 or incomplete. We also used to reject every variable size objects here,
499 but now support those for which a constant upper bound can be obtained.
500 The processing for variable sizes is performed in gimple_add_tmp_var,
501 point at which it really matters and possibly reached via paths not going
502 through this function, e.g. after direct calls to create_tmp_var_raw. */
503 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
505 tmp_var = create_tmp_var_raw (type, prefix);
506 gimple_add_tmp_var (tmp_var);
510 /* Create a temporary with a name derived from VAL. Subroutine of
511 lookup_tmp_var; nobody else should call this function. */
514 create_tmp_from_val (tree val)
516 return create_tmp_var (TREE_TYPE (val), get_name (val));
519 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
520 an existing expression temporary. */
523 lookup_tmp_var (tree val, bool is_formal)
527 /* If not optimizing, never really reuse a temporary. local-alloc
528 won't allocate any variable that is used in more than one basic
529 block, which means it will go into memory, causing much extra
530 work in reload and final and poorer code generation, outweighing
531 the extra memory allocation here. */
532 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
533 ret = create_tmp_from_val (val);
540 if (gimplify_ctxp->temp_htab == NULL)
541 gimplify_ctxp->temp_htab
542 = htab_create (1000, gimple_tree_hash, gimple_tree_eq, free);
543 slot = htab_find_slot (gimplify_ctxp->temp_htab, (void *)&elt, INSERT);
546 elt_p = XNEW (elt_t);
548 elt_p->temp = ret = create_tmp_from_val (val);
549 *slot = (void *) elt_p;
553 elt_p = (elt_t *) *slot;
562 /* Return true if T is a CALL_EXPR or an expression that can be
563 assignmed to a temporary. Note that this predicate should only be
564 used during gimplification. See the rationale for this in
565 gimplify_modify_expr. */
568 is_gimple_reg_rhs_or_call (tree t)
570 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
571 || TREE_CODE (t) == CALL_EXPR);
574 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
575 this predicate should only be used during gimplification. See the
576 rationale for this in gimplify_modify_expr. */
579 is_gimple_mem_rhs_or_call (tree t)
581 /* If we're dealing with a renamable type, either source or dest must be
582 a renamed variable. */
583 if (is_gimple_reg_type (TREE_TYPE (t)))
584 return is_gimple_val (t);
586 return (is_gimple_val (t) || is_gimple_lvalue (t)
587 || TREE_CODE (t) == CALL_EXPR);
590 /* Helper for get_formal_tmp_var and get_initialized_tmp_var. */
593 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
598 /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
599 can create an INIT_EXPR and convert it into a GIMPLE_CALL below. */
600 gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
603 t = lookup_tmp_var (val, is_formal);
606 && (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
607 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE))
608 DECL_GIMPLE_REG_P (t) = 1;
610 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
612 if (EXPR_HAS_LOCATION (val))
613 SET_EXPR_LOCATION (mod, EXPR_LOCATION (val));
615 SET_EXPR_LOCATION (mod, input_location);
617 /* gimplify_modify_expr might want to reduce this further. */
618 gimplify_and_add (mod, pre_p);
621 /* If we're gimplifying into ssa, gimplify_modify_expr will have
622 given our temporary an SSA name. Find and return it. */
623 if (gimplify_ctxp->into_ssa)
625 gimple last = gimple_seq_last_stmt (*pre_p);
626 t = gimple_get_lhs (last);
632 /* Returns a formal temporary variable initialized with VAL. PRE_P is as
633 in gimplify_expr. Only use this function if:
635 1) The value of the unfactored expression represented by VAL will not
636 change between the initialization and use of the temporary, and
637 2) The temporary will not be otherwise modified.
639 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
640 and #2 means it is inappropriate for && temps.
642 For other cases, use get_initialized_tmp_var instead. */
645 get_formal_tmp_var (tree val, gimple_seq *pre_p)
647 return internal_get_tmp_var (val, pre_p, NULL, true);
650 /* Returns a temporary variable initialized with VAL. PRE_P and POST_P
651 are as in gimplify_expr. */
654 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
656 return internal_get_tmp_var (val, pre_p, post_p, false);
659 /* Declares all the variables in VARS in SCOPE. If DEBUG_INFO is
660 true, generate debug info for them; otherwise don't. */
663 declare_vars (tree vars, gimple scope, bool debug_info)
670 gcc_assert (gimple_code (scope) == GIMPLE_BIND);
672 temps = nreverse (last);
674 block = gimple_bind_block (scope);
675 gcc_assert (!block || TREE_CODE (block) == BLOCK);
676 if (!block || !debug_info)
678 TREE_CHAIN (last) = gimple_bind_vars (scope);
679 gimple_bind_set_vars (scope, temps);
683 /* We need to attach the nodes both to the BIND_EXPR and to its
684 associated BLOCK for debugging purposes. The key point here
685 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
686 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
687 if (BLOCK_VARS (block))
688 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
691 gimple_bind_set_vars (scope,
692 chainon (gimple_bind_vars (scope), temps));
693 BLOCK_VARS (block) = temps;
699 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
700 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
701 no such upper bound can be obtained. */
704 force_constant_size (tree var)
706 /* The only attempt we make is by querying the maximum size of objects
707 of the variable's type. */
709 HOST_WIDE_INT max_size;
711 gcc_assert (TREE_CODE (var) == VAR_DECL);
713 max_size = max_int_size_in_bytes (TREE_TYPE (var));
715 gcc_assert (max_size >= 0);
718 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
720 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
724 gimple_add_tmp_var (tree tmp)
726 gcc_assert (!TREE_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
728 /* Later processing assumes that the object size is constant, which might
729 not be true at this point. Force the use of a constant upper bound in
731 if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
732 force_constant_size (tmp);
734 DECL_CONTEXT (tmp) = current_function_decl;
735 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
739 TREE_CHAIN (tmp) = gimplify_ctxp->temps;
740 gimplify_ctxp->temps = tmp;
742 /* Mark temporaries local within the nearest enclosing parallel. */
743 if (gimplify_omp_ctxp)
745 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
746 while (ctx && ctx->region_type == ORT_WORKSHARE)
747 ctx = ctx->outer_context;
749 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
758 /* This case is for nested functions. We need to expose the locals
760 body_seq = gimple_body (current_function_decl);
761 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
765 /* Determines whether to assign a location to the statement GS. */
768 should_carry_location_p (gimple gs)
770 /* Don't emit a line note for a label. We particularly don't want to
771 emit one for the break label, since it doesn't actually correspond
772 to the beginning of the loop/switch. */
773 if (gimple_code (gs) == GIMPLE_LABEL)
779 /* Same, but for a tree. */
782 tree_should_carry_location_p (const_tree stmt)
784 /* Don't emit a line note for a label. We particularly don't want to
785 emit one for the break label, since it doesn't actually correspond
786 to the beginning of the loop/switch. */
787 if (TREE_CODE (stmt) == LABEL_EXPR)
790 /* Do not annotate empty statements, since it confuses gcov. */
791 if (!TREE_SIDE_EFFECTS (stmt))
797 /* Return true if a location should not be emitted for this statement
798 by annotate_one_with_location. */
801 gimple_do_not_emit_location_p (gimple g)
803 return gimple_plf (g, GF_PLF_1);
806 /* Mark statement G so a location will not be emitted by
807 annotate_one_with_location. */
810 gimple_set_do_not_emit_location (gimple g)
812 /* The PLF flags are initialized to 0 when a new tuple is created,
813 so no need to initialize it anywhere. */
814 gimple_set_plf (g, GF_PLF_1, true);
817 /* Set the location for gimple statement GS to LOCATION. */
820 annotate_one_with_location (gimple gs, location_t location)
822 if (!gimple_has_location (gs)
823 && !gimple_do_not_emit_location_p (gs)
824 && should_carry_location_p (gs))
825 gimple_set_location (gs, location);
828 /* Same, but for tree T. */
831 tree_annotate_one_with_location (tree t, location_t location)
833 if (CAN_HAVE_LOCATION_P (t)
834 && ! EXPR_HAS_LOCATION (t) && tree_should_carry_location_p (t))
835 SET_EXPR_LOCATION (t, location);
839 /* Set LOCATION for all the statements after iterator GSI in sequence
840 SEQ. If GSI is pointing to the end of the sequence, start with the
841 first statement in SEQ. */
844 annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi,
848 gsi = gsi_start (seq);
852 for (; !gsi_end_p (gsi); gsi_next (&gsi))
853 annotate_one_with_location (gsi_stmt (gsi), location);
857 /* Set the location for all the statements in a sequence STMT_P to LOCATION. */
860 annotate_all_with_location (gimple_seq stmt_p, location_t location)
862 gimple_stmt_iterator i;
864 if (gimple_seq_empty_p (stmt_p))
867 for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
869 gimple gs = gsi_stmt (i);
870 annotate_one_with_location (gs, location);
874 /* Same, but for statement or statement list in *STMT_P. */
877 tree_annotate_all_with_location (tree *stmt_p, location_t location)
879 tree_stmt_iterator i;
884 for (i = tsi_start (*stmt_p); !tsi_end_p (i); tsi_next (&i))
886 tree t = tsi_stmt (i);
888 /* Assuming we've already been gimplified, we shouldn't
889 see nested chaining constructs anymore. */
890 gcc_assert (TREE_CODE (t) != STATEMENT_LIST
891 && TREE_CODE (t) != COMPOUND_EXPR);
893 tree_annotate_one_with_location (t, location);
898 /* Similar to copy_tree_r() but do not copy SAVE_EXPR or TARGET_EXPR nodes.
899 These nodes model computations that should only be done once. If we
900 were to unshare something like SAVE_EXPR(i++), the gimplification
901 process would create wrong code. */
904 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
906 enum tree_code code = TREE_CODE (*tp);
907 /* Don't unshare types, decls, constants and SAVE_EXPR nodes. */
908 if (TREE_CODE_CLASS (code) == tcc_type
909 || TREE_CODE_CLASS (code) == tcc_declaration
910 || TREE_CODE_CLASS (code) == tcc_constant
911 || code == SAVE_EXPR || code == TARGET_EXPR
912 /* We can't do anything sensible with a BLOCK used as an expression,
913 but we also can't just die when we see it because of non-expression
914 uses. So just avert our eyes and cross our fingers. Silly Java. */
919 gcc_assert (code != BIND_EXPR);
920 copy_tree_r (tp, walk_subtrees, data);
926 /* Callback for walk_tree to unshare most of the shared trees rooted at
927 *TP. If *TP has been visited already (i.e., TREE_VISITED (*TP) == 1),
928 then *TP is deep copied by calling copy_tree_r.
930 This unshares the same trees as copy_tree_r with the exception of
931 SAVE_EXPR nodes. These nodes model computations that should only be
932 done once. If we were to unshare something like SAVE_EXPR(i++), the
933 gimplification process would create wrong code. */
936 copy_if_shared_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
937 void *data ATTRIBUTE_UNUSED)
940 enum tree_code code = TREE_CODE (t);
942 /* Skip types, decls, and constants. But we do want to look at their
943 types and the bounds of types. Mark them as visited so we properly
944 unmark their subtrees on the unmark pass. If we've already seen them,
945 don't look down further. */
946 if (TREE_CODE_CLASS (code) == tcc_type
947 || TREE_CODE_CLASS (code) == tcc_declaration
948 || TREE_CODE_CLASS (code) == tcc_constant)
950 if (TREE_VISITED (t))
953 TREE_VISITED (t) = 1;
956 /* If this node has been visited already, unshare it and don't look
958 else if (TREE_VISITED (t))
960 walk_tree (tp, mostly_copy_tree_r, NULL, NULL);
964 /* Otherwise, mark the tree as visited and keep looking. */
966 TREE_VISITED (t) = 1;
972 unmark_visited_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
973 void *data ATTRIBUTE_UNUSED)
975 if (TREE_VISITED (*tp))
976 TREE_VISITED (*tp) = 0;
983 /* Unshare all the trees in BODY_P, a pointer into the body of FNDECL, and the
984 bodies of any nested functions if we are unsharing the entire body of
988 unshare_body (tree *body_p, tree fndecl)
990 struct cgraph_node *cgn = cgraph_node (fndecl);
992 walk_tree (body_p, copy_if_shared_r, NULL, NULL);
993 if (body_p == &DECL_SAVED_TREE (fndecl))
994 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
995 unshare_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
998 /* Likewise, but mark all trees as not visited. */
1001 unvisit_body (tree *body_p, tree fndecl)
1003 struct cgraph_node *cgn = cgraph_node (fndecl);
1005 walk_tree (body_p, unmark_visited_r, NULL, NULL);
1006 if (body_p == &DECL_SAVED_TREE (fndecl))
1007 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
1008 unvisit_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
1011 /* Unconditionally make an unshared copy of EXPR. This is used when using
1012 stored expressions which span multiple functions, such as BINFO_VTABLE,
1013 as the normal unsharing process can't tell that they're shared. */
1016 unshare_expr (tree expr)
1018 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
1022 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
1023 contain statements and have a value. Assign its value to a temporary
1024 and give it void_type_node. Returns the temporary, or NULL_TREE if
1025 WRAPPER was already void. */
1028 voidify_wrapper_expr (tree wrapper, tree temp)
1030 tree type = TREE_TYPE (wrapper);
1031 if (type && !VOID_TYPE_P (type))
1035 /* Set p to point to the body of the wrapper. Loop until we find
1036 something that isn't a wrapper. */
1037 for (p = &wrapper; p && *p; )
1039 switch (TREE_CODE (*p))
1042 TREE_SIDE_EFFECTS (*p) = 1;
1043 TREE_TYPE (*p) = void_type_node;
1044 /* For a BIND_EXPR, the body is operand 1. */
1045 p = &BIND_EXPR_BODY (*p);
1048 case CLEANUP_POINT_EXPR:
1049 case TRY_FINALLY_EXPR:
1050 case TRY_CATCH_EXPR:
1051 TREE_SIDE_EFFECTS (*p) = 1;
1052 TREE_TYPE (*p) = void_type_node;
1053 p = &TREE_OPERAND (*p, 0);
1056 case STATEMENT_LIST:
1058 tree_stmt_iterator i = tsi_last (*p);
1059 TREE_SIDE_EFFECTS (*p) = 1;
1060 TREE_TYPE (*p) = void_type_node;
1061 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1066 /* Advance to the last statement. Set all container types to void. */
1067 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1069 TREE_SIDE_EFFECTS (*p) = 1;
1070 TREE_TYPE (*p) = void_type_node;
1080 if (p == NULL || IS_EMPTY_STMT (*p))
1084 /* The wrapper is on the RHS of an assignment that we're pushing
1086 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1087 || TREE_CODE (temp) == MODIFY_EXPR);
1088 TREE_OPERAND (temp, 1) = *p;
1093 temp = create_tmp_var (type, "retval");
1094 *p = build2 (INIT_EXPR, type, temp, *p);
1103 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1104 a temporary through which they communicate. */
1107 build_stack_save_restore (gimple *save, gimple *restore)
1111 *save = gimple_build_call (implicit_built_in_decls[BUILT_IN_STACK_SAVE], 0);
1112 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1113 gimple_call_set_lhs (*save, tmp_var);
1115 *restore = gimple_build_call (implicit_built_in_decls[BUILT_IN_STACK_RESTORE],
1119 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1121 static enum gimplify_status
1122 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1124 tree bind_expr = *expr_p;
1125 bool old_save_stack = gimplify_ctxp->save_stack;
1130 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1132 /* Mark variables seen in this bind expr. */
1133 for (t = BIND_EXPR_VARS (bind_expr); t ; t = TREE_CHAIN (t))
1135 if (TREE_CODE (t) == VAR_DECL)
1137 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1139 /* Mark variable as local. */
1140 if (ctx && !is_global_var (t)
1141 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1142 || splay_tree_lookup (ctx->variables,
1143 (splay_tree_key) t) == NULL))
1144 omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);
1146 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1148 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1149 cfun->has_local_explicit_reg_vars = true;
1152 /* Preliminarily mark non-addressed complex variables as eligible
1153 for promotion to gimple registers. We'll transform their uses
1155 We exclude complex types if not optimizing because they can be
1156 subject to partial stores in GNU C by means of the __real__ and
1157 __imag__ operators and we cannot promote them to total stores
1158 (see gimplify_modify_expr_complex_part). */
1160 && (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1161 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1162 && !TREE_THIS_VOLATILE (t)
1163 && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1164 && !needs_to_live_in_memory (t))
1165 DECL_GIMPLE_REG_P (t) = 1;
1168 gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1169 BIND_EXPR_BLOCK (bind_expr));
1170 gimple_push_bind_expr (gimple_bind);
1172 gimplify_ctxp->save_stack = false;
1174 /* Gimplify the body into the GIMPLE_BIND tuple's body. */
1176 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1177 gimple_bind_set_body (gimple_bind, body);
1179 if (gimplify_ctxp->save_stack)
1181 gimple stack_save, stack_restore, gs;
1182 gimple_seq cleanup, new_body;
1184 /* Save stack on entry and restore it on exit. Add a try_finally
1185 block to achieve this. Note that mudflap depends on the
1186 format of the emitted code: see mx_register_decls(). */
1187 build_stack_save_restore (&stack_save, &stack_restore);
1189 cleanup = new_body = NULL;
1190 gimplify_seq_add_stmt (&cleanup, stack_restore);
1191 gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
1192 GIMPLE_TRY_FINALLY);
1194 gimplify_seq_add_stmt (&new_body, stack_save);
1195 gimplify_seq_add_stmt (&new_body, gs);
1196 gimple_bind_set_body (gimple_bind, new_body);
1199 gimplify_ctxp->save_stack = old_save_stack;
1200 gimple_pop_bind_expr ();
1202 gimplify_seq_add_stmt (pre_p, gimple_bind);
1210 *expr_p = NULL_TREE;
1214 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1215 GIMPLE value, it is assigned to a new temporary and the statement is
1216 re-written to return the temporary.
1218 PRE_P points to the sequence where side effects that must happen before
1219 STMT should be stored. */
1221 static enum gimplify_status
1222 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1225 tree ret_expr = TREE_OPERAND (stmt, 0);
1226 tree result_decl, result;
1228 if (ret_expr == error_mark_node)
1232 || TREE_CODE (ret_expr) == RESULT_DECL
1233 || ret_expr == error_mark_node)
1235 gimple ret = gimple_build_return (ret_expr);
1236 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1237 gimplify_seq_add_stmt (pre_p, ret);
1241 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1242 result_decl = NULL_TREE;
1245 result_decl = TREE_OPERAND (ret_expr, 0);
1247 /* See through a return by reference. */
1248 if (TREE_CODE (result_decl) == INDIRECT_REF)
1249 result_decl = TREE_OPERAND (result_decl, 0);
1251 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1252 || TREE_CODE (ret_expr) == INIT_EXPR)
1253 && TREE_CODE (result_decl) == RESULT_DECL);
1256 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1257 Recall that aggregate_value_p is FALSE for any aggregate type that is
1258 returned in registers. If we're returning values in registers, then
1259 we don't want to extend the lifetime of the RESULT_DECL, particularly
1260 across another call. In addition, for those aggregates for which
1261 hard_function_value generates a PARALLEL, we'll die during normal
1262 expansion of structure assignments; there's special code in expand_return
1263 to handle this case that does not exist in expand_expr. */
1265 || aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1266 result = result_decl;
1267 else if (gimplify_ctxp->return_temp)
1268 result = gimplify_ctxp->return_temp;
1271 result = create_tmp_var (TREE_TYPE (result_decl), NULL);
1272 if (TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
1273 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
1274 DECL_GIMPLE_REG_P (result) = 1;
1276 /* ??? With complex control flow (usually involving abnormal edges),
1277 we can wind up warning about an uninitialized value for this. Due
1278 to how this variable is constructed and initialized, this is never
1279 true. Give up and never warn. */
1280 TREE_NO_WARNING (result) = 1;
1282 gimplify_ctxp->return_temp = result;
1285 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1286 Then gimplify the whole thing. */
1287 if (result != result_decl)
1288 TREE_OPERAND (ret_expr, 0) = result;
1290 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1292 ret = gimple_build_return (result);
1293 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1294 gimplify_seq_add_stmt (pre_p, ret);
1300 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1302 /* This is a variable-sized decl. Simplify its size and mark it
1303 for deferred expansion. Note that mudflap depends on the format
1304 of the emitted code: see mx_register_decls(). */
1305 tree t, addr, ptr_type;
1307 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1308 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1310 /* All occurrences of this decl in final gimplified code will be
1311 replaced by indirection. Setting DECL_VALUE_EXPR does two
1312 things: First, it lets the rest of the gimplifier know what
1313 replacement to use. Second, it lets the debug info know
1314 where to find the value. */
1315 ptr_type = build_pointer_type (TREE_TYPE (decl));
1316 addr = create_tmp_var (ptr_type, get_name (decl));
1317 DECL_IGNORED_P (addr) = 0;
1318 t = build_fold_indirect_ref (addr);
1319 SET_DECL_VALUE_EXPR (decl, t);
1320 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1322 t = built_in_decls[BUILT_IN_ALLOCA];
1323 t = build_call_expr (t, 1, DECL_SIZE_UNIT (decl));
1324 t = fold_convert (ptr_type, t);
1325 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1327 gimplify_and_add (t, seq_p);
1329 /* Indicate that we need to restore the stack level when the
1330 enclosing BIND_EXPR is exited. */
1331 gimplify_ctxp->save_stack = true;
1335 /* Gimplifies a DECL_EXPR node *STMT_P by making any necessary allocation
1336 and initialization explicit. */
1338 static enum gimplify_status
1339 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1341 tree stmt = *stmt_p;
1342 tree decl = DECL_EXPR_DECL (stmt);
1344 *stmt_p = NULL_TREE;
1346 if (TREE_TYPE (decl) == error_mark_node)
1349 if ((TREE_CODE (decl) == TYPE_DECL
1350 || TREE_CODE (decl) == VAR_DECL)
1351 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1352 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1354 if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
1356 tree init = DECL_INITIAL (decl);
1358 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1359 || (!TREE_STATIC (decl)
1360 && flag_stack_check == GENERIC_STACK_CHECK
1361 && compare_tree_int (DECL_SIZE_UNIT (decl),
1362 STACK_CHECK_MAX_VAR_SIZE) > 0))
1363 gimplify_vla_decl (decl, seq_p);
1365 if (init && init != error_mark_node)
1367 if (!TREE_STATIC (decl))
1369 DECL_INITIAL (decl) = NULL_TREE;
1370 init = build2 (INIT_EXPR, void_type_node, decl, init);
1371 gimplify_and_add (init, seq_p);
1375 /* We must still examine initializers for static variables
1376 as they may contain a label address. */
1377 walk_tree (&init, force_labels_r, NULL, NULL);
1380 /* Some front ends do not explicitly declare all anonymous
1381 artificial variables. We compensate here by declaring the
1382 variables, though it would be better if the front ends would
1383 explicitly declare them. */
1384 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1385 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1386 gimple_add_tmp_var (decl);
1392 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1393 and replacing the LOOP_EXPR with goto, but if the loop contains an
1394 EXIT_EXPR, we need to append a label for it to jump to. */
1396 static enum gimplify_status
1397 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1399 tree saved_label = gimplify_ctxp->exit_label;
1400 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1402 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1404 gimplify_ctxp->exit_label = NULL_TREE;
1406 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1408 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1410 if (gimplify_ctxp->exit_label)
1411 gimplify_seq_add_stmt (pre_p, gimple_build_label (gimplify_ctxp->exit_label));
1413 gimplify_ctxp->exit_label = saved_label;
1419 /* Gimplifies a statement list onto a sequence. These may be created either
1420 by an enlightened front-end, or by shortcut_cond_expr. */
1422 static enum gimplify_status
1423 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1425 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1427 tree_stmt_iterator i = tsi_start (*expr_p);
1429 while (!tsi_end_p (i))
1431 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1444 /* Compare two case labels. Because the front end should already have
1445 made sure that case ranges do not overlap, it is enough to only compare
1446 the CASE_LOW values of each case label. */
1449 compare_case_labels (const void *p1, const void *p2)
1451 const_tree const case1 = *(const_tree const*)p1;
1452 const_tree const case2 = *(const_tree const*)p2;
1454 /* The 'default' case label always goes first. */
1455 if (!CASE_LOW (case1))
1457 else if (!CASE_LOW (case2))
1460 return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
1464 /* Sort the case labels in LABEL_VEC in place in ascending order. */
1467 sort_case_labels (VEC(tree,heap)* label_vec)
1469 size_t len = VEC_length (tree, label_vec);
1470 qsort (VEC_address (tree, label_vec), len, sizeof (tree),
1471 compare_case_labels);
1475 /* Gimplify a SWITCH_EXPR, and collect a TREE_VEC of the labels it can
1478 static enum gimplify_status
1479 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
1481 tree switch_expr = *expr_p;
1482 gimple_seq switch_body_seq = NULL;
1483 enum gimplify_status ret;
1485 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
1487 if (ret == GS_ERROR || ret == GS_UNHANDLED)
1490 if (SWITCH_BODY (switch_expr))
1492 VEC (tree,heap) *labels;
1493 VEC (tree,heap) *saved_labels;
1494 tree default_case = NULL_TREE;
1496 gimple gimple_switch;
1498 /* If someone can be bothered to fill in the labels, they can
1499 be bothered to null out the body too. */
1500 gcc_assert (!SWITCH_LABELS (switch_expr));
1502 /* save old labels, get new ones from body, then restore the old
1503 labels. Save all the things from the switch body to append after. */
1504 saved_labels = gimplify_ctxp->case_labels;
1505 gimplify_ctxp->case_labels = VEC_alloc (tree, heap, 8);
1507 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
1508 labels = gimplify_ctxp->case_labels;
1509 gimplify_ctxp->case_labels = saved_labels;
1512 while (i < VEC_length (tree, labels))
1514 tree elt = VEC_index (tree, labels, i);
1515 tree low = CASE_LOW (elt);
1516 bool remove_element = FALSE;
1520 /* Discard empty ranges. */
1521 tree high = CASE_HIGH (elt);
1522 if (high && tree_int_cst_lt (high, low))
1523 remove_element = TRUE;
1527 /* The default case must be the last label in the list. */
1528 gcc_assert (!default_case);
1530 remove_element = TRUE;
1534 VEC_ordered_remove (tree, labels, i);
1540 if (!VEC_empty (tree, labels))
1541 sort_case_labels (labels);
1545 tree type = TREE_TYPE (switch_expr);
1547 /* If the switch has no default label, add one, so that we jump
1548 around the switch body. If the labels already cover the whole
1549 range of type, add the default label pointing to one of the
1551 if (type == void_type_node)
1552 type = TREE_TYPE (SWITCH_COND (switch_expr));
1554 && INTEGRAL_TYPE_P (type)
1555 && TYPE_MIN_VALUE (type)
1556 && TYPE_MAX_VALUE (type)
1557 && tree_int_cst_equal (CASE_LOW (VEC_index (tree, labels, 0)),
1558 TYPE_MIN_VALUE (type)))
1560 tree low, high = CASE_HIGH (VEC_index (tree, labels, len - 1));
1562 high = CASE_LOW (VEC_index (tree, labels, len - 1));
1563 if (tree_int_cst_equal (high, TYPE_MAX_VALUE (type)))
1565 for (i = 1; i < len; i++)
1567 high = CASE_LOW (VEC_index (tree, labels, i));
1568 low = CASE_HIGH (VEC_index (tree, labels, i - 1));
1570 low = CASE_LOW (VEC_index (tree, labels, i - 1));
1571 if ((TREE_INT_CST_LOW (low) + 1
1572 != TREE_INT_CST_LOW (high))
1573 || (TREE_INT_CST_HIGH (low)
1574 + (TREE_INT_CST_LOW (high) == 0)
1575 != TREE_INT_CST_HIGH (high)))
1579 default_case = build3 (CASE_LABEL_EXPR, void_type_node,
1580 NULL_TREE, NULL_TREE,
1581 CASE_LABEL (VEC_index (tree,
1591 = build3 (CASE_LABEL_EXPR, void_type_node,
1592 NULL_TREE, NULL_TREE,
1593 create_artificial_label (UNKNOWN_LOCATION));
1594 new_default = gimple_build_label (CASE_LABEL (default_case));
1595 gimplify_seq_add_stmt (&switch_body_seq, new_default);
1599 gimple_switch = gimple_build_switch_vec (SWITCH_COND (switch_expr),
1600 default_case, labels);
1601 gimplify_seq_add_stmt (pre_p, gimple_switch);
1602 gimplify_seq_add_seq (pre_p, switch_body_seq);
1603 VEC_free(tree, heap, labels);
1606 gcc_assert (SWITCH_LABELS (switch_expr));
1612 static enum gimplify_status
1613 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
1615 struct gimplify_ctx *ctxp;
1616 gimple gimple_label;
1618 /* Invalid OpenMP programs can play Duff's Device type games with
1619 #pragma omp parallel. At least in the C front end, we don't
1620 detect such invalid branches until after gimplification. */
1621 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1622 if (ctxp->case_labels)
1625 gimple_label = gimple_build_label (CASE_LABEL (*expr_p));
1626 VEC_safe_push (tree, heap, ctxp->case_labels, *expr_p);
1627 gimplify_seq_add_stmt (pre_p, gimple_label);
1632 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1636 build_and_jump (tree *label_p)
1638 if (label_p == NULL)
1639 /* If there's nowhere to jump, just fall through. */
1642 if (*label_p == NULL_TREE)
1644 tree label = create_artificial_label (UNKNOWN_LOCATION);
1648 return build1 (GOTO_EXPR, void_type_node, *label_p);
1651 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1652 This also involves building a label to jump to and communicating it to
1653 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1655 static enum gimplify_status
1656 gimplify_exit_expr (tree *expr_p)
1658 tree cond = TREE_OPERAND (*expr_p, 0);
1661 expr = build_and_jump (&gimplify_ctxp->exit_label);
1662 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1668 /* A helper function to be called via walk_tree. Mark all labels under *TP
1669 as being forced. To be called for DECL_INITIAL of static variables. */
1672 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1676 if (TREE_CODE (*tp) == LABEL_DECL)
1677 FORCED_LABEL (*tp) = 1;
1682 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1683 different from its canonical type, wrap the whole thing inside a
1684 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1687 The canonical type of a COMPONENT_REF is the type of the field being
1688 referenced--unless the field is a bit-field which can be read directly
1689 in a smaller mode, in which case the canonical type is the
1690 sign-appropriate type corresponding to that mode. */
1693 canonicalize_component_ref (tree *expr_p)
1695 tree expr = *expr_p;
1698 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
1700 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1701 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1703 type = TREE_TYPE (TREE_OPERAND (expr, 1));
1705 /* One could argue that all the stuff below is not necessary for
1706 the non-bitfield case and declare it a FE error if type
1707 adjustment would be needed. */
1708 if (TREE_TYPE (expr) != type)
1710 #ifdef ENABLE_TYPES_CHECKING
1711 tree old_type = TREE_TYPE (expr);
1715 /* We need to preserve qualifiers and propagate them from
1717 type_quals = TYPE_QUALS (type)
1718 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
1719 if (TYPE_QUALS (type) != type_quals)
1720 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
1722 /* Set the type of the COMPONENT_REF to the underlying type. */
1723 TREE_TYPE (expr) = type;
1725 #ifdef ENABLE_TYPES_CHECKING
1726 /* It is now a FE error, if the conversion from the canonical
1727 type to the original expression type is not useless. */
1728 gcc_assert (useless_type_conversion_p (old_type, type));
1733 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1734 to foo, embed that change in the ADDR_EXPR by converting
1739 where L is the lower bound. For simplicity, only do this for constant
1741 The constraint is that the type of &array[L] is trivially convertible
1745 canonicalize_addr_expr (tree *expr_p)
1747 tree expr = *expr_p;
1748 tree addr_expr = TREE_OPERAND (expr, 0);
1749 tree datype, ddatype, pddatype;
1751 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
1752 if (!POINTER_TYPE_P (TREE_TYPE (expr))
1753 || TREE_CODE (addr_expr) != ADDR_EXPR)
1756 /* The addr_expr type should be a pointer to an array. */
1757 datype = TREE_TYPE (TREE_TYPE (addr_expr));
1758 if (TREE_CODE (datype) != ARRAY_TYPE)
1761 /* The pointer to element type shall be trivially convertible to
1762 the expression pointer type. */
1763 ddatype = TREE_TYPE (datype);
1764 pddatype = build_pointer_type (ddatype);
1765 if (!useless_type_conversion_p (pddatype, ddatype))
1768 /* The lower bound and element sizes must be constant. */
1769 if (!TYPE_SIZE_UNIT (ddatype)
1770 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
1771 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1772 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1775 /* All checks succeeded. Build a new node to merge the cast. */
1776 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
1777 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1778 NULL_TREE, NULL_TREE);
1779 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
1782 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1783 underneath as appropriate. */
1785 static enum gimplify_status
1786 gimplify_conversion (tree *expr_p)
1789 location_t loc = EXPR_LOCATION (*expr_p);
1790 gcc_assert (CONVERT_EXPR_P (*expr_p));
1792 /* Then strip away all but the outermost conversion. */
1793 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1795 /* And remove the outermost conversion if it's useless. */
1796 if (tree_ssa_useless_type_conversion (*expr_p))
1797 *expr_p = TREE_OPERAND (*expr_p, 0);
1799 /* Attempt to avoid NOP_EXPR by producing reference to a subtype.
1800 For example this fold (subclass *)&A into &A->subclass avoiding
1801 a need for statement. */
1802 if (CONVERT_EXPR_P (*expr_p)
1803 && POINTER_TYPE_P (TREE_TYPE (*expr_p))
1804 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (*expr_p, 0)))
1805 && (tem = maybe_fold_offset_to_address
1806 (EXPR_LOCATION (*expr_p), TREE_OPERAND (*expr_p, 0),
1807 integer_zero_node, TREE_TYPE (*expr_p))) != NULL_TREE)
1810 /* If we still have a conversion at the toplevel,
1811 then canonicalize some constructs. */
1812 if (CONVERT_EXPR_P (*expr_p))
1814 tree sub = TREE_OPERAND (*expr_p, 0);
1816 /* If a NOP conversion is changing the type of a COMPONENT_REF
1817 expression, then canonicalize its type now in order to expose more
1818 redundant conversions. */
1819 if (TREE_CODE (sub) == COMPONENT_REF)
1820 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
1822 /* If a NOP conversion is changing a pointer to array of foo
1823 to a pointer to foo, embed that change in the ADDR_EXPR. */
1824 else if (TREE_CODE (sub) == ADDR_EXPR)
1825 canonicalize_addr_expr (expr_p);
1828 /* If we have a conversion to a non-register type force the
1829 use of a VIEW_CONVERT_EXPR instead. */
1830 if (!is_gimple_reg_type (TREE_TYPE (*expr_p)))
1831 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
1832 TREE_OPERAND (*expr_p, 0));
1837 /* Nonlocal VLAs seen in the current function. */
1838 static struct pointer_set_t *nonlocal_vlas;
1840 /* Gimplify a VAR_DECL or PARM_DECL. Returns GS_OK if we expanded a
1841 DECL_VALUE_EXPR, and it's worth re-examining things. */
1843 static enum gimplify_status
1844 gimplify_var_or_parm_decl (tree *expr_p)
1846 tree decl = *expr_p;
1848 /* ??? If this is a local variable, and it has not been seen in any
1849 outer BIND_EXPR, then it's probably the result of a duplicate
1850 declaration, for which we've already issued an error. It would
1851 be really nice if the front end wouldn't leak these at all.
1852 Currently the only known culprit is C++ destructors, as seen
1853 in g++.old-deja/g++.jason/binding.C. */
1854 if (TREE_CODE (decl) == VAR_DECL
1855 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1856 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1857 && decl_function_context (decl) == current_function_decl)
1859 gcc_assert (errorcount || sorrycount);
1863 /* When within an OpenMP context, notice uses of variables. */
1864 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1867 /* If the decl is an alias for another expression, substitute it now. */
1868 if (DECL_HAS_VALUE_EXPR_P (decl))
1870 tree value_expr = DECL_VALUE_EXPR (decl);
1872 /* For referenced nonlocal VLAs add a decl for debugging purposes
1873 to the current function. */
1874 if (TREE_CODE (decl) == VAR_DECL
1875 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1876 && nonlocal_vlas != NULL
1877 && TREE_CODE (value_expr) == INDIRECT_REF
1878 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
1879 && decl_function_context (decl) != current_function_decl)
1881 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1882 while (ctx && ctx->region_type == ORT_WORKSHARE)
1883 ctx = ctx->outer_context;
1884 if (!ctx && !pointer_set_insert (nonlocal_vlas, decl))
1886 tree copy = copy_node (decl), block;
1888 lang_hooks.dup_lang_specific_decl (copy);
1889 SET_DECL_RTL (copy, NULL_RTX);
1890 TREE_USED (copy) = 1;
1891 block = DECL_INITIAL (current_function_decl);
1892 TREE_CHAIN (copy) = BLOCK_VARS (block);
1893 BLOCK_VARS (block) = copy;
1894 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
1895 DECL_HAS_VALUE_EXPR_P (copy) = 1;
1899 *expr_p = unshare_expr (value_expr);
1907 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1911 : min_lval '[' val ']'
1913 | compound_lval '[' val ']'
1914 | compound_lval '.' ID
1916 This is not part of the original SIMPLE definition, which separates
1917 array and member references, but it seems reasonable to handle them
1918 together. Also, this way we don't run into problems with union
1919 aliasing; gcc requires that for accesses through a union to alias, the
1920 union reference must be explicit, which was not always the case when we
1921 were splitting up array and member refs.
1923 PRE_P points to the sequence where side effects that must happen before
1924 *EXPR_P should be stored.
1926 POST_P points to the sequence where side effects that must happen after
1927 *EXPR_P should be stored. */
1929 static enum gimplify_status
1930 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
1931 fallback_t fallback)
1934 VEC(tree,heap) *stack;
1935 enum gimplify_status ret = GS_OK, tret;
1937 location_t loc = EXPR_LOCATION (*expr_p);
1939 /* Create a stack of the subexpressions so later we can walk them in
1940 order from inner to outer. */
1941 stack = VEC_alloc (tree, heap, 10);
1943 /* We can handle anything that get_inner_reference can deal with. */
1944 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
1947 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
1948 if (TREE_CODE (*p) == INDIRECT_REF)
1949 *p = fold_indirect_ref_loc (loc, *p);
1951 if (handled_component_p (*p))
1953 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
1954 additional COMPONENT_REFs. */
1955 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
1956 && gimplify_var_or_parm_decl (p) == GS_OK)
1961 VEC_safe_push (tree, heap, stack, *p);
1964 gcc_assert (VEC_length (tree, stack));
1966 /* Now STACK is a stack of pointers to all the refs we've walked through
1967 and P points to the innermost expression.
1969 Java requires that we elaborated nodes in source order. That
1970 means we must gimplify the inner expression followed by each of
1971 the indices, in order. But we can't gimplify the inner
1972 expression until we deal with any variable bounds, sizes, or
1973 positions in order to deal with PLACEHOLDER_EXPRs.
1975 So we do this in three steps. First we deal with the annotations
1976 for any variables in the components, then we gimplify the base,
1977 then we gimplify any indices, from left to right. */
1978 for (i = VEC_length (tree, stack) - 1; i >= 0; i--)
1980 tree t = VEC_index (tree, stack, i);
1982 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1984 /* Gimplify the low bound and element type size and put them into
1985 the ARRAY_REF. If these values are set, they have already been
1987 if (TREE_OPERAND (t, 2) == NULL_TREE)
1989 tree low = unshare_expr (array_ref_low_bound (t));
1990 if (!is_gimple_min_invariant (low))
1992 TREE_OPERAND (t, 2) = low;
1993 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
1994 post_p, is_gimple_reg,
1996 ret = MIN (ret, tret);
2000 if (!TREE_OPERAND (t, 3))
2002 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2003 tree elmt_size = unshare_expr (array_ref_element_size (t));
2004 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
2006 /* Divide the element size by the alignment of the element
2008 elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
2010 if (!is_gimple_min_invariant (elmt_size))
2012 TREE_OPERAND (t, 3) = elmt_size;
2013 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
2014 post_p, is_gimple_reg,
2016 ret = MIN (ret, tret);
2020 else if (TREE_CODE (t) == COMPONENT_REF)
2022 /* Set the field offset into T and gimplify it. */
2023 if (!TREE_OPERAND (t, 2))
2025 tree offset = unshare_expr (component_ref_field_offset (t));
2026 tree field = TREE_OPERAND (t, 1);
2028 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2030 /* Divide the offset by its alignment. */
2031 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
2033 if (!is_gimple_min_invariant (offset))
2035 TREE_OPERAND (t, 2) = offset;
2036 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2037 post_p, is_gimple_reg,
2039 ret = MIN (ret, tret);
2045 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
2046 so as to match the min_lval predicate. Failure to do so may result
2047 in the creation of large aggregate temporaries. */
2048 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2049 fallback | fb_lvalue);
2050 ret = MIN (ret, tret);
2052 /* And finally, the indices and operands to BIT_FIELD_REF. During this
2053 loop we also remove any useless conversions. */
2054 for (; VEC_length (tree, stack) > 0; )
2056 tree t = VEC_pop (tree, stack);
2058 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2060 /* Gimplify the dimension. */
2061 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2063 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2064 is_gimple_val, fb_rvalue);
2065 ret = MIN (ret, tret);
2068 else if (TREE_CODE (t) == BIT_FIELD_REF)
2070 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2071 is_gimple_val, fb_rvalue);
2072 ret = MIN (ret, tret);
2073 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2074 is_gimple_val, fb_rvalue);
2075 ret = MIN (ret, tret);
2078 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2080 /* The innermost expression P may have originally had
2081 TREE_SIDE_EFFECTS set which would have caused all the outer
2082 expressions in *EXPR_P leading to P to also have had
2083 TREE_SIDE_EFFECTS set. */
2084 recalculate_side_effects (t);
2087 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
2088 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
2090 canonicalize_component_ref (expr_p);
2091 ret = MIN (ret, GS_OK);
2094 VEC_free (tree, heap, stack);
2099 /* Gimplify the self modifying expression pointed to by EXPR_P
2102 PRE_P points to the list where side effects that must happen before
2103 *EXPR_P should be stored.
2105 POST_P points to the list where side effects that must happen after
2106 *EXPR_P should be stored.
2108 WANT_VALUE is nonzero iff we want to use the value of this expression
2109 in another expression. */
2111 static enum gimplify_status
2112 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2115 enum tree_code code;
2116 tree lhs, lvalue, rhs, t1;
2117 gimple_seq post = NULL, *orig_post_p = post_p;
2119 enum tree_code arith_code;
2120 enum gimplify_status ret;
2121 location_t loc = EXPR_LOCATION (*expr_p);
2123 code = TREE_CODE (*expr_p);
2125 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2126 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
2128 /* Prefix or postfix? */
2129 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2130 /* Faster to treat as prefix if result is not used. */
2131 postfix = want_value;
2135 /* For postfix, make sure the inner expression's post side effects
2136 are executed after side effects from this expression. */
2140 /* Add or subtract? */
2141 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2142 arith_code = PLUS_EXPR;
2144 arith_code = MINUS_EXPR;
2146 /* Gimplify the LHS into a GIMPLE lvalue. */
2147 lvalue = TREE_OPERAND (*expr_p, 0);
2148 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2149 if (ret == GS_ERROR)
2152 /* Extract the operands to the arithmetic operation. */
2154 rhs = TREE_OPERAND (*expr_p, 1);
2156 /* For postfix operator, we evaluate the LHS to an rvalue and then use
2157 that as the result value and in the postqueue operation. We also
2158 make sure to make lvalue a minimal lval, see
2159 gcc.c-torture/execute/20040313-1.c for an example where this matters. */
2162 if (!is_gimple_min_lval (lvalue))
2164 mark_addressable (lvalue);
2165 lvalue = build_fold_addr_expr_loc (input_location, lvalue);
2166 gimplify_expr (&lvalue, pre_p, post_p, is_gimple_val, fb_rvalue);
2167 lvalue = build_fold_indirect_ref_loc (input_location, lvalue);
2169 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2170 if (ret == GS_ERROR)
2174 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
2175 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2177 rhs = fold_convert_loc (loc, sizetype, rhs);
2178 if (arith_code == MINUS_EXPR)
2179 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
2180 arith_code = POINTER_PLUS_EXPR;
2183 t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs);
2187 gimplify_assign (lvalue, t1, orig_post_p);
2188 gimplify_seq_add_seq (orig_post_p, post);
2194 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
2200 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
2203 maybe_with_size_expr (tree *expr_p)
2205 tree expr = *expr_p;
2206 tree type = TREE_TYPE (expr);
2209 /* If we've already wrapped this or the type is error_mark_node, we can't do
2211 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2212 || type == error_mark_node)
2215 /* If the size isn't known or is a constant, we have nothing to do. */
2216 size = TYPE_SIZE_UNIT (type);
2217 if (!size || TREE_CODE (size) == INTEGER_CST)
2220 /* Otherwise, make a WITH_SIZE_EXPR. */
2221 size = unshare_expr (size);
2222 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2223 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
2227 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
2228 Store any side-effects in PRE_P. CALL_LOCATION is the location of
2231 static enum gimplify_status
2232 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
2234 bool (*test) (tree);
2237 /* In general, we allow lvalues for function arguments to avoid
2238 extra overhead of copying large aggregates out of even larger
2239 aggregates into temporaries only to copy the temporaries to
2240 the argument list. Make optimizers happy by pulling out to
2241 temporaries those types that fit in registers. */
2242 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2243 test = is_gimple_val, fb = fb_rvalue;
2245 test = is_gimple_lvalue, fb = fb_either;
2247 /* If this is a variable sized type, we must remember the size. */
2248 maybe_with_size_expr (arg_p);
2250 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
2251 /* Make sure arguments have the same location as the function call
2253 protected_set_expr_location (*arg_p, call_location);
2255 /* There is a sequence point before a function call. Side effects in
2256 the argument list must occur before the actual call. So, when
2257 gimplifying arguments, force gimplify_expr to use an internal
2258 post queue which is then appended to the end of PRE_P. */
2259 return gimplify_expr (arg_p, pre_p, NULL, test, fb);
2263 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
2264 WANT_VALUE is true if the result of the call is desired. */
2266 static enum gimplify_status
2267 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
2269 tree fndecl, parms, p;
2270 enum gimplify_status ret;
2273 bool builtin_va_start_p = FALSE;
2274 location_t loc = EXPR_LOCATION (*expr_p);
2276 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
2278 /* For reliable diagnostics during inlining, it is necessary that
2279 every call_expr be annotated with file and line. */
2280 if (! EXPR_HAS_LOCATION (*expr_p))
2281 SET_EXPR_LOCATION (*expr_p, input_location);
2283 /* This may be a call to a builtin function.
2285 Builtin function calls may be transformed into different
2286 (and more efficient) builtin function calls under certain
2287 circumstances. Unfortunately, gimplification can muck things
2288 up enough that the builtin expanders are not aware that certain
2289 transformations are still valid.
2291 So we attempt transformation/gimplification of the call before
2292 we gimplify the CALL_EXPR. At this time we do not manage to
2293 transform all calls in the same manner as the expanders do, but
2294 we do transform most of them. */
2295 fndecl = get_callee_fndecl (*expr_p);
2296 if (fndecl && DECL_BUILT_IN (fndecl))
2298 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2300 if (new_tree && new_tree != *expr_p)
2302 /* There was a transformation of this call which computes the
2303 same value, but in a more efficient way. Return and try
2309 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2310 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_VA_START)
2312 builtin_va_start_p = TRUE;
2313 if (call_expr_nargs (*expr_p) < 2)
2315 error ("too few arguments to function %<va_start%>");
2316 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2320 if (fold_builtin_next_arg (*expr_p, true))
2322 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2328 /* There is a sequence point before the call, so any side effects in
2329 the calling expression must occur before the actual call. Force
2330 gimplify_expr to use an internal post queue. */
2331 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
2332 is_gimple_call_addr, fb_rvalue);
2334 nargs = call_expr_nargs (*expr_p);
2336 /* Get argument types for verification. */
2337 fndecl = get_callee_fndecl (*expr_p);
2340 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2341 else if (POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_FN (*expr_p))))
2342 parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (*expr_p))));
2344 if (fndecl && DECL_ARGUMENTS (fndecl))
2345 p = DECL_ARGUMENTS (fndecl);
2350 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
2353 /* If the last argument is __builtin_va_arg_pack () and it is not
2354 passed as a named argument, decrease the number of CALL_EXPR
2355 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
2358 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
2360 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
2361 tree last_arg_fndecl = get_callee_fndecl (last_arg);
2364 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
2365 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
2366 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
2368 tree call = *expr_p;
2371 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
2372 CALL_EXPR_FN (call),
2373 nargs, CALL_EXPR_ARGP (call));
2375 /* Copy all CALL_EXPR flags, location and block, except
2376 CALL_EXPR_VA_ARG_PACK flag. */
2377 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
2378 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
2379 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
2380 = CALL_EXPR_RETURN_SLOT_OPT (call);
2381 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
2382 CALL_CANNOT_INLINE_P (*expr_p) = CALL_CANNOT_INLINE_P (call);
2383 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
2384 TREE_BLOCK (*expr_p) = TREE_BLOCK (call);
2386 /* Set CALL_EXPR_VA_ARG_PACK. */
2387 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
2391 /* Finally, gimplify the function arguments. */
2394 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
2395 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
2396 PUSH_ARGS_REVERSED ? i-- : i++)
2398 enum gimplify_status t;
2400 /* Avoid gimplifying the second argument to va_start, which needs to
2401 be the plain PARM_DECL. */
2402 if ((i != 1) || !builtin_va_start_p)
2404 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2405 EXPR_LOCATION (*expr_p));
2413 /* Try this again in case gimplification exposed something. */
2414 if (ret != GS_ERROR)
2416 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2418 if (new_tree && new_tree != *expr_p)
2420 /* There was a transformation of this call which computes the
2421 same value, but in a more efficient way. Return and try
2429 *expr_p = error_mark_node;
2433 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
2434 decl. This allows us to eliminate redundant or useless
2435 calls to "const" functions. */
2436 if (TREE_CODE (*expr_p) == CALL_EXPR)
2438 int flags = call_expr_flags (*expr_p);
2439 if (flags & (ECF_CONST | ECF_PURE)
2440 /* An infinite loop is considered a side effect. */
2441 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
2442 TREE_SIDE_EFFECTS (*expr_p) = 0;
2445 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
2446 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
2447 form and delegate the creation of a GIMPLE_CALL to
2448 gimplify_modify_expr. This is always possible because when
2449 WANT_VALUE is true, the caller wants the result of this call into
2450 a temporary, which means that we will emit an INIT_EXPR in
2451 internal_get_tmp_var which will then be handled by
2452 gimplify_modify_expr. */
2455 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
2456 have to do is replicate it as a GIMPLE_CALL tuple. */
2457 call = gimple_build_call_from_tree (*expr_p);
2458 gimplify_seq_add_stmt (pre_p, call);
2459 *expr_p = NULL_TREE;
2465 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2466 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2468 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2469 condition is true or false, respectively. If null, we should generate
2470 our own to skip over the evaluation of this specific expression.
2472 LOCUS is the source location of the COND_EXPR.
2474 This function is the tree equivalent of do_jump.
2476 shortcut_cond_r should only be called by shortcut_cond_expr. */
2479 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
2482 tree local_label = NULL_TREE;
2483 tree t, expr = NULL;
2485 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2486 retain the shortcut semantics. Just insert the gotos here;
2487 shortcut_cond_expr will append the real blocks later. */
2488 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2490 location_t new_locus;
2492 /* Turn if (a && b) into
2494 if (a); else goto no;
2495 if (b) goto yes; else goto no;
2498 if (false_label_p == NULL)
2499 false_label_p = &local_label;
2501 /* Keep the original source location on the first 'if'. */
2502 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
2503 append_to_statement_list (t, &expr);
2505 /* Set the source location of the && on the second 'if'. */
2506 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2507 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2509 append_to_statement_list (t, &expr);
2511 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2513 location_t new_locus;
2515 /* Turn if (a || b) into
2518 if (b) goto yes; else goto no;
2521 if (true_label_p == NULL)
2522 true_label_p = &local_label;
2524 /* Keep the original source location on the first 'if'. */
2525 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
2526 append_to_statement_list (t, &expr);
2528 /* Set the source location of the || on the second 'if'. */
2529 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2530 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2532 append_to_statement_list (t, &expr);
2534 else if (TREE_CODE (pred) == COND_EXPR)
2536 location_t new_locus;
2538 /* As long as we're messing with gotos, turn if (a ? b : c) into
2540 if (b) goto yes; else goto no;
2542 if (c) goto yes; else goto no; */
2544 /* Keep the original source location on the first 'if'. Set the source
2545 location of the ? on the second 'if'. */
2546 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2547 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2548 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2549 false_label_p, locus),
2550 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
2551 false_label_p, new_locus));
2555 expr = build3 (COND_EXPR, void_type_node, pred,
2556 build_and_jump (true_label_p),
2557 build_and_jump (false_label_p));
2558 SET_EXPR_LOCATION (expr, locus);
2563 t = build1 (LABEL_EXPR, void_type_node, local_label);
2564 append_to_statement_list (t, &expr);
2570 /* Given a conditional expression EXPR with short-circuit boolean
2571 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
2572 predicate appart into the equivalent sequence of conditionals. */
2575 shortcut_cond_expr (tree expr)
2577 tree pred = TREE_OPERAND (expr, 0);
2578 tree then_ = TREE_OPERAND (expr, 1);
2579 tree else_ = TREE_OPERAND (expr, 2);
2580 tree true_label, false_label, end_label, t;
2582 tree *false_label_p;
2583 bool emit_end, emit_false, jump_over_else;
2584 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2585 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2587 /* First do simple transformations. */
2590 /* If there is no 'else', turn
2593 if (a) if (b) then c. */
2594 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2596 /* Keep the original source location on the first 'if'. */
2597 location_t locus = EXPR_HAS_LOCATION (expr)
2598 ? EXPR_LOCATION (expr) : input_location;
2599 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2600 /* Set the source location of the && on the second 'if'. */
2601 if (EXPR_HAS_LOCATION (pred))
2602 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2603 then_ = shortcut_cond_expr (expr);
2604 then_se = then_ && TREE_SIDE_EFFECTS (then_);
2605 pred = TREE_OPERAND (pred, 0);
2606 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2607 SET_EXPR_LOCATION (expr, locus);
2613 /* If there is no 'then', turn
2616 if (a); else if (b); else d. */
2617 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2619 /* Keep the original source location on the first 'if'. */
2620 location_t locus = EXPR_HAS_LOCATION (expr)
2621 ? EXPR_LOCATION (expr) : input_location;
2622 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2623 /* Set the source location of the || on the second 'if'. */
2624 if (EXPR_HAS_LOCATION (pred))
2625 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2626 else_ = shortcut_cond_expr (expr);
2627 else_se = else_ && TREE_SIDE_EFFECTS (else_);
2628 pred = TREE_OPERAND (pred, 0);
2629 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2630 SET_EXPR_LOCATION (expr, locus);
2634 /* If we're done, great. */
2635 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2636 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2639 /* Otherwise we need to mess with gotos. Change
2642 if (a); else goto no;
2645 and recursively gimplify the condition. */
2647 true_label = false_label = end_label = NULL_TREE;
2649 /* If our arms just jump somewhere, hijack those labels so we don't
2650 generate jumps to jumps. */
2653 && TREE_CODE (then_) == GOTO_EXPR
2654 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2656 true_label = GOTO_DESTINATION (then_);
2662 && TREE_CODE (else_) == GOTO_EXPR
2663 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2665 false_label = GOTO_DESTINATION (else_);
2670 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
2672 true_label_p = &true_label;
2674 true_label_p = NULL;
2676 /* The 'else' branch also needs a label if it contains interesting code. */
2677 if (false_label || else_se)
2678 false_label_p = &false_label;
2680 false_label_p = NULL;
2682 /* If there was nothing else in our arms, just forward the label(s). */
2683 if (!then_se && !else_se)
2684 return shortcut_cond_r (pred, true_label_p, false_label_p,
2685 EXPR_HAS_LOCATION (expr)
2686 ? EXPR_LOCATION (expr) : input_location);
2688 /* If our last subexpression already has a terminal label, reuse it. */
2690 t = expr_last (else_);
2692 t = expr_last (then_);
2695 if (t && TREE_CODE (t) == LABEL_EXPR)
2696 end_label = LABEL_EXPR_LABEL (t);
2698 /* If we don't care about jumping to the 'else' branch, jump to the end
2699 if the condition is false. */
2701 false_label_p = &end_label;
2703 /* We only want to emit these labels if we aren't hijacking them. */
2704 emit_end = (end_label == NULL_TREE);
2705 emit_false = (false_label == NULL_TREE);
2707 /* We only emit the jump over the else clause if we have to--if the
2708 then clause may fall through. Otherwise we can wind up with a
2709 useless jump and a useless label at the end of gimplified code,
2710 which will cause us to think that this conditional as a whole
2711 falls through even if it doesn't. If we then inline a function
2712 which ends with such a condition, that can cause us to issue an
2713 inappropriate warning about control reaching the end of a
2714 non-void function. */
2715 jump_over_else = block_may_fallthru (then_);
2717 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
2718 EXPR_HAS_LOCATION (expr)
2719 ? EXPR_LOCATION (expr) : input_location);
2722 append_to_statement_list (pred, &expr);
2724 append_to_statement_list (then_, &expr);
2729 tree last = expr_last (expr);
2730 t = build_and_jump (&end_label);
2731 if (EXPR_HAS_LOCATION (last))
2732 SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
2733 append_to_statement_list (t, &expr);
2737 t = build1 (LABEL_EXPR, void_type_node, false_label);
2738 append_to_statement_list (t, &expr);
2740 append_to_statement_list (else_, &expr);
2742 if (emit_end && end_label)
2744 t = build1 (LABEL_EXPR, void_type_node, end_label);
2745 append_to_statement_list (t, &expr);
2751 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
2754 gimple_boolify (tree expr)
2756 tree type = TREE_TYPE (expr);
2757 location_t loc = EXPR_LOCATION (expr);
2759 if (TREE_CODE (type) == BOOLEAN_TYPE)
2762 switch (TREE_CODE (expr))
2764 case TRUTH_AND_EXPR:
2766 case TRUTH_XOR_EXPR:
2767 case TRUTH_ANDIF_EXPR:
2768 case TRUTH_ORIF_EXPR:
2769 /* Also boolify the arguments of truth exprs. */
2770 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2773 case TRUTH_NOT_EXPR:
2774 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2777 case EQ_EXPR: case NE_EXPR:
2778 case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
2779 /* These expressions always produce boolean results. */
2780 TREE_TYPE (expr) = boolean_type_node;
2784 /* Other expressions that get here must have boolean values, but
2785 might need to be converted to the appropriate mode. */
2786 return fold_convert_loc (loc, boolean_type_node, expr);
2790 /* Given a conditional expression *EXPR_P without side effects, gimplify
2791 its operands. New statements are inserted to PRE_P. */
2793 static enum gimplify_status
2794 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
2796 tree expr = *expr_p, cond;
2797 enum gimplify_status ret, tret;
2798 enum tree_code code;
2800 cond = gimple_boolify (COND_EXPR_COND (expr));
2802 /* We need to handle && and || specially, as their gimplification
2803 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
2804 code = TREE_CODE (cond);
2805 if (code == TRUTH_ANDIF_EXPR)
2806 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
2807 else if (code == TRUTH_ORIF_EXPR)
2808 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
2809 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
2810 COND_EXPR_COND (*expr_p) = cond;
2812 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
2813 is_gimple_val, fb_rvalue);
2814 ret = MIN (ret, tret);
2815 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
2816 is_gimple_val, fb_rvalue);
2818 return MIN (ret, tret);
2821 /* Returns true if evaluating EXPR could trap.
2822 EXPR is GENERIC, while tree_could_trap_p can be called
2826 generic_expr_could_trap_p (tree expr)
2830 if (!expr || is_gimple_val (expr))
2833 if (!EXPR_P (expr) || tree_could_trap_p (expr))
2836 n = TREE_OPERAND_LENGTH (expr);
2837 for (i = 0; i < n; i++)
2838 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
2844 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
2853 The second form is used when *EXPR_P is of type void.
2855 PRE_P points to the list where side effects that must happen before
2856 *EXPR_P should be stored. */
2858 static enum gimplify_status
2859 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
2861 tree expr = *expr_p;
2862 tree tmp, type, arm1, arm2;
2863 enum gimplify_status ret;
2864 tree label_true, label_false, label_cont;
2865 bool have_then_clause_p, have_else_clause_p;
2867 enum tree_code pred_code;
2868 gimple_seq seq = NULL;
2869 location_t loc = EXPR_LOCATION (*expr_p);
2871 type = TREE_TYPE (expr);
2873 /* If this COND_EXPR has a value, copy the values into a temporary within
2875 if (! VOID_TYPE_P (type))
2879 /* If an rvalue is ok or we do not require an lvalue, avoid creating
2880 an addressable temporary. */
2881 if (((fallback & fb_rvalue)
2882 || !(fallback & fb_lvalue))
2883 && !TREE_ADDRESSABLE (type))
2885 if (gimplify_ctxp->allow_rhs_cond_expr
2886 /* If either branch has side effects or could trap, it can't be
2887 evaluated unconditionally. */
2888 && !TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1))
2889 && !generic_expr_could_trap_p (TREE_OPERAND (*expr_p, 1))
2890 && !TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 2))
2891 && !generic_expr_could_trap_p (TREE_OPERAND (*expr_p, 2)))
2892 return gimplify_pure_cond_expr (expr_p, pre_p);
2894 result = tmp = create_tmp_var (TREE_TYPE (expr), "iftmp");
2899 tree type = build_pointer_type (TREE_TYPE (expr));
2901 if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
2902 TREE_OPERAND (expr, 1) =
2903 build_fold_addr_expr_loc (loc, TREE_OPERAND (expr, 1));
2905 if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
2906 TREE_OPERAND (expr, 2) =
2907 build_fold_addr_expr_loc (loc, TREE_OPERAND (expr, 2));
2909 tmp = create_tmp_var (type, "iftmp");
2911 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (expr, 0),
2912 TREE_OPERAND (expr, 1), TREE_OPERAND (expr, 2));
2914 result = build_fold_indirect_ref_loc (loc, tmp);
2917 /* Build the then clause, 't1 = a;'. But don't build an assignment
2918 if this branch is void; in C++ it can be, if it's a throw. */
2919 if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
2920 TREE_OPERAND (expr, 1)
2921 = build2 (MODIFY_EXPR, TREE_TYPE (tmp), tmp, TREE_OPERAND (expr, 1));
2923 /* Build the else clause, 't1 = b;'. */
2924 if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
2925 TREE_OPERAND (expr, 2)
2926 = build2 (MODIFY_EXPR, TREE_TYPE (tmp), tmp, TREE_OPERAND (expr, 2));
2928 TREE_TYPE (expr) = void_type_node;
2929 recalculate_side_effects (expr);
2931 /* Move the COND_EXPR to the prequeue. */
2932 gimplify_stmt (&expr, pre_p);
2938 /* Make sure the condition has BOOLEAN_TYPE. */
2939 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2941 /* Break apart && and || conditions. */
2942 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
2943 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
2945 expr = shortcut_cond_expr (expr);
2947 if (expr != *expr_p)
2951 /* We can't rely on gimplify_expr to re-gimplify the expanded
2952 form properly, as cleanups might cause the target labels to be
2953 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
2954 set up a conditional context. */
2955 gimple_push_condition ();
2956 gimplify_stmt (expr_p, &seq);
2957 gimple_pop_condition (pre_p);
2958 gimple_seq_add_seq (pre_p, seq);
2964 /* Now do the normal gimplification. */
2966 /* Gimplify condition. */
2967 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
2969 if (ret == GS_ERROR)
2971 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
2973 gimple_push_condition ();
2975 have_then_clause_p = have_else_clause_p = false;
2976 if (TREE_OPERAND (expr, 1) != NULL
2977 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
2978 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
2979 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
2980 == current_function_decl)
2981 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
2982 have different locations, otherwise we end up with incorrect
2983 location information on the branches. */
2985 || !EXPR_HAS_LOCATION (expr)
2986 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
2987 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
2989 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
2990 have_then_clause_p = true;
2993 label_true = create_artificial_label (UNKNOWN_LOCATION);
2994 if (TREE_OPERAND (expr, 2) != NULL
2995 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
2996 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
2997 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
2998 == current_function_decl)
2999 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3000 have different locations, otherwise we end up with incorrect
3001 location information on the branches. */
3003 || !EXPR_HAS_LOCATION (expr)
3004 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3005 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3007 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3008 have_else_clause_p = true;
3011 label_false = create_artificial_label (UNKNOWN_LOCATION);
3013 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
3016 gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true,
3019 gimplify_seq_add_stmt (&seq, gimple_cond);
3020 label_cont = NULL_TREE;
3021 if (!have_then_clause_p)
3023 /* For if (...) {} else { code; } put label_true after
3025 if (TREE_OPERAND (expr, 1) == NULL_TREE
3026 && !have_else_clause_p
3027 && TREE_OPERAND (expr, 2) != NULL_TREE)
3028 label_cont = label_true;
3031 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
3032 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
3033 /* For if (...) { code; } else {} or
3034 if (...) { code; } else goto label; or
3035 if (...) { code; return; } else { ... }
3036 label_cont isn't needed. */
3037 if (!have_else_clause_p
3038 && TREE_OPERAND (expr, 2) != NULL_TREE
3039 && gimple_seq_may_fallthru (seq))
3042 label_cont = create_artificial_label (UNKNOWN_LOCATION);
3044 g = gimple_build_goto (label_cont);
3046 /* GIMPLE_COND's are very low level; they have embedded
3047 gotos. This particular embedded goto should not be marked
3048 with the location of the original COND_EXPR, as it would
3049 correspond to the COND_EXPR's condition, not the ELSE or the
3050 THEN arms. To avoid marking it with the wrong location, flag
3051 it as "no location". */
3052 gimple_set_do_not_emit_location (g);
3054 gimplify_seq_add_stmt (&seq, g);
3058 if (!have_else_clause_p)
3060 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
3061 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
3064 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
3066 gimple_pop_condition (pre_p);
3067 gimple_seq_add_seq (pre_p, seq);
3069 if (ret == GS_ERROR)
3071 else if (have_then_clause_p || have_else_clause_p)
3075 /* Both arms are empty; replace the COND_EXPR with its predicate. */
3076 expr = TREE_OPERAND (expr, 0);
3077 gimplify_stmt (&expr, pre_p);
3084 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3085 a call to __builtin_memcpy. */
3087 static enum gimplify_status
3088 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
3091 tree t, to, to_ptr, from, from_ptr;
3093 location_t loc = EXPR_LOCATION (*expr_p);
3095 to = TREE_OPERAND (*expr_p, 0);
3096 from = TREE_OPERAND (*expr_p, 1);
3098 mark_addressable (from);
3099 from_ptr = build_fold_addr_expr_loc (loc, from);
3100 gimplify_arg (&from_ptr, seq_p, loc);
3102 mark_addressable (to);
3103 to_ptr = build_fold_addr_expr_loc (loc, to);
3104 gimplify_arg (&to_ptr, seq_p, loc);
3106 t = implicit_built_in_decls[BUILT_IN_MEMCPY];
3108 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
3112 /* tmp = memcpy() */
3113 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3114 gimple_call_set_lhs (gs, t);
3115 gimplify_seq_add_stmt (seq_p, gs);
3117 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3121 gimplify_seq_add_stmt (seq_p, gs);
3126 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
3127 a call to __builtin_memset. In this case we know that the RHS is
3128 a CONSTRUCTOR with an empty element list. */
3130 static enum gimplify_status
3131 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3134 tree t, from, to, to_ptr;
3136 location_t loc = EXPR_LOCATION (*expr_p);
3138 /* Assert our assumptions, to abort instead of producing wrong code
3139 silently if they are not met. Beware that the RHS CONSTRUCTOR might
3140 not be immediately exposed. */
3141 from = TREE_OPERAND (*expr_p, 1);
3142 if (TREE_CODE (from) == WITH_SIZE_EXPR)
3143 from = TREE_OPERAND (from, 0);
3145 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
3146 && VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (from)));
3149 to = TREE_OPERAND (*expr_p, 0);
3151 to_ptr = build_fold_addr_expr_loc (loc, to);
3152 gimplify_arg (&to_ptr, seq_p, loc);
3153 t = implicit_built_in_decls[BUILT_IN_MEMSET];
3155 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
3159 /* tmp = memset() */
3160 t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3161 gimple_call_set_lhs (gs, t);
3162 gimplify_seq_add_stmt (seq_p, gs);
3164 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3168 gimplify_seq_add_stmt (seq_p, gs);
3173 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
3174 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
3175 assignment. Returns non-null if we detect a potential overlap. */
3177 struct gimplify_init_ctor_preeval_data
3179 /* The base decl of the lhs object. May be NULL, in which case we
3180 have to assume the lhs is indirect. */
3183 /* The alias set of the lhs object. */
3184 alias_set_type lhs_alias_set;
3188 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
3190 struct gimplify_init_ctor_preeval_data *data
3191 = (struct gimplify_init_ctor_preeval_data *) xdata;
3194 /* If we find the base object, obviously we have overlap. */
3195 if (data->lhs_base_decl == t)
3198 /* If the constructor component is indirect, determine if we have a
3199 potential overlap with the lhs. The only bits of information we
3200 have to go on at this point are addressability and alias sets. */
3201 if (TREE_CODE (t) == INDIRECT_REF
3202 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3203 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
3206 /* If the constructor component is a call, determine if it can hide a
3207 potential overlap with the lhs through an INDIRECT_REF like above. */
3208 if (TREE_CODE (t) == CALL_EXPR)
3210 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
3212 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
3213 if (POINTER_TYPE_P (TREE_VALUE (type))
3214 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3215 && alias_sets_conflict_p (data->lhs_alias_set,
3217 (TREE_TYPE (TREE_VALUE (type)))))
3221 if (IS_TYPE_OR_DECL_P (t))
3226 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
3227 force values that overlap with the lhs (as described by *DATA)
3228 into temporaries. */
3231 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3232 struct gimplify_init_ctor_preeval_data *data)
3234 enum gimplify_status one;
3236 /* If the value is constant, then there's nothing to pre-evaluate. */
3237 if (TREE_CONSTANT (*expr_p))
3239 /* Ensure it does not have side effects, it might contain a reference to
3240 the object we're initializing. */
3241 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
3245 /* If the type has non-trivial constructors, we can't pre-evaluate. */
3246 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
3249 /* Recurse for nested constructors. */
3250 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
3252 unsigned HOST_WIDE_INT ix;
3253 constructor_elt *ce;
3254 VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (*expr_p);
3256 for (ix = 0; VEC_iterate (constructor_elt, v, ix, ce); ix++)
3257 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
3262 /* If this is a variable sized type, we must remember the size. */
3263 maybe_with_size_expr (expr_p);
3265 /* Gimplify the constructor element to something appropriate for the rhs
3266 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
3267 the gimplifier will consider this a store to memory. Doing this
3268 gimplification now means that we won't have to deal with complicated
3269 language-specific trees, nor trees like SAVE_EXPR that can induce
3270 exponential search behavior. */
3271 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
3272 if (one == GS_ERROR)
3278 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
3279 with the lhs, since "a = { .x=a }" doesn't make sense. This will
3280 always be true for all scalars, since is_gimple_mem_rhs insists on a
3281 temporary variable for them. */
3282 if (DECL_P (*expr_p))
3285 /* If this is of variable size, we have no choice but to assume it doesn't
3286 overlap since we can't make a temporary for it. */
3287 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
3290 /* Otherwise, we must search for overlap ... */
3291 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
3294 /* ... and if found, force the value into a temporary. */
3295 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
3298 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
3299 a RANGE_EXPR in a CONSTRUCTOR for an array.
3303 object[var] = value;
3310 We increment var _after_ the loop exit check because we might otherwise
3311 fail if upper == TYPE_MAX_VALUE (type for upper).
3313 Note that we never have to deal with SAVE_EXPRs here, because this has
3314 already been taken care of for us, in gimplify_init_ctor_preeval(). */
3316 static void gimplify_init_ctor_eval (tree, VEC(constructor_elt,gc) *,
3317 gimple_seq *, bool);
3320 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
3321 tree value, tree array_elt_type,
3322 gimple_seq *pre_p, bool cleared)
3324 tree loop_entry_label, loop_exit_label, fall_thru_label;
3325 tree var, var_type, cref, tmp;
3327 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
3328 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
3329 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
3331 /* Create and initialize the index variable. */
3332 var_type = TREE_TYPE (upper);
3333 var = create_tmp_var (var_type, NULL);
3334 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
3336 /* Add the loop entry label. */
3337 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
3339 /* Build the reference. */
3340 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3341 var, NULL_TREE, NULL_TREE);
3343 /* If we are a constructor, just call gimplify_init_ctor_eval to do
3344 the store. Otherwise just assign value to the reference. */
3346 if (TREE_CODE (value) == CONSTRUCTOR)
3347 /* NB we might have to call ourself recursively through
3348 gimplify_init_ctor_eval if the value is a constructor. */
3349 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3352 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
3354 /* We exit the loop when the index var is equal to the upper bound. */
3355 gimplify_seq_add_stmt (pre_p,
3356 gimple_build_cond (EQ_EXPR, var, upper,
3357 loop_exit_label, fall_thru_label));
3359 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
3361 /* Otherwise, increment the index var... */
3362 tmp = build2 (PLUS_EXPR, var_type, var,
3363 fold_convert (var_type, integer_one_node));
3364 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
3366 /* ...and jump back to the loop entry. */
3367 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
3369 /* Add the loop exit label. */
3370 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
3373 /* Return true if FDECL is accessing a field that is zero sized. */
3376 zero_sized_field_decl (const_tree fdecl)
3378 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3379 && integer_zerop (DECL_SIZE (fdecl)))
3384 /* Return true if TYPE is zero sized. */
3387 zero_sized_type (const_tree type)
3389 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3390 && integer_zerop (TYPE_SIZE (type)))
3395 /* A subroutine of gimplify_init_constructor. Generate individual
3396 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
3397 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
3398 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
3402 gimplify_init_ctor_eval (tree object, VEC(constructor_elt,gc) *elts,
3403 gimple_seq *pre_p, bool cleared)
3405 tree array_elt_type = NULL;
3406 unsigned HOST_WIDE_INT ix;
3407 tree purpose, value;
3409 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
3410 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
3412 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
3416 /* NULL values are created above for gimplification errors. */
3420 if (cleared && initializer_zerop (value))
3423 /* ??? Here's to hoping the front end fills in all of the indices,
3424 so we don't have to figure out what's missing ourselves. */
3425 gcc_assert (purpose);
3427 /* Skip zero-sized fields, unless value has side-effects. This can
3428 happen with calls to functions returning a zero-sized type, which
3429 we shouldn't discard. As a number of downstream passes don't
3430 expect sets of zero-sized fields, we rely on the gimplification of
3431 the MODIFY_EXPR we make below to drop the assignment statement. */
3432 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
3435 /* If we have a RANGE_EXPR, we have to build a loop to assign the
3437 if (TREE_CODE (purpose) == RANGE_EXPR)
3439 tree lower = TREE_OPERAND (purpose, 0);
3440 tree upper = TREE_OPERAND (purpose, 1);
3442 /* If the lower bound is equal to upper, just treat it as if
3443 upper was the index. */
3444 if (simple_cst_equal (lower, upper))
3448 gimplify_init_ctor_eval_range (object, lower, upper, value,
3449 array_elt_type, pre_p, cleared);
3456 /* Do not use bitsizetype for ARRAY_REF indices. */
3457 if (TYPE_DOMAIN (TREE_TYPE (object)))
3458 purpose = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
3460 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3461 purpose, NULL_TREE, NULL_TREE);
3465 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
3466 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
3467 unshare_expr (object), purpose, NULL_TREE);
3470 if (TREE_CODE (value) == CONSTRUCTOR
3471 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
3472 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3476 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
3477 gimplify_and_add (init, pre_p);
3484 /* Returns the appropriate RHS predicate for this LHS. */
3487 rhs_predicate_for (tree lhs)
3489 if (is_gimple_reg (lhs))
3490 return is_gimple_reg_rhs_or_call;
3492 return is_gimple_mem_rhs_or_call;
3495 /* Gimplify a C99 compound literal expression. This just means adding
3496 the DECL_EXPR before the current statement and using its anonymous
3499 static enum gimplify_status
3500 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p)
3502 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
3503 tree decl = DECL_EXPR_DECL (decl_s);
3504 /* Mark the decl as addressable if the compound literal
3505 expression is addressable now, otherwise it is marked too late
3506 after we gimplify the initialization expression. */
3507 if (TREE_ADDRESSABLE (*expr_p))
3508 TREE_ADDRESSABLE (decl) = 1;
3510 /* Preliminarily mark non-addressed complex variables as eligible
3511 for promotion to gimple registers. We'll transform their uses
3513 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
3514 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
3515 && !TREE_THIS_VOLATILE (decl)
3516 && !needs_to_live_in_memory (decl))
3517 DECL_GIMPLE_REG_P (decl) = 1;
3519 /* This decl isn't mentioned in the enclosing block, so add it to the
3520 list of temps. FIXME it seems a bit of a kludge to say that
3521 anonymous artificial vars aren't pushed, but everything else is. */
3522 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
3523 gimple_add_tmp_var (decl);
3525 gimplify_and_add (decl_s, pre_p);
3530 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
3531 return a new CONSTRUCTOR if something changed. */
3534 optimize_compound_literals_in_ctor (tree orig_ctor)
3536 tree ctor = orig_ctor;
3537 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (ctor);
3538 unsigned int idx, num = VEC_length (constructor_elt, elts);
3540 for (idx = 0; idx < num; idx++)
3542 tree value = VEC_index (constructor_elt, elts, idx)->value;
3543 tree newval = value;
3544 if (TREE_CODE (value) == CONSTRUCTOR)
3545 newval = optimize_compound_literals_in_ctor (value);
3546 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
3548 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
3549 tree decl = DECL_EXPR_DECL (decl_s);
3550 tree init = DECL_INITIAL (decl);
3552 if (!TREE_ADDRESSABLE (value)
3553 && !TREE_ADDRESSABLE (decl)
3555 newval = optimize_compound_literals_in_ctor (init);
3557 if (newval == value)
3560 if (ctor == orig_ctor)
3562 ctor = copy_node (orig_ctor);
3563 CONSTRUCTOR_ELTS (ctor) = VEC_copy (constructor_elt, gc, elts);
3564 elts = CONSTRUCTOR_ELTS (ctor);
3566 VEC_index (constructor_elt, elts, idx)->value = newval;
3573 /* A subroutine of gimplify_modify_expr. Break out elements of a
3574 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3576 Note that we still need to clear any elements that don't have explicit
3577 initializers, so if not all elements are initialized we keep the
3578 original MODIFY_EXPR, we just remove all of the constructor elements.
3580 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3581 GS_ERROR if we would have to create a temporary when gimplifying
3582 this constructor. Otherwise, return GS_OK.
3584 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
3586 static enum gimplify_status
3587 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3588 bool want_value, bool notify_temp_creation)
3590 tree object, ctor, type;
3591 enum gimplify_status ret;
3592 VEC(constructor_elt,gc) *elts;
3594 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
3596 if (!notify_temp_creation)
3598 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
3599 is_gimple_lvalue, fb_lvalue);
3600 if (ret == GS_ERROR)
3604 object = TREE_OPERAND (*expr_p, 0);
3605 ctor = TREE_OPERAND (*expr_p, 1) =
3606 optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
3607 type = TREE_TYPE (ctor);
3608 elts = CONSTRUCTOR_ELTS (ctor);
3611 switch (TREE_CODE (type))
3615 case QUAL_UNION_TYPE:
3618 struct gimplify_init_ctor_preeval_data preeval_data;
3619 HOST_WIDE_INT num_type_elements, num_ctor_elements;
3620 HOST_WIDE_INT num_nonzero_elements;
3621 bool cleared, valid_const_initializer;
3623 /* Aggregate types must lower constructors to initialization of
3624 individual elements. The exception is that a CONSTRUCTOR node
3625 with no elements indicates zero-initialization of the whole. */
3626 if (VEC_empty (constructor_elt, elts))
3628 if (notify_temp_creation)
3633 /* Fetch information about the constructor to direct later processing.
3634 We might want to make static versions of it in various cases, and
3635 can only do so if it known to be a valid constant initializer. */
3636 valid_const_initializer
3637 = categorize_ctor_elements (ctor, &num_nonzero_elements,
3638 &num_ctor_elements, &cleared);
3640 /* If a const aggregate variable is being initialized, then it
3641 should never be a lose to promote the variable to be static. */
3642 if (valid_const_initializer
3643 && num_nonzero_elements > 1
3644 && TREE_READONLY (object)
3645 && TREE_CODE (object) == VAR_DECL
3646 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
3648 if (notify_temp_creation)
3650 DECL_INITIAL (object) = ctor;
3651 TREE_STATIC (object) = 1;
3652 if (!DECL_NAME (object))
3653 DECL_NAME (object) = create_tmp_var_name ("C");
3654 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
3656 /* ??? C++ doesn't automatically append a .<number> to the
3657 assembler name, and even when it does, it looks a FE private
3658 data structures to figure out what that number should be,
3659 which are not set for this variable. I suppose this is
3660 important for local statics for inline functions, which aren't
3661 "local" in the object file sense. So in order to get a unique
3662 TU-local symbol, we must invoke the lhd version now. */
3663 lhd_set_decl_assembler_name (object);
3665 *expr_p = NULL_TREE;
3669 /* If there are "lots" of initialized elements, even discounting
3670 those that are not address constants (and thus *must* be
3671 computed at runtime), then partition the constructor into
3672 constant and non-constant parts. Block copy the constant
3673 parts in, then generate code for the non-constant parts. */
3674 /* TODO. There's code in cp/typeck.c to do this. */
3676 num_type_elements = count_type_elements (type, true);
3678 /* If count_type_elements could not determine number of type elements
3679 for a constant-sized object, assume clearing is needed.
3680 Don't do this for variable-sized objects, as store_constructor
3681 will ignore the clearing of variable-sized objects. */
3682 if (num_type_elements < 0 && int_size_in_bytes (type) >= 0)
3684 /* If there are "lots" of zeros, then block clear the object first. */
3685 else if (num_type_elements - num_nonzero_elements
3686 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
3687 && num_nonzero_elements < num_type_elements/4)
3689 /* ??? This bit ought not be needed. For any element not present
3690 in the initializer, we should simply set them to zero. Except
3691 we'd need to *find* the elements that are not present, and that
3692 requires trickery to avoid quadratic compile-time behavior in
3693 large cases or excessive memory use in small cases. */
3694 else if (num_ctor_elements < num_type_elements)
3697 /* If there are "lots" of initialized elements, and all of them
3698 are valid address constants, then the entire initializer can
3699 be dropped to memory, and then memcpy'd out. Don't do this
3700 for sparse arrays, though, as it's more efficient to follow
3701 the standard CONSTRUCTOR behavior of memset followed by
3702 individual element initialization. Also don't do this for small
3703 all-zero initializers (which aren't big enough to merit
3704 clearing), and don't try to make bitwise copies of
3705 TREE_ADDRESSABLE types. */
3706 if (valid_const_initializer
3707 && !(cleared || num_nonzero_elements == 0)
3708 && !TREE_ADDRESSABLE (type))
3710 HOST_WIDE_INT size = int_size_in_bytes (type);
3713 /* ??? We can still get unbounded array types, at least
3714 from the C++ front end. This seems wrong, but attempt
3715 to work around it for now. */
3718 size = int_size_in_bytes (TREE_TYPE (object));
3720 TREE_TYPE (ctor) = type = TREE_TYPE (object);
3723 /* Find the maximum alignment we can assume for the object. */
3724 /* ??? Make use of DECL_OFFSET_ALIGN. */
3725 if (DECL_P (object))
3726 align = DECL_ALIGN (object);
3728 align = TYPE_ALIGN (type);
3731 && num_nonzero_elements > 1
3732 && !can_move_by_pieces (size, align))
3736 if (notify_temp_creation)
3739 new_tree = create_tmp_var_raw (type, "C");
3741 gimple_add_tmp_var (new_tree);
3742 TREE_STATIC (new_tree) = 1;
3743 TREE_READONLY (new_tree) = 1;
3744 DECL_INITIAL (new_tree) = ctor;
3745 if (align > DECL_ALIGN (new_tree))
3747 DECL_ALIGN (new_tree) = align;
3748 DECL_USER_ALIGN (new_tree) = 1;
3750 walk_tree (&DECL_INITIAL (new_tree), force_labels_r, NULL, NULL);
3752 TREE_OPERAND (*expr_p, 1) = new_tree;
3754 /* This is no longer an assignment of a CONSTRUCTOR, but
3755 we still may have processing to do on the LHS. So
3756 pretend we didn't do anything here to let that happen. */
3757 return GS_UNHANDLED;
3761 if (notify_temp_creation)
3764 /* If there are nonzero elements, pre-evaluate to capture elements
3765 overlapping with the lhs into temporaries. We must do this before
3766 clearing to fetch the values before they are zeroed-out. */
3767 if (num_nonzero_elements > 0)
3769 preeval_data.lhs_base_decl = get_base_address (object);
3770 if (!DECL_P (preeval_data.lhs_base_decl))
3771 preeval_data.lhs_base_decl = NULL;
3772 preeval_data.lhs_alias_set = get_alias_set (object);
3774 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
3775 pre_p, post_p, &preeval_data);
3780 /* Zap the CONSTRUCTOR element list, which simplifies this case.
3781 Note that we still have to gimplify, in order to handle the
3782 case of variable sized types. Avoid shared tree structures. */
3783 CONSTRUCTOR_ELTS (ctor) = NULL;
3784 TREE_SIDE_EFFECTS (ctor) = 0;
3785 object = unshare_expr (object);
3786 gimplify_stmt (expr_p, pre_p);
3789 /* If we have not block cleared the object, or if there are nonzero
3790 elements in the constructor, add assignments to the individual
3791 scalar fields of the object. */
3792 if (!cleared || num_nonzero_elements > 0)
3793 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
3795 *expr_p = NULL_TREE;
3803 if (notify_temp_creation)
3806 /* Extract the real and imaginary parts out of the ctor. */
3807 gcc_assert (VEC_length (constructor_elt, elts) == 2);
3808 r = VEC_index (constructor_elt, elts, 0)->value;
3809 i = VEC_index (constructor_elt, elts, 1)->value;
3810 if (r == NULL || i == NULL)
3812 tree zero = fold_convert (TREE_TYPE (type), integer_zero_node);
3819 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
3820 represent creation of a complex value. */
3821 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
3823 ctor = build_complex (type, r, i);
3824 TREE_OPERAND (*expr_p, 1) = ctor;
3828 ctor = build2 (COMPLEX_EXPR, type, r, i);
3829 TREE_OPERAND (*expr_p, 1) = ctor;
3830 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
3833 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
3841 unsigned HOST_WIDE_INT ix;
3842 constructor_elt *ce;
3844 if (notify_temp_creation)
3847 /* Go ahead and simplify constant constructors to VECTOR_CST. */
3848 if (TREE_CONSTANT (ctor))
3850 bool constant_p = true;
3853 /* Even when ctor is constant, it might contain non-*_CST
3854 elements, such as addresses or trapping values like
3855 1.0/0.0 - 1.0/0.0. Such expressions don't belong
3856 in VECTOR_CST nodes. */
3857 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
3858 if (!CONSTANT_CLASS_P (value))
3866 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
3870 /* Don't reduce an initializer constant even if we can't
3871 make a VECTOR_CST. It won't do anything for us, and it'll
3872 prevent us from representing it as a single constant. */
3873 if (initializer_constant_valid_p (ctor, type))
3876 TREE_CONSTANT (ctor) = 0;
3879 /* Vector types use CONSTRUCTOR all the way through gimple
3880 compilation as a general initializer. */
3881 for (ix = 0; VEC_iterate (constructor_elt, elts, ix, ce); ix++)
3883 enum gimplify_status tret;
3884 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
3886 if (tret == GS_ERROR)
3889 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
3890 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
3895 /* So how did we get a CONSTRUCTOR for a scalar type? */
3899 if (ret == GS_ERROR)
3901 else if (want_value)
3908 /* If we have gimplified both sides of the initializer but have
3909 not emitted an assignment, do so now. */
3912 tree lhs = TREE_OPERAND (*expr_p, 0);
3913 tree rhs = TREE_OPERAND (*expr_p, 1);
3914 gimple init = gimple_build_assign (lhs, rhs);
3915 gimplify_seq_add_stmt (pre_p, init);
3923 /* Given a pointer value OP0, return a simplified version of an
3924 indirection through OP0, or NULL_TREE if no simplification is
3925 possible. Note that the resulting type may be different from
3926 the type pointed to in the sense that it is still compatible
3927 from the langhooks point of view. */
3930 gimple_fold_indirect_ref (tree t)
3932 tree type = TREE_TYPE (TREE_TYPE (t));
3936 STRIP_USELESS_TYPE_CONVERSION (sub);
3937 subtype = TREE_TYPE (sub);
3938 if (!POINTER_TYPE_P (subtype))
3941 if (TREE_CODE (sub) == ADDR_EXPR)
3943 tree op = TREE_OPERAND (sub, 0);
3944 tree optype = TREE_TYPE (op);
3946 if (useless_type_conversion_p (type, optype))
3949 /* *(foo *)&fooarray => fooarray[0] */
3950 if (TREE_CODE (optype) == ARRAY_TYPE
3951 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3953 tree type_domain = TYPE_DOMAIN (optype);
3954 tree min_val = size_zero_node;
3955 if (type_domain && TYPE_MIN_VALUE (type_domain))
3956 min_val = TYPE_MIN_VALUE (type_domain);
3957 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
3961 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
3962 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
3963 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
3966 tree min_val = size_zero_node;
3968 sub = gimple_fold_indirect_ref (sub);
3970 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
3971 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
3972 if (type_domain && TYPE_MIN_VALUE (type_domain))
3973 min_val = TYPE_MIN_VALUE (type_domain);
3974 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
3980 /* Given a pointer value OP0, return a simplified version of an
3981 indirection through OP0, or NULL_TREE if no simplification is
3982 possible. This may only be applied to a rhs of an expression.
3983 Note that the resulting type may be different from the type pointed
3984 to in the sense that it is still compatible from the langhooks
3988 gimple_fold_indirect_ref_rhs (tree t)
3990 return gimple_fold_indirect_ref (t);
3993 /* Subroutine of gimplify_modify_expr to do simplifications of
3994 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
3995 something changes. */
3997 static enum gimplify_status
3998 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
3999 gimple_seq *pre_p, gimple_seq *post_p,
4002 enum gimplify_status ret = GS_OK;
4004 while (ret != GS_UNHANDLED)
4005 switch (TREE_CODE (*from_p))
4008 /* If we're assigning from a read-only variable initialized with
4009 a constructor, do the direct assignment from the constructor,
4010 but only if neither source nor target are volatile since this
4011 latter assignment might end up being done on a per-field basis. */
4012 if (DECL_INITIAL (*from_p)
4013 && TREE_READONLY (*from_p)
4014 && !TREE_THIS_VOLATILE (*from_p)
4015 && !TREE_THIS_VOLATILE (*to_p)
4016 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
4018 tree old_from = *from_p;
4020 /* Move the constructor into the RHS. */
4021 *from_p = unshare_expr (DECL_INITIAL (*from_p));
4023 /* Let's see if gimplify_init_constructor will need to put
4024 it in memory. If so, revert the change. */
4025 ret = gimplify_init_constructor (expr_p, NULL, NULL, false, true);
4026 if (ret == GS_ERROR)
4041 /* If we have code like
4045 where the type of "x" is a (possibly cv-qualified variant
4046 of "A"), treat the entire expression as identical to "x".
4047 This kind of code arises in C++ when an object is bound
4048 to a const reference, and if "x" is a TARGET_EXPR we want
4049 to take advantage of the optimization below. */
4050 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
4063 /* If we are initializing something from a TARGET_EXPR, strip the
4064 TARGET_EXPR and initialize it directly, if possible. This can't
4065 be done if the initializer is void, since that implies that the
4066 temporary is set in some non-trivial way.
4068 ??? What about code that pulls out the temp and uses it
4069 elsewhere? I think that such code never uses the TARGET_EXPR as
4070 an initializer. If I'm wrong, we'll die because the temp won't
4071 have any RTL. In that case, I guess we'll need to replace
4072 references somehow. */
4073 tree init = TARGET_EXPR_INITIAL (*from_p);
4076 && !VOID_TYPE_P (TREE_TYPE (init)))
4087 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
4089 gimplify_compound_expr (from_p, pre_p, true);
4094 /* If we're initializing from a CONSTRUCTOR, break this into
4095 individual MODIFY_EXPRs. */
4096 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
4100 /* If we're assigning to a non-register type, push the assignment
4101 down into the branches. This is mandatory for ADDRESSABLE types,
4102 since we cannot generate temporaries for such, but it saves a
4103 copy in other cases as well. */
4104 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
4106 /* This code should mirror the code in gimplify_cond_expr. */
4107 enum tree_code code = TREE_CODE (*expr_p);
4108 tree cond = *from_p;
4109 tree result = *to_p;
4111 ret = gimplify_expr (&result, pre_p, post_p,
4112 is_gimple_lvalue, fb_lvalue);
4113 if (ret != GS_ERROR)
4116 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
4117 TREE_OPERAND (cond, 1)
4118 = build2 (code, void_type_node, result,
4119 TREE_OPERAND (cond, 1));
4120 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
4121 TREE_OPERAND (cond, 2)
4122 = build2 (code, void_type_node, unshare_expr (result),
4123 TREE_OPERAND (cond, 2));
4125 TREE_TYPE (cond) = void_type_node;
4126 recalculate_side_effects (cond);
4130 gimplify_and_add (cond, pre_p);
4131 *expr_p = unshare_expr (result);
4142 /* For calls that return in memory, give *to_p as the CALL_EXPR's
4143 return slot so that we don't generate a temporary. */
4144 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
4145 && aggregate_value_p (*from_p, *from_p))
4149 if (!(rhs_predicate_for (*to_p))(*from_p))
4150 /* If we need a temporary, *to_p isn't accurate. */
4152 else if (TREE_CODE (*to_p) == RESULT_DECL
4153 && DECL_NAME (*to_p) == NULL_TREE
4154 && needs_to_live_in_memory (*to_p))
4155 /* It's OK to use the return slot directly unless it's an NRV. */
4157 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
4158 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
4159 /* Don't force regs into memory. */
4161 else if (TREE_CODE (*expr_p) == INIT_EXPR)
4162 /* It's OK to use the target directly if it's being
4165 else if (!is_gimple_non_addressable (*to_p))
4166 /* Don't use the original target if it's already addressable;
4167 if its address escapes, and the called function uses the
4168 NRV optimization, a conforming program could see *to_p
4169 change before the called function returns; see c++/19317.
4170 When optimizing, the return_slot pass marks more functions
4171 as safe after we have escape info. */
4178 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
4179 mark_addressable (*to_p);
4186 /* If we're initializing from a container, push the initialization
4188 case CLEANUP_POINT_EXPR:
4190 case STATEMENT_LIST:
4192 tree wrap = *from_p;
4195 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
4197 if (ret != GS_ERROR)
4200 t = voidify_wrapper_expr (wrap, *expr_p);
4201 gcc_assert (t == *expr_p);
4205 gimplify_and_add (wrap, pre_p);
4206 *expr_p = unshare_expr (*to_p);
4213 case COMPOUND_LITERAL_EXPR:
4215 tree complit = TREE_OPERAND (*expr_p, 1);
4216 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
4217 tree decl = DECL_EXPR_DECL (decl_s);
4218 tree init = DECL_INITIAL (decl);
4220 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
4221 into struct T x = { 0, 1, 2 } if the address of the
4222 compound literal has never been taken. */
4223 if (!TREE_ADDRESSABLE (complit)
4224 && !TREE_ADDRESSABLE (decl)
4227 *expr_p = copy_node (*expr_p);
4228 TREE_OPERAND (*expr_p, 1) = init;
4242 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
4243 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4244 DECL_GIMPLE_REG_P set.
4246 IMPORTANT NOTE: This promotion is performed by introducing a load of the
4247 other, unmodified part of the complex object just before the total store.
4248 As a consequence, if the object is still uninitialized, an undefined value
4249 will be loaded into a register, which may result in a spurious exception
4250 if the register is floating-point and the value happens to be a signaling
4251 NaN for example. Then the fully-fledged complex operations lowering pass
4252 followed by a DCE pass are necessary in order to fix things up. */
4254 static enum gimplify_status
4255 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4258 enum tree_code code, ocode;
4259 tree lhs, rhs, new_rhs, other, realpart, imagpart;
4261 lhs = TREE_OPERAND (*expr_p, 0);
4262 rhs = TREE_OPERAND (*expr_p, 1);
4263 code = TREE_CODE (lhs);
4264 lhs = TREE_OPERAND (lhs, 0);
4266 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4267 other = build1 (ocode, TREE_TYPE (rhs), lhs);
4268 other = get_formal_tmp_var (other, pre_p);
4270 realpart = code == REALPART_EXPR ? rhs : other;
4271 imagpart = code == REALPART_EXPR ? other : rhs;
4273 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4274 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4276 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4278 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4279 *expr_p = (want_value) ? rhs : NULL_TREE;
4285 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
4291 PRE_P points to the list where side effects that must happen before
4292 *EXPR_P should be stored.
4294 POST_P points to the list where side effects that must happen after
4295 *EXPR_P should be stored.
4297 WANT_VALUE is nonzero iff we want to use the value of this expression
4298 in another expression. */
4300 static enum gimplify_status
4301 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4304 tree *from_p = &TREE_OPERAND (*expr_p, 1);
4305 tree *to_p = &TREE_OPERAND (*expr_p, 0);
4306 enum gimplify_status ret = GS_UNHANDLED;
4308 location_t loc = EXPR_LOCATION (*expr_p);
4310 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
4311 || TREE_CODE (*expr_p) == INIT_EXPR);
4313 /* Insert pointer conversions required by the middle-end that are not
4314 required by the frontend. This fixes middle-end type checking for
4315 for example gcc.dg/redecl-6.c. */
4316 if (POINTER_TYPE_P (TREE_TYPE (*to_p))
4317 && lang_hooks.types_compatible_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
4319 STRIP_USELESS_TYPE_CONVERSION (*from_p);
4320 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
4321 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
4324 /* See if any simplifications can be done based on what the RHS is. */
4325 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4327 if (ret != GS_UNHANDLED)
4330 /* For zero sized types only gimplify the left hand side and right hand
4331 side as statements and throw away the assignment. Do this after
4332 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
4334 if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
4336 gimplify_stmt (from_p, pre_p);
4337 gimplify_stmt (to_p, pre_p);
4338 *expr_p = NULL_TREE;
4342 /* If the value being copied is of variable width, compute the length
4343 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
4344 before gimplifying any of the operands so that we can resolve any
4345 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
4346 the size of the expression to be copied, not of the destination, so
4347 that is what we must do here. */
4348 maybe_with_size_expr (from_p);
4350 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4351 if (ret == GS_ERROR)
4354 /* As a special case, we have to temporarily allow for assignments
4355 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
4356 a toplevel statement, when gimplifying the GENERIC expression
4357 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
4358 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
4360 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
4361 prevent gimplify_expr from trying to create a new temporary for
4362 foo's LHS, we tell it that it should only gimplify until it
4363 reaches the CALL_EXPR. On return from gimplify_expr, the newly
4364 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
4365 and all we need to do here is set 'a' to be its LHS. */
4366 ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
4368 if (ret == GS_ERROR)
4371 /* Now see if the above changed *from_p to something we handle specially. */
4372 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4374 if (ret != GS_UNHANDLED)
4377 /* If we've got a variable sized assignment between two lvalues (i.e. does
4378 not involve a call), then we can make things a bit more straightforward
4379 by converting the assignment to memcpy or memset. */
4380 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4382 tree from = TREE_OPERAND (*from_p, 0);
4383 tree size = TREE_OPERAND (*from_p, 1);
4385 if (TREE_CODE (from) == CONSTRUCTOR)
4386 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
4388 if (is_gimple_addressable (from))
4391 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
4396 /* Transform partial stores to non-addressable complex variables into
4397 total stores. This allows us to use real instead of virtual operands
4398 for these variables, which improves optimization. */
4399 if ((TREE_CODE (*to_p) == REALPART_EXPR
4400 || TREE_CODE (*to_p) == IMAGPART_EXPR)
4401 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
4402 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
4404 /* Try to alleviate the effects of the gimplification creating artificial
4405 temporaries (see for example is_gimple_reg_rhs) on the debug info. */
4406 if (!gimplify_ctxp->into_ssa
4408 && DECL_IGNORED_P (*from_p)
4410 && !DECL_IGNORED_P (*to_p))
4412 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
4414 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
4415 DECL_DEBUG_EXPR_IS_FROM (*from_p) = 1;
4416 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
4419 if (TREE_CODE (*from_p) == CALL_EXPR)
4421 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
4422 instead of a GIMPLE_ASSIGN. */
4423 assign = gimple_build_call_from_tree (*from_p);
4424 gimple_call_set_lhs (assign, *to_p);
4428 assign = gimple_build_assign (*to_p, *from_p);
4429 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
4432 gimplify_seq_add_stmt (pre_p, assign);
4434 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
4436 /* If we've somehow already got an SSA_NAME on the LHS, then
4437 we've probably modified it twice. Not good. */
4438 gcc_assert (TREE_CODE (*to_p) != SSA_NAME);
4439 *to_p = make_ssa_name (*to_p, assign);
4440 gimple_set_lhs (assign, *to_p);
4445 *expr_p = unshare_expr (*to_p);
4454 /* Gimplify a comparison between two variable-sized objects. Do this
4455 with a call to BUILT_IN_MEMCMP. */
4457 static enum gimplify_status
4458 gimplify_variable_sized_compare (tree *expr_p)
4460 tree op0 = TREE_OPERAND (*expr_p, 0);
4461 tree op1 = TREE_OPERAND (*expr_p, 1);
4462 tree t, arg, dest, src;
4463 location_t loc = EXPR_LOCATION (*expr_p);
4465 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4466 arg = unshare_expr (arg);
4467 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
4468 src = build_fold_addr_expr_loc (loc, op1);
4469 dest = build_fold_addr_expr_loc (loc, op0);
4470 t = implicit_built_in_decls[BUILT_IN_MEMCMP];
4471 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
4473 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
4478 /* Gimplify a comparison between two aggregate objects of integral scalar
4479 mode as a comparison between the bitwise equivalent scalar values. */
4481 static enum gimplify_status
4482 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4484 location_t loc = EXPR_LOCATION (*expr_p);
4485 tree op0 = TREE_OPERAND (*expr_p, 0);
4486 tree op1 = TREE_OPERAND (*expr_p, 1);
4488 tree type = TREE_TYPE (op0);
4489 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4491 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
4492 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
4495 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
4500 /* Gimplify TRUTH_ANDIF_EXPR and TRUTH_ORIF_EXPR expressions. EXPR_P
4501 points to the expression to gimplify.
4503 Expressions of the form 'a && b' are gimplified to:
4505 a && b ? true : false
4507 LOCUS is the source location to be put on the generated COND_EXPR.
4508 gimplify_cond_expr will do the rest. */
4510 static enum gimplify_status
4511 gimplify_boolean_expr (tree *expr_p, location_t locus)
4513 /* Preserve the original type of the expression. */
4514 tree type = TREE_TYPE (*expr_p);
4516 *expr_p = build3 (COND_EXPR, type, *expr_p,
4517 fold_convert_loc (locus, type, boolean_true_node),
4518 fold_convert_loc (locus, type, boolean_false_node));
4520 SET_EXPR_LOCATION (*expr_p, locus);
4525 /* Gimplifies an expression sequence. This function gimplifies each
4526 expression and re-writes the original expression with the last
4527 expression of the sequence in GIMPLE form.
4529 PRE_P points to the list where the side effects for all the
4530 expressions in the sequence will be emitted.
4532 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
4534 static enum gimplify_status
4535 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
4541 tree *sub_p = &TREE_OPERAND (t, 0);
4543 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4544 gimplify_compound_expr (sub_p, pre_p, false);
4546 gimplify_stmt (sub_p, pre_p);
4548 t = TREE_OPERAND (t, 1);
4550 while (TREE_CODE (t) == COMPOUND_EXPR);
4557 gimplify_stmt (expr_p, pre_p);
4563 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
4564 gimplify. After gimplification, EXPR_P will point to a new temporary
4565 that holds the original value of the SAVE_EXPR node.
4567 PRE_P points to the list where side effects that must happen before
4568 *EXPR_P should be stored. */
4570 static enum gimplify_status
4571 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4573 enum gimplify_status ret = GS_ALL_DONE;
4576 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
4577 val = TREE_OPERAND (*expr_p, 0);
4579 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
4580 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
4582 /* The operand may be a void-valued expression such as SAVE_EXPRs
4583 generated by the Java frontend for class initialization. It is
4584 being executed only for its side-effects. */
4585 if (TREE_TYPE (val) == void_type_node)
4587 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4588 is_gimple_stmt, fb_none);
4592 val = get_initialized_tmp_var (val, pre_p, post_p);
4594 TREE_OPERAND (*expr_p, 0) = val;
4595 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
4603 /* Re-write the ADDR_EXPR node pointed to by EXPR_P
4610 PRE_P points to the list where side effects that must happen before
4611 *EXPR_P should be stored.
4613 POST_P points to the list where side effects that must happen after
4614 *EXPR_P should be stored. */
4616 static enum gimplify_status
4617 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4619 tree expr = *expr_p;
4620 tree op0 = TREE_OPERAND (expr, 0);
4621 enum gimplify_status ret;
4622 location_t loc = EXPR_LOCATION (*expr_p);
4624 switch (TREE_CODE (op0))
4627 case MISALIGNED_INDIRECT_REF:
4629 /* Check if we are dealing with an expression of the form '&*ptr'.
4630 While the front end folds away '&*ptr' into 'ptr', these
4631 expressions may be generated internally by the compiler (e.g.,
4632 builtins like __builtin_va_end). */
4633 /* Caution: the silent array decomposition semantics we allow for
4634 ADDR_EXPR means we can't always discard the pair. */
4635 /* Gimplification of the ADDR_EXPR operand may drop
4636 cv-qualification conversions, so make sure we add them if
4639 tree op00 = TREE_OPERAND (op0, 0);
4640 tree t_expr = TREE_TYPE (expr);
4641 tree t_op00 = TREE_TYPE (op00);
4643 if (!useless_type_conversion_p (t_expr, t_op00))
4644 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
4650 case VIEW_CONVERT_EXPR:
4651 /* Take the address of our operand and then convert it to the type of
4654 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
4655 all clear. The impact of this transformation is even less clear. */
4657 /* If the operand is a useless conversion, look through it. Doing so
4658 guarantees that the ADDR_EXPR and its operand will remain of the
4660 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
4661 op0 = TREE_OPERAND (op0, 0);
4663 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
4664 build_fold_addr_expr_loc (loc,
4665 TREE_OPERAND (op0, 0)));
4670 /* We use fb_either here because the C frontend sometimes takes
4671 the address of a call that returns a struct; see
4672 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
4673 the implied temporary explicit. */
4675 /* Mark the RHS addressable. */
4676 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
4677 is_gimple_addressable, fb_either);
4678 if (ret == GS_ERROR)
4681 /* We cannot rely on making the RHS addressable if it is
4682 a temporary created by gimplification. In this case create a
4683 new temporary that is initialized by a copy (which will
4684 become a store after we mark it addressable).
4685 This mostly happens if the frontend passed us something that
4686 it could not mark addressable yet, like a fortran
4687 pass-by-reference parameter (int) floatvar. */
4688 if (is_gimple_reg (TREE_OPERAND (expr, 0)))
4689 TREE_OPERAND (expr, 0)
4690 = get_initialized_tmp_var (TREE_OPERAND (expr, 0), pre_p, post_p);
4692 op0 = TREE_OPERAND (expr, 0);
4694 /* For various reasons, the gimplification of the expression
4695 may have made a new INDIRECT_REF. */
4696 if (TREE_CODE (op0) == INDIRECT_REF)
4697 goto do_indirect_ref;
4699 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
4700 recompute_tree_invariant_for_addr_expr (expr);
4702 mark_addressable (TREE_OPERAND (expr, 0));
4709 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
4710 value; output operands should be a gimple lvalue. */
4712 static enum gimplify_status
4713 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4717 const char **oconstraints;
4720 const char *constraint;
4721 bool allows_mem, allows_reg, is_inout;
4722 enum gimplify_status ret, tret;
4724 VEC(tree, gc) *inputs;
4725 VEC(tree, gc) *outputs;
4726 VEC(tree, gc) *clobbers;
4730 noutputs = list_length (ASM_OUTPUTS (expr));
4731 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
4733 inputs = outputs = clobbers = NULL;
4736 link_next = NULL_TREE;
4737 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
4740 size_t constraint_len;
4742 link_next = TREE_CHAIN (link);
4746 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4747 constraint_len = strlen (constraint);
4748 if (constraint_len == 0)
4751 ok = parse_output_constraint (&constraint, i, 0, 0,
4752 &allows_mem, &allows_reg, &is_inout);
4759 if (!allows_reg && allows_mem)
4760 mark_addressable (TREE_VALUE (link));
4762 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4763 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
4764 fb_lvalue | fb_mayfail);
4765 if (tret == GS_ERROR)
4767 error ("invalid lvalue in asm output %d", i);
4771 VEC_safe_push (tree, gc, outputs, link);
4772 TREE_CHAIN (link) = NULL_TREE;
4776 /* An input/output operand. To give the optimizers more
4777 flexibility, split it into separate input and output
4782 /* Turn the in/out constraint into an output constraint. */
4783 char *p = xstrdup (constraint);
4785 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
4787 /* And add a matching input constraint. */
4790 sprintf (buf, "%d", i);
4792 /* If there are multiple alternatives in the constraint,
4793 handle each of them individually. Those that allow register
4794 will be replaced with operand number, the others will stay
4796 if (strchr (p, ',') != NULL)
4798 size_t len = 0, buflen = strlen (buf);
4799 char *beg, *end, *str, *dst;
4803 end = strchr (beg, ',');
4805 end = strchr (beg, '\0');
4806 if ((size_t) (end - beg) < buflen)
4809 len += end - beg + 1;
4816 str = (char *) alloca (len);
4817 for (beg = p + 1, dst = str;;)
4820 bool mem_p, reg_p, inout_p;
4822 end = strchr (beg, ',');
4827 parse_output_constraint (&tem, i, 0, 0,
4828 &mem_p, ®_p, &inout_p);
4833 memcpy (dst, buf, buflen);
4842 memcpy (dst, beg, len);
4851 input = build_string (dst - str, str);
4854 input = build_string (strlen (buf), buf);
4857 input = build_string (constraint_len - 1, constraint + 1);
4861 input = build_tree_list (build_tree_list (NULL_TREE, input),
4862 unshare_expr (TREE_VALUE (link)));
4863 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
4867 link_next = NULL_TREE;
4868 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
4870 link_next = TREE_CHAIN (link);
4871 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4872 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4873 oconstraints, &allows_mem, &allows_reg);
4875 /* If we can't make copies, we can only accept memory. */
4876 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
4882 error ("impossible constraint in %<asm%>");
4883 error ("non-memory input %d must stay in memory", i);
4888 /* If the operand is a memory input, it should be an lvalue. */
4889 if (!allows_reg && allows_mem)
4891 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4892 is_gimple_lvalue, fb_lvalue | fb_mayfail);
4893 mark_addressable (TREE_VALUE (link));
4894 if (tret == GS_ERROR)
4896 if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
4897 input_location = EXPR_LOCATION (TREE_VALUE (link));
4898 error ("memory input %d is not directly addressable", i);
4904 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4905 is_gimple_asm_val, fb_rvalue);
4906 if (tret == GS_ERROR)
4910 TREE_CHAIN (link) = NULL_TREE;
4911 VEC_safe_push (tree, gc, inputs, link);
4914 for (link = ASM_CLOBBERS (expr); link; ++i, link = TREE_CHAIN (link))
4915 VEC_safe_push (tree, gc, clobbers, link);
4917 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
4918 inputs, outputs, clobbers);
4920 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
4921 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
4923 gimplify_seq_add_stmt (pre_p, stmt);
4928 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
4929 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
4930 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
4931 return to this function.
4933 FIXME should we complexify the prequeue handling instead? Or use flags
4934 for all the cleanups and let the optimizer tighten them up? The current
4935 code seems pretty fragile; it will break on a cleanup within any
4936 non-conditional nesting. But any such nesting would be broken, anyway;
4937 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
4938 and continues out of it. We can do that at the RTL level, though, so
4939 having an optimizer to tighten up try/finally regions would be a Good
4942 static enum gimplify_status
4943 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
4945 gimple_stmt_iterator iter;
4946 gimple_seq body_sequence = NULL;
4948 tree temp = voidify_wrapper_expr (*expr_p, NULL);
4950 /* We only care about the number of conditions between the innermost
4951 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
4952 any cleanups collected outside the CLEANUP_POINT_EXPR. */
4953 int old_conds = gimplify_ctxp->conditions;
4954 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
4955 gimplify_ctxp->conditions = 0;
4956 gimplify_ctxp->conditional_cleanups = NULL;
4958 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
4960 gimplify_ctxp->conditions = old_conds;
4961 gimplify_ctxp->conditional_cleanups = old_cleanups;
4963 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
4965 gimple wce = gsi_stmt (iter);
4967 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
4969 if (gsi_one_before_end_p (iter))
4971 /* Note that gsi_insert_seq_before and gsi_remove do not
4972 scan operands, unlike some other sequence mutators. */
4973 gsi_insert_seq_before_without_update (&iter,
4974 gimple_wce_cleanup (wce),
4976 gsi_remove (&iter, true);
4983 enum gimple_try_flags kind;
4985 if (gimple_wce_cleanup_eh_only (wce))
4986 kind = GIMPLE_TRY_CATCH;
4988 kind = GIMPLE_TRY_FINALLY;
4989 seq = gsi_split_seq_after (iter);
4991 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
4992 /* Do not use gsi_replace here, as it may scan operands.
4993 We want to do a simple structural modification only. */
4994 *gsi_stmt_ptr (&iter) = gtry;
4995 iter = gsi_start (seq);
5002 gimplify_seq_add_seq (pre_p, body_sequence);
5015 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
5016 is the cleanup action required. EH_ONLY is true if the cleanup should
5017 only be executed if an exception is thrown, not on normal exit. */
5020 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
5023 gimple_seq cleanup_stmts = NULL;
5025 /* Errors can result in improperly nested cleanups. Which results in
5026 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
5027 if (errorcount || sorrycount)
5030 if (gimple_conditional_context ())
5032 /* If we're in a conditional context, this is more complex. We only
5033 want to run the cleanup if we actually ran the initialization that
5034 necessitates it, but we want to run it after the end of the
5035 conditional context. So we wrap the try/finally around the
5036 condition and use a flag to determine whether or not to actually
5037 run the destructor. Thus
5041 becomes (approximately)
5045 if (test) { A::A(temp); flag = 1; val = f(temp); }
5048 if (flag) A::~A(temp);
5052 tree flag = create_tmp_var (boolean_type_node, "cleanup");
5053 gimple ffalse = gimple_build_assign (flag, boolean_false_node);
5054 gimple ftrue = gimple_build_assign (flag, boolean_true_node);
5056 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
5057 gimplify_stmt (&cleanup, &cleanup_stmts);
5058 wce = gimple_build_wce (cleanup_stmts);
5060 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
5061 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
5062 gimplify_seq_add_stmt (pre_p, ftrue);
5064 /* Because of this manipulation, and the EH edges that jump
5065 threading cannot redirect, the temporary (VAR) will appear
5066 to be used uninitialized. Don't warn. */
5067 TREE_NO_WARNING (var) = 1;
5071 gimplify_stmt (&cleanup, &cleanup_stmts);
5072 wce = gimple_build_wce (cleanup_stmts);
5073 gimple_wce_set_cleanup_eh_only (wce, eh_only);
5074 gimplify_seq_add_stmt (pre_p, wce);
5078 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
5080 static enum gimplify_status
5081 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5083 tree targ = *expr_p;
5084 tree temp = TARGET_EXPR_SLOT (targ);
5085 tree init = TARGET_EXPR_INITIAL (targ);
5086 enum gimplify_status ret;
5090 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
5091 to the temps list. Handle also variable length TARGET_EXPRs. */
5092 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
5094 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
5095 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
5096 gimplify_vla_decl (temp, pre_p);
5099 gimple_add_tmp_var (temp);
5101 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
5102 expression is supposed to initialize the slot. */
5103 if (VOID_TYPE_P (TREE_TYPE (init)))
5104 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5107 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
5109 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5111 ggc_free (init_expr);
5113 if (ret == GS_ERROR)
5115 /* PR c++/28266 Make sure this is expanded only once. */
5116 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5120 gimplify_and_add (init, pre_p);
5122 /* If needed, push the cleanup for the temp. */
5123 if (TARGET_EXPR_CLEANUP (targ))
5124 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
5125 CLEANUP_EH_ONLY (targ), pre_p);
5127 /* Only expand this once. */
5128 TREE_OPERAND (targ, 3) = init;
5129 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5132 /* We should have expanded this before. */
5133 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
5139 /* Gimplification of expression trees. */
5141 /* Gimplify an expression which appears at statement context. The
5142 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
5143 NULL, a new sequence is allocated.
5145 Return true if we actually added a statement to the queue. */
5148 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
5150 gimple_seq_node last;
5153 *seq_p = gimple_seq_alloc ();
5155 last = gimple_seq_last (*seq_p);
5156 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5157 return last != gimple_seq_last (*seq_p);
5161 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
5162 to CTX. If entries already exist, force them to be some flavor of private.
5163 If there is no enclosing parallel, do nothing. */
5166 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5170 if (decl == NULL || !DECL_P (decl))
5175 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5178 if (n->value & GOVD_SHARED)
5179 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5183 else if (ctx->region_type != ORT_WORKSHARE)
5184 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5186 ctx = ctx->outer_context;
5191 /* Similarly for each of the type sizes of TYPE. */
5194 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5196 if (type == NULL || type == error_mark_node)
5198 type = TYPE_MAIN_VARIANT (type);
5200 if (pointer_set_insert (ctx->privatized_types, type))
5203 switch (TREE_CODE (type))
5209 case FIXED_POINT_TYPE:
5210 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5211 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5215 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5216 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5221 case QUAL_UNION_TYPE:
5224 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
5225 if (TREE_CODE (field) == FIELD_DECL)
5227 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5228 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5234 case REFERENCE_TYPE:
5235 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5242 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5243 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5244 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5247 /* Add an entry for DECL in the OpenMP context CTX with FLAGS. */
5250 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
5253 unsigned int nflags;
5256 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5259 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
5260 there are constructors involved somewhere. */
5261 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
5262 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
5265 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5268 /* We shouldn't be re-adding the decl with the same data
5270 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
5271 /* The only combination of data sharing classes we should see is
5272 FIRSTPRIVATE and LASTPRIVATE. */
5273 nflags = n->value | flags;
5274 gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
5275 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE));
5280 /* When adding a variable-sized variable, we have to handle all sorts
5281 of additional bits of data: the pointer replacement variable, and
5282 the parameters of the type. */
5283 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5285 /* Add the pointer replacement variable as PRIVATE if the variable
5286 replacement is private, else FIRSTPRIVATE since we'll need the
5287 address of the original variable either for SHARED, or for the
5288 copy into or out of the context. */
5289 if (!(flags & GOVD_LOCAL))
5291 nflags = flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
5292 nflags |= flags & GOVD_SEEN;
5293 t = DECL_VALUE_EXPR (decl);
5294 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5295 t = TREE_OPERAND (t, 0);
5296 gcc_assert (DECL_P (t));
5297 omp_add_variable (ctx, t, nflags);
5300 /* Add all of the variable and type parameters (which should have
5301 been gimplified to a formal temporary) as FIRSTPRIVATE. */
5302 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
5303 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
5304 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5306 /* The variable-sized variable itself is never SHARED, only some form
5307 of PRIVATE. The sharing would take place via the pointer variable
5308 which we remapped above. */
5309 if (flags & GOVD_SHARED)
5310 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
5311 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
5313 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
5314 alloca statement we generate for the variable, so make sure it
5315 is available. This isn't automatically needed for the SHARED
5316 case, since we won't be allocating local storage then.
5317 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
5318 in this case omp_notice_variable will be called later
5319 on when it is gimplified. */
5320 else if (! (flags & GOVD_LOCAL))
5321 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
5323 else if (lang_hooks.decls.omp_privatize_by_reference (decl))
5325 gcc_assert ((flags & GOVD_LOCAL) == 0);
5326 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5328 /* Similar to the direct variable sized case above, we'll need the
5329 size of references being privatized. */
5330 if ((flags & GOVD_SHARED) == 0)
5332 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
5333 if (TREE_CODE (t) != INTEGER_CST)
5334 omp_notice_variable (ctx, t, true);
5338 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
5341 /* Record the fact that DECL was used within the OpenMP context CTX.
5342 IN_CODE is true when real code uses DECL, and false when we should
5343 merely emit default(none) errors. Return true if DECL is going to
5344 be remapped and thus DECL shouldn't be gimplified into its
5345 DECL_VALUE_EXPR (if any). */
5348 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
5351 unsigned flags = in_code ? GOVD_SEEN : 0;
5352 bool ret = false, shared;
5354 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5357 /* Threadprivate variables are predetermined. */
5358 if (is_global_var (decl))
5360 if (DECL_THREAD_LOCAL_P (decl))
5363 if (DECL_HAS_VALUE_EXPR_P (decl))
5365 tree value = get_base_address (DECL_VALUE_EXPR (decl));
5367 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
5372 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5375 enum omp_clause_default_kind default_kind, kind;
5376 struct gimplify_omp_ctx *octx;
5378 if (ctx->region_type == ORT_WORKSHARE)
5381 /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
5382 remapped firstprivate instead of shared. To some extent this is
5383 addressed in omp_firstprivatize_type_sizes, but not effectively. */
5384 default_kind = ctx->default_kind;
5385 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
5386 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
5387 default_kind = kind;
5389 switch (default_kind)
5391 case OMP_CLAUSE_DEFAULT_NONE:
5392 error ("%qE not specified in enclosing parallel",
5394 error_at (ctx->location, "enclosing parallel");
5396 case OMP_CLAUSE_DEFAULT_SHARED:
5397 flags |= GOVD_SHARED;
5399 case OMP_CLAUSE_DEFAULT_PRIVATE:
5400 flags |= GOVD_PRIVATE;
5402 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
5403 flags |= GOVD_FIRSTPRIVATE;
5405 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
5406 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
5407 gcc_assert (ctx->region_type == ORT_TASK);
5408 if (ctx->outer_context)
5409 omp_notice_variable (ctx->outer_context, decl, in_code);
5410 for (octx = ctx->outer_context; octx; octx = octx->outer_context)
5414 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
5415 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
5417 flags |= GOVD_FIRSTPRIVATE;
5420 if ((octx->region_type & ORT_PARALLEL) != 0)
5423 if (flags & GOVD_FIRSTPRIVATE)
5426 && (TREE_CODE (decl) == PARM_DECL
5427 || (!is_global_var (decl)
5428 && DECL_CONTEXT (decl) == current_function_decl)))
5430 flags |= GOVD_FIRSTPRIVATE;
5433 flags |= GOVD_SHARED;
5439 if ((flags & GOVD_PRIVATE)
5440 && lang_hooks.decls.omp_private_outer_ref (decl))
5441 flags |= GOVD_PRIVATE_OUTER_REF;
5443 omp_add_variable (ctx, decl, flags);
5445 shared = (flags & GOVD_SHARED) != 0;
5446 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5450 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
5451 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
5453 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5456 tree t = DECL_VALUE_EXPR (decl);
5457 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5458 t = TREE_OPERAND (t, 0);
5459 gcc_assert (DECL_P (t));
5460 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
5461 n2->value |= GOVD_SEEN;
5464 shared = ((flags | n->value) & GOVD_SHARED) != 0;
5465 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5467 /* If nothing changed, there's nothing left to do. */
5468 if ((n->value & flags) == flags)
5474 /* If the variable is private in the current context, then we don't
5475 need to propagate anything to an outer context. */
5476 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
5478 if (ctx->outer_context
5479 && omp_notice_variable (ctx->outer_context, decl, in_code))
5484 /* Verify that DECL is private within CTX. If there's specific information
5485 to the contrary in the innermost scope, generate an error. */
5488 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl)
5492 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5495 if (n->value & GOVD_SHARED)
5497 if (ctx == gimplify_omp_ctxp)
5499 error ("iteration variable %qE should be private",
5501 n->value = GOVD_PRIVATE;
5507 else if ((n->value & GOVD_EXPLICIT) != 0
5508 && (ctx == gimplify_omp_ctxp
5509 || (ctx->region_type == ORT_COMBINED_PARALLEL
5510 && gimplify_omp_ctxp->outer_context == ctx)))
5512 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
5513 error ("iteration variable %qE should not be firstprivate",
5515 else if ((n->value & GOVD_REDUCTION) != 0)
5516 error ("iteration variable %qE should not be reduction",
5519 return (ctx == gimplify_omp_ctxp
5520 || (ctx->region_type == ORT_COMBINED_PARALLEL
5521 && gimplify_omp_ctxp->outer_context == ctx));
5524 if (ctx->region_type != ORT_WORKSHARE)
5526 else if (ctx->outer_context)
5527 return omp_is_private (ctx->outer_context, decl);
5531 /* Return true if DECL is private within a parallel region
5532 that binds to the current construct's context or in parallel
5533 region's REDUCTION clause. */
5536 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl)
5542 ctx = ctx->outer_context;
5544 return !(is_global_var (decl)
5545 /* References might be private, but might be shared too. */
5546 || lang_hooks.decls.omp_privatize_by_reference (decl));
5548 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5550 return (n->value & GOVD_SHARED) == 0;
5552 while (ctx->region_type == ORT_WORKSHARE);
5556 /* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
5557 and previous omp contexts. */
5560 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
5561 enum omp_region_type region_type)
5563 struct gimplify_omp_ctx *ctx, *outer_ctx;
5564 struct gimplify_ctx gctx;
5567 ctx = new_omp_context (region_type);
5568 outer_ctx = ctx->outer_context;
5570 while ((c = *list_p) != NULL)
5572 bool remove = false;
5573 bool notice_outer = true;
5574 const char *check_non_private = NULL;
5578 switch (OMP_CLAUSE_CODE (c))
5580 case OMP_CLAUSE_PRIVATE:
5581 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
5582 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
5584 flags |= GOVD_PRIVATE_OUTER_REF;
5585 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
5588 notice_outer = false;
5590 case OMP_CLAUSE_SHARED:
5591 flags = GOVD_SHARED | GOVD_EXPLICIT;
5593 case OMP_CLAUSE_FIRSTPRIVATE:
5594 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
5595 check_non_private = "firstprivate";
5597 case OMP_CLAUSE_LASTPRIVATE:
5598 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
5599 check_non_private = "lastprivate";
5601 case OMP_CLAUSE_REDUCTION:
5602 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
5603 check_non_private = "reduction";
5607 decl = OMP_CLAUSE_DECL (c);
5608 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5613 omp_add_variable (ctx, decl, flags);
5614 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5615 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5617 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
5618 GOVD_LOCAL | GOVD_SEEN);
5619 gimplify_omp_ctxp = ctx;
5620 push_gimplify_context (&gctx);
5622 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = gimple_seq_alloc ();
5623 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = gimple_seq_alloc ();
5625 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
5626 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
5627 pop_gimplify_context
5628 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
5629 push_gimplify_context (&gctx);
5630 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
5631 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5632 pop_gimplify_context
5633 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
5634 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
5635 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
5637 gimplify_omp_ctxp = outer_ctx;
5639 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5640 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
5642 gimplify_omp_ctxp = ctx;
5643 push_gimplify_context (&gctx);
5644 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
5646 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
5648 TREE_SIDE_EFFECTS (bind) = 1;
5649 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
5650 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
5652 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
5653 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
5654 pop_gimplify_context
5655 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
5656 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
5658 gimplify_omp_ctxp = outer_ctx;
5664 case OMP_CLAUSE_COPYIN:
5665 case OMP_CLAUSE_COPYPRIVATE:
5666 decl = OMP_CLAUSE_DECL (c);
5667 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5674 omp_notice_variable (outer_ctx, decl, true);
5675 if (check_non_private
5676 && region_type == ORT_WORKSHARE
5677 && omp_check_private (ctx, decl))
5679 error ("%s variable %qE is private in outer context",
5680 check_non_private, DECL_NAME (decl));
5686 OMP_CLAUSE_OPERAND (c, 0)
5687 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
5690 case OMP_CLAUSE_SCHEDULE:
5691 case OMP_CLAUSE_NUM_THREADS:
5692 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
5693 is_gimple_val, fb_rvalue) == GS_ERROR)
5697 case OMP_CLAUSE_NOWAIT:
5698 case OMP_CLAUSE_ORDERED:
5699 case OMP_CLAUSE_UNTIED:
5700 case OMP_CLAUSE_COLLAPSE:
5703 case OMP_CLAUSE_DEFAULT:
5704 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
5712 *list_p = OMP_CLAUSE_CHAIN (c);
5714 list_p = &OMP_CLAUSE_CHAIN (c);
5717 gimplify_omp_ctxp = ctx;
5720 /* For all variables that were not actually used within the context,
5721 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
5724 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
5726 tree *list_p = (tree *) data;
5727 tree decl = (tree) n->key;
5728 unsigned flags = n->value;
5729 enum omp_clause_code code;
5733 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
5735 if ((flags & GOVD_SEEN) == 0)
5737 if (flags & GOVD_DEBUG_PRIVATE)
5739 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
5740 private_debug = true;
5744 = lang_hooks.decls.omp_private_debug_clause (decl,
5745 !!(flags & GOVD_SHARED));
5747 code = OMP_CLAUSE_PRIVATE;
5748 else if (flags & GOVD_SHARED)
5750 if (is_global_var (decl))
5752 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
5756 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5757 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
5758 | GOVD_PRIVATE | GOVD_REDUCTION)) != 0)
5760 ctx = ctx->outer_context;
5765 code = OMP_CLAUSE_SHARED;
5767 else if (flags & GOVD_PRIVATE)
5768 code = OMP_CLAUSE_PRIVATE;
5769 else if (flags & GOVD_FIRSTPRIVATE)
5770 code = OMP_CLAUSE_FIRSTPRIVATE;
5774 clause = build_omp_clause (input_location, code);
5775 OMP_CLAUSE_DECL (clause) = decl;
5776 OMP_CLAUSE_CHAIN (clause) = *list_p;
5778 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
5779 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
5780 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
5782 lang_hooks.decls.omp_finish_clause (clause);
5788 gimplify_adjust_omp_clauses (tree *list_p)
5790 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
5793 while ((c = *list_p) != NULL)
5796 bool remove = false;
5798 switch (OMP_CLAUSE_CODE (c))
5800 case OMP_CLAUSE_PRIVATE:
5801 case OMP_CLAUSE_SHARED:
5802 case OMP_CLAUSE_FIRSTPRIVATE:
5803 decl = OMP_CLAUSE_DECL (c);
5804 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5805 remove = !(n->value & GOVD_SEEN);
5808 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
5809 if ((n->value & GOVD_DEBUG_PRIVATE)
5810 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
5812 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
5813 || ((n->value & GOVD_DATA_SHARE_CLASS)
5815 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
5816 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
5821 case OMP_CLAUSE_LASTPRIVATE:
5822 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
5823 accurately reflect the presence of a FIRSTPRIVATE clause. */
5824 decl = OMP_CLAUSE_DECL (c);
5825 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5826 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
5827 = (n->value & GOVD_FIRSTPRIVATE) != 0;
5830 case OMP_CLAUSE_REDUCTION:
5831 case OMP_CLAUSE_COPYIN:
5832 case OMP_CLAUSE_COPYPRIVATE:
5834 case OMP_CLAUSE_NUM_THREADS:
5835 case OMP_CLAUSE_SCHEDULE:
5836 case OMP_CLAUSE_NOWAIT:
5837 case OMP_CLAUSE_ORDERED:
5838 case OMP_CLAUSE_DEFAULT:
5839 case OMP_CLAUSE_UNTIED:
5840 case OMP_CLAUSE_COLLAPSE:
5848 *list_p = OMP_CLAUSE_CHAIN (c);
5850 list_p = &OMP_CLAUSE_CHAIN (c);
5853 /* Add in any implicit data sharing. */
5854 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);
5856 gimplify_omp_ctxp = ctx->outer_context;
5857 delete_omp_context (ctx);
5860 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
5861 gimplification of the body, as well as scanning the body for used
5862 variables. We need to do this scan now, because variable-sized
5863 decls will be decomposed during gimplification. */
5866 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
5868 tree expr = *expr_p;
5870 gimple_seq body = NULL;
5871 struct gimplify_ctx gctx;
5873 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
5874 OMP_PARALLEL_COMBINED (expr)
5875 ? ORT_COMBINED_PARALLEL
5878 push_gimplify_context (&gctx);
5880 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
5881 if (gimple_code (g) == GIMPLE_BIND)
5882 pop_gimplify_context (g);
5884 pop_gimplify_context (NULL);
5886 gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
5888 g = gimple_build_omp_parallel (body,
5889 OMP_PARALLEL_CLAUSES (expr),
5890 NULL_TREE, NULL_TREE);
5891 if (OMP_PARALLEL_COMBINED (expr))
5892 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
5893 gimplify_seq_add_stmt (pre_p, g);
5894 *expr_p = NULL_TREE;
5897 /* Gimplify the contents of an OMP_TASK statement. This involves
5898 gimplification of the body, as well as scanning the body for used
5899 variables. We need to do this scan now, because variable-sized
5900 decls will be decomposed during gimplification. */
5903 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
5905 tree expr = *expr_p;
5907 gimple_seq body = NULL;
5908 struct gimplify_ctx gctx;
5910 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p, ORT_TASK);
5912 push_gimplify_context (&gctx);
5914 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
5915 if (gimple_code (g) == GIMPLE_BIND)
5916 pop_gimplify_context (g);
5918 pop_gimplify_context (NULL);
5920 gimplify_adjust_omp_clauses (&OMP_TASK_CLAUSES (expr));
5922 g = gimple_build_omp_task (body,
5923 OMP_TASK_CLAUSES (expr),
5924 NULL_TREE, NULL_TREE,
5925 NULL_TREE, NULL_TREE, NULL_TREE);
5926 gimplify_seq_add_stmt (pre_p, g);
5927 *expr_p = NULL_TREE;
5930 /* Gimplify the gross structure of an OMP_FOR statement. */
5932 static enum gimplify_status
5933 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
5935 tree for_stmt, decl, var, t;
5936 enum gimplify_status ret = GS_ALL_DONE;
5937 enum gimplify_status tret;
5939 gimple_seq for_body, for_pre_body;
5944 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
5947 /* Handle OMP_FOR_INIT. */
5948 for_pre_body = NULL;
5949 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
5950 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
5952 for_body = gimple_seq_alloc ();
5953 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
5954 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
5955 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
5956 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
5957 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
5959 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
5960 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
5961 decl = TREE_OPERAND (t, 0);
5962 gcc_assert (DECL_P (decl));
5963 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
5964 || POINTER_TYPE_P (TREE_TYPE (decl)));
5966 /* Make sure the iteration variable is private. */
5967 if (omp_is_private (gimplify_omp_ctxp, decl))
5968 omp_notice_variable (gimplify_omp_ctxp, decl, true);
5970 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
5972 /* If DECL is not a gimple register, create a temporary variable to act
5973 as an iteration counter. This is valid, since DECL cannot be
5974 modified in the body of the loop. */
5975 if (!is_gimple_reg (decl))
5977 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
5978 TREE_OPERAND (t, 0) = var;
5980 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
5982 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
5987 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
5988 is_gimple_val, fb_rvalue);
5989 ret = MIN (ret, tret);
5990 if (ret == GS_ERROR)
5993 /* Handle OMP_FOR_COND. */
5994 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
5995 gcc_assert (COMPARISON_CLASS_P (t));
5996 gcc_assert (TREE_OPERAND (t, 0) == decl);
5998 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
5999 is_gimple_val, fb_rvalue);
6000 ret = MIN (ret, tret);
6002 /* Handle OMP_FOR_INCR. */
6003 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6004 switch (TREE_CODE (t))
6006 case PREINCREMENT_EXPR:
6007 case POSTINCREMENT_EXPR:
6008 t = build_int_cst (TREE_TYPE (decl), 1);
6009 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6010 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6011 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6014 case PREDECREMENT_EXPR:
6015 case POSTDECREMENT_EXPR:
6016 t = build_int_cst (TREE_TYPE (decl), -1);
6017 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6018 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6019 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6023 gcc_assert (TREE_OPERAND (t, 0) == decl);
6024 TREE_OPERAND (t, 0) = var;
6026 t = TREE_OPERAND (t, 1);
6027 switch (TREE_CODE (t))
6030 if (TREE_OPERAND (t, 1) == decl)
6032 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
6033 TREE_OPERAND (t, 0) = var;
6039 case POINTER_PLUS_EXPR:
6040 gcc_assert (TREE_OPERAND (t, 0) == decl);
6041 TREE_OPERAND (t, 0) = var;
6047 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6048 is_gimple_val, fb_rvalue);
6049 ret = MIN (ret, tret);
6056 if (var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
6059 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
6060 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6061 && OMP_CLAUSE_DECL (c) == decl
6062 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
6064 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6065 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6066 gcc_assert (TREE_OPERAND (t, 0) == var);
6067 t = TREE_OPERAND (t, 1);
6068 gcc_assert (TREE_CODE (t) == PLUS_EXPR
6069 || TREE_CODE (t) == MINUS_EXPR
6070 || TREE_CODE (t) == POINTER_PLUS_EXPR);
6071 gcc_assert (TREE_OPERAND (t, 0) == var);
6072 t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
6073 TREE_OPERAND (t, 1));
6074 gimplify_assign (decl, t,
6075 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6080 gimplify_and_add (OMP_FOR_BODY (for_stmt), &for_body);
6082 gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (for_stmt));
6084 gfor = gimple_build_omp_for (for_body, OMP_FOR_CLAUSES (for_stmt),
6085 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
6088 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6090 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6091 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
6092 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
6093 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6094 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
6095 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
6096 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6097 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
6100 gimplify_seq_add_stmt (pre_p, gfor);
6101 return ret == GS_ALL_DONE ? GS_ALL_DONE : GS_ERROR;
6104 /* Gimplify the gross structure of other OpenMP worksharing constructs.
6105 In particular, OMP_SECTIONS and OMP_SINGLE. */
6108 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
6110 tree expr = *expr_p;
6112 gimple_seq body = NULL;
6114 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ORT_WORKSHARE);
6115 gimplify_and_add (OMP_BODY (expr), &body);
6116 gimplify_adjust_omp_clauses (&OMP_CLAUSES (expr));
6118 if (TREE_CODE (expr) == OMP_SECTIONS)
6119 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
6120 else if (TREE_CODE (expr) == OMP_SINGLE)
6121 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
6125 gimplify_seq_add_stmt (pre_p, stmt);
6128 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
6129 stabilized the lhs of the atomic operation as *ADDR. Return true if
6130 EXPR is this stabilized form. */
6133 goa_lhs_expr_p (tree expr, tree addr)
6135 /* Also include casts to other type variants. The C front end is fond
6136 of adding these for e.g. volatile variables. This is like
6137 STRIP_TYPE_NOPS but includes the main variant lookup. */
6138 while ((CONVERT_EXPR_P (expr)
6139 || TREE_CODE (expr) == NON_LVALUE_EXPR)
6140 && TREE_OPERAND (expr, 0) != error_mark_node
6141 && (TYPE_MAIN_VARIANT (TREE_TYPE (expr))
6142 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))))
6143 expr = TREE_OPERAND (expr, 0);
6145 if (TREE_CODE (expr) == INDIRECT_REF)
6147 expr = TREE_OPERAND (expr, 0);
6149 && (CONVERT_EXPR_P (expr)
6150 || TREE_CODE (expr) == NON_LVALUE_EXPR)
6151 && TREE_CODE (expr) == TREE_CODE (addr)
6152 && TYPE_MAIN_VARIANT (TREE_TYPE (expr))
6153 == TYPE_MAIN_VARIANT (TREE_TYPE (addr)))
6155 expr = TREE_OPERAND (expr, 0);
6156 addr = TREE_OPERAND (addr, 0);
6160 return (TREE_CODE (addr) == ADDR_EXPR
6161 && TREE_CODE (expr) == ADDR_EXPR
6162 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
6164 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
6169 /* Walk *EXPR_P and replace
6170 appearances of *LHS_ADDR with LHS_VAR. If an expression does not involve
6171 the lhs, evaluate it into a temporary. Return 1 if the lhs appeared as
6172 a subexpression, 0 if it did not, or -1 if an error was encountered. */
6175 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
6178 tree expr = *expr_p;
6181 if (goa_lhs_expr_p (expr, lhs_addr))
6186 if (is_gimple_val (expr))
6190 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
6193 case tcc_comparison:
6194 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
6197 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
6200 case tcc_expression:
6201 switch (TREE_CODE (expr))
6203 case TRUTH_ANDIF_EXPR:
6204 case TRUTH_ORIF_EXPR:
6205 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
6207 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
6220 enum gimplify_status gs;
6221 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
6222 if (gs != GS_ALL_DONE)
6230 /* Gimplify an OMP_ATOMIC statement. */
6232 static enum gimplify_status
6233 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
6235 tree addr = TREE_OPERAND (*expr_p, 0);
6236 tree rhs = TREE_OPERAND (*expr_p, 1);
6237 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
6240 tmp_load = create_tmp_var (type, NULL);
6241 if (goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
6244 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
6248 gimplify_seq_add_stmt (pre_p, gimple_build_omp_atomic_load (tmp_load, addr));
6249 if (gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
6252 gimplify_seq_add_stmt (pre_p, gimple_build_omp_atomic_store (rhs));
6259 /* Converts the GENERIC expression tree *EXPR_P to GIMPLE. If the
6260 expression produces a value to be used as an operand inside a GIMPLE
6261 statement, the value will be stored back in *EXPR_P. This value will
6262 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
6263 an SSA_NAME. The corresponding sequence of GIMPLE statements is
6264 emitted in PRE_P and POST_P.
6266 Additionally, this process may overwrite parts of the input
6267 expression during gimplification. Ideally, it should be
6268 possible to do non-destructive gimplification.
6270 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
6271 the expression needs to evaluate to a value to be used as
6272 an operand in a GIMPLE statement, this value will be stored in
6273 *EXPR_P on exit. This happens when the caller specifies one
6274 of fb_lvalue or fb_rvalue fallback flags.
6276 PRE_P will contain the sequence of GIMPLE statements corresponding
6277 to the evaluation of EXPR and all the side-effects that must
6278 be executed before the main expression. On exit, the last
6279 statement of PRE_P is the core statement being gimplified. For
6280 instance, when gimplifying 'if (++a)' the last statement in
6281 PRE_P will be 'if (t.1)' where t.1 is the result of
6282 pre-incrementing 'a'.
6284 POST_P will contain the sequence of GIMPLE statements corresponding
6285 to the evaluation of all the side-effects that must be executed
6286 after the main expression. If this is NULL, the post
6287 side-effects are stored at the end of PRE_P.
6289 The reason why the output is split in two is to handle post
6290 side-effects explicitly. In some cases, an expression may have
6291 inner and outer post side-effects which need to be emitted in
6292 an order different from the one given by the recursive
6293 traversal. For instance, for the expression (*p--)++ the post
6294 side-effects of '--' must actually occur *after* the post
6295 side-effects of '++'. However, gimplification will first visit
6296 the inner expression, so if a separate POST sequence was not
6297 used, the resulting sequence would be:
6304 However, the post-decrement operation in line #2 must not be
6305 evaluated until after the store to *p at line #4, so the
6306 correct sequence should be:
6313 So, by specifying a separate post queue, it is possible
6314 to emit the post side-effects in the correct order.
6315 If POST_P is NULL, an internal queue will be used. Before
6316 returning to the caller, the sequence POST_P is appended to
6317 the main output sequence PRE_P.
6319 GIMPLE_TEST_F points to a function that takes a tree T and
6320 returns nonzero if T is in the GIMPLE form requested by the
6321 caller. The GIMPLE predicates are in tree-gimple.c.
6323 FALLBACK tells the function what sort of a temporary we want if
6324 gimplification cannot produce an expression that complies with
6327 fb_none means that no temporary should be generated
6328 fb_rvalue means that an rvalue is OK to generate
6329 fb_lvalue means that an lvalue is OK to generate
6330 fb_either means that either is OK, but an lvalue is preferable.
6331 fb_mayfail means that gimplification may fail (in which case
6332 GS_ERROR will be returned)
6334 The return value is either GS_ERROR or GS_ALL_DONE, since this
6335 function iterates until EXPR is completely gimplified or an error
6338 enum gimplify_status
6339 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
6340 bool (*gimple_test_f) (tree), fallback_t fallback)
6343 gimple_seq internal_pre = NULL;
6344 gimple_seq internal_post = NULL;
6347 location_t saved_location;
6348 enum gimplify_status ret;
6349 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
6351 save_expr = *expr_p;
6352 if (save_expr == NULL_TREE)
6355 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
6356 is_statement = gimple_test_f == is_gimple_stmt;
6360 /* Consistency checks. */
6361 if (gimple_test_f == is_gimple_reg)
6362 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
6363 else if (gimple_test_f == is_gimple_val
6364 || gimple_test_f == is_gimple_call_addr
6365 || gimple_test_f == is_gimple_condexpr
6366 || gimple_test_f == is_gimple_mem_rhs
6367 || gimple_test_f == is_gimple_mem_rhs_or_call
6368 || gimple_test_f == is_gimple_reg_rhs
6369 || gimple_test_f == is_gimple_reg_rhs_or_call
6370 || gimple_test_f == is_gimple_asm_val)
6371 gcc_assert (fallback & fb_rvalue);
6372 else if (gimple_test_f == is_gimple_min_lval
6373 || gimple_test_f == is_gimple_lvalue)
6374 gcc_assert (fallback & fb_lvalue);
6375 else if (gimple_test_f == is_gimple_addressable)
6376 gcc_assert (fallback & fb_either);
6377 else if (gimple_test_f == is_gimple_stmt)
6378 gcc_assert (fallback == fb_none);
6381 /* We should have recognized the GIMPLE_TEST_F predicate to
6382 know what kind of fallback to use in case a temporary is
6383 needed to hold the value or address of *EXPR_P. */
6387 /* We used to check the predicate here and return immediately if it
6388 succeeds. This is wrong; the design is for gimplification to be
6389 idempotent, and for the predicates to only test for valid forms, not
6390 whether they are fully simplified. */
6392 pre_p = &internal_pre;
6395 post_p = &internal_post;
6397 /* Remember the last statements added to PRE_P and POST_P. Every
6398 new statement added by the gimplification helpers needs to be
6399 annotated with location information. To centralize the
6400 responsibility, we remember the last statement that had been
6401 added to both queues before gimplifying *EXPR_P. If
6402 gimplification produces new statements in PRE_P and POST_P, those
6403 statements will be annotated with the same location information
6405 pre_last_gsi = gsi_last (*pre_p);
6406 post_last_gsi = gsi_last (*post_p);
6408 saved_location = input_location;
6409 if (save_expr != error_mark_node
6410 && EXPR_HAS_LOCATION (*expr_p))
6411 input_location = EXPR_LOCATION (*expr_p);
6413 /* Loop over the specific gimplifiers until the toplevel node
6414 remains the same. */
6417 /* Strip away as many useless type conversions as possible
6419 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
6421 /* Remember the expr. */
6422 save_expr = *expr_p;
6424 /* Die, die, die, my darling. */
6425 if (save_expr == error_mark_node
6426 || (TREE_TYPE (save_expr)
6427 && TREE_TYPE (save_expr) == error_mark_node))
6433 /* Do any language-specific gimplification. */
6434 ret = ((enum gimplify_status)
6435 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
6438 if (*expr_p == NULL_TREE)
6440 if (*expr_p != save_expr)
6443 else if (ret != GS_UNHANDLED)
6447 switch (TREE_CODE (*expr_p))
6449 /* First deal with the special cases. */
6451 case POSTINCREMENT_EXPR:
6452 case POSTDECREMENT_EXPR:
6453 case PREINCREMENT_EXPR:
6454 case PREDECREMENT_EXPR:
6455 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
6456 fallback != fb_none);
6460 case ARRAY_RANGE_REF:
6464 case VIEW_CONVERT_EXPR:
6465 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
6466 fallback ? fallback : fb_rvalue);
6470 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
6472 /* C99 code may assign to an array in a structure value of a
6473 conditional expression, and this has undefined behavior
6474 only on execution, so create a temporary if an lvalue is
6476 if (fallback == fb_lvalue)
6478 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6479 mark_addressable (*expr_p);
6484 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
6486 /* C99 code may assign to an array in a structure returned
6487 from a function, and this has undefined behavior only on
6488 execution, so create a temporary if an lvalue is
6490 if (fallback == fb_lvalue)
6492 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6493 mark_addressable (*expr_p);
6501 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
6504 case COMPOUND_LITERAL_EXPR:
6505 ret = gimplify_compound_literal_expr (expr_p, pre_p);
6510 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
6511 fallback != fb_none);
6514 case TRUTH_ANDIF_EXPR:
6515 case TRUTH_ORIF_EXPR:
6516 /* Pass the source location of the outer expression. */
6517 ret = gimplify_boolean_expr (expr_p, saved_location);
6520 case TRUTH_NOT_EXPR:
6521 if (TREE_CODE (TREE_TYPE (*expr_p)) != BOOLEAN_TYPE)
6523 tree type = TREE_TYPE (*expr_p);
6524 *expr_p = fold_convert (type, gimple_boolify (*expr_p));
6529 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6530 is_gimple_val, fb_rvalue);
6531 recalculate_side_effects (*expr_p);
6535 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
6539 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
6543 if (IS_EMPTY_STMT (*expr_p))
6549 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
6550 || fallback == fb_none)
6552 /* Just strip a conversion to void (or in void context) and
6554 *expr_p = TREE_OPERAND (*expr_p, 0);
6558 ret = gimplify_conversion (expr_p);
6559 if (ret == GS_ERROR)
6561 if (*expr_p != save_expr)
6565 case FIX_TRUNC_EXPR:
6566 /* unary_expr: ... | '(' cast ')' val | ... */
6567 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6568 is_gimple_val, fb_rvalue);
6569 recalculate_side_effects (*expr_p);
6573 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
6574 if (*expr_p != save_expr)
6576 /* else fall through. */
6577 case ALIGN_INDIRECT_REF:
6578 case MISALIGNED_INDIRECT_REF:
6579 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6580 is_gimple_reg, fb_rvalue);
6581 recalculate_side_effects (*expr_p);
6584 /* Constants need not be gimplified. */
6595 /* If we require an lvalue, such as for ADDR_EXPR, retain the
6596 CONST_DECL node. Otherwise the decl is replaceable by its
6598 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
6599 if (fallback & fb_lvalue)
6602 *expr_p = DECL_INITIAL (*expr_p);
6606 ret = gimplify_decl_expr (expr_p, pre_p);
6610 /* FIXME make this a decl. */
6615 ret = gimplify_bind_expr (expr_p, pre_p);
6619 ret = gimplify_loop_expr (expr_p, pre_p);
6623 ret = gimplify_switch_expr (expr_p, pre_p);
6627 ret = gimplify_exit_expr (expr_p);
6631 /* If the target is not LABEL, then it is a computed jump
6632 and the target needs to be gimplified. */
6633 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
6635 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
6636 NULL, is_gimple_val, fb_rvalue);
6637 if (ret == GS_ERROR)
6640 gimplify_seq_add_stmt (pre_p,
6641 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
6645 gimplify_seq_add_stmt (pre_p,
6646 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
6647 PREDICT_EXPR_OUTCOME (*expr_p)));
6653 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
6654 == current_function_decl);
6655 gimplify_seq_add_stmt (pre_p,
6656 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
6659 case CASE_LABEL_EXPR:
6660 ret = gimplify_case_label_expr (expr_p, pre_p);
6664 ret = gimplify_return_expr (*expr_p, pre_p);
6668 /* Don't reduce this in place; let gimplify_init_constructor work its
6669 magic. Buf if we're just elaborating this for side effects, just
6670 gimplify any element that has side-effects. */
6671 if (fallback == fb_none)
6673 unsigned HOST_WIDE_INT ix;
6674 constructor_elt *ce;
6675 tree temp = NULL_TREE;
6677 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (*expr_p),
6680 if (TREE_SIDE_EFFECTS (ce->value))
6681 append_to_statement_list (ce->value, &temp);
6686 /* C99 code may assign to an array in a constructed
6687 structure or union, and this has undefined behavior only
6688 on execution, so create a temporary if an lvalue is
6690 else if (fallback == fb_lvalue)
6692 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6693 mark_addressable (*expr_p);
6699 /* The following are special cases that are not handled by the
6700 original GIMPLE grammar. */
6702 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
6705 ret = gimplify_save_expr (expr_p, pre_p, post_p);
6710 enum gimplify_status r0, r1, r2;
6712 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6713 post_p, is_gimple_lvalue, fb_either);
6714 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
6715 post_p, is_gimple_val, fb_rvalue);
6716 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
6717 post_p, is_gimple_val, fb_rvalue);
6718 recalculate_side_effects (*expr_p);
6720 ret = MIN (r0, MIN (r1, r2));
6724 case TARGET_MEM_REF:
6726 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
6728 if (TMR_SYMBOL (*expr_p))
6729 r0 = gimplify_expr (&TMR_SYMBOL (*expr_p), pre_p,
6730 post_p, is_gimple_lvalue, fb_either);
6731 else if (TMR_BASE (*expr_p))
6732 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
6733 post_p, is_gimple_val, fb_either);
6734 if (TMR_INDEX (*expr_p))
6735 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
6736 post_p, is_gimple_val, fb_rvalue);
6737 /* TMR_STEP and TMR_OFFSET are always integer constants. */
6742 case NON_LVALUE_EXPR:
6743 /* This should have been stripped above. */
6747 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
6750 case TRY_FINALLY_EXPR:
6751 case TRY_CATCH_EXPR:
6753 gimple_seq eval, cleanup;
6756 eval = cleanup = NULL;
6757 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
6758 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
6759 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
6760 if (gimple_seq_empty_p (cleanup))
6762 gimple_seq_add_seq (pre_p, eval);
6766 try_ = gimple_build_try (eval, cleanup,
6767 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
6768 ? GIMPLE_TRY_FINALLY
6769 : GIMPLE_TRY_CATCH);
6770 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
6771 gimple_try_set_catch_is_cleanup (try_,
6772 TRY_CATCH_IS_CLEANUP (*expr_p));
6773 gimplify_seq_add_stmt (pre_p, try_);
6778 case CLEANUP_POINT_EXPR:
6779 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
6783 ret = gimplify_target_expr (expr_p, pre_p, post_p);
6789 gimple_seq handler = NULL;
6790 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
6791 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
6792 gimplify_seq_add_stmt (pre_p, c);
6797 case EH_FILTER_EXPR:
6800 gimple_seq failure = NULL;
6802 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
6803 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
6804 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
6805 gimple_eh_filter_set_must_not_throw
6806 (ehf, EH_FILTER_MUST_NOT_THROW (*expr_p));
6807 gimplify_seq_add_stmt (pre_p, ehf);
6814 enum gimplify_status r0, r1;
6815 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
6816 post_p, is_gimple_val, fb_rvalue);
6817 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
6818 post_p, is_gimple_val, fb_rvalue);
6819 TREE_SIDE_EFFECTS (*expr_p) = 0;
6825 /* We get here when taking the address of a label. We mark
6826 the label as "forced"; meaning it can never be removed and
6827 it is a potential target for any computed goto. */
6828 FORCED_LABEL (*expr_p) = 1;
6832 case STATEMENT_LIST:
6833 ret = gimplify_statement_list (expr_p, pre_p);
6836 case WITH_SIZE_EXPR:
6838 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6839 post_p == &internal_post ? NULL : post_p,
6840 gimple_test_f, fallback);
6841 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
6842 is_gimple_val, fb_rvalue);
6848 ret = gimplify_var_or_parm_decl (expr_p);
6852 /* When within an OpenMP context, notice uses of variables. */
6853 if (gimplify_omp_ctxp)
6854 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
6859 /* Allow callbacks into the gimplifier during optimization. */
6864 gimplify_omp_parallel (expr_p, pre_p);
6869 gimplify_omp_task (expr_p, pre_p);
6874 ret = gimplify_omp_for (expr_p, pre_p);
6879 gimplify_omp_workshare (expr_p, pre_p);
6888 gimple_seq body = NULL;
6891 gimplify_and_add (OMP_BODY (*expr_p), &body);
6892 switch (TREE_CODE (*expr_p))
6895 g = gimple_build_omp_section (body);
6898 g = gimple_build_omp_master (body);
6901 g = gimple_build_omp_ordered (body);
6904 g = gimple_build_omp_critical (body,
6905 OMP_CRITICAL_NAME (*expr_p));
6910 gimplify_seq_add_stmt (pre_p, g);
6916 ret = gimplify_omp_atomic (expr_p, pre_p);
6919 case POINTER_PLUS_EXPR:
6920 /* Convert ((type *)A)+offset into &A->field_of_type_and_offset.
6921 The second is gimple immediate saving a need for extra statement.
6923 if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
6924 && (tmp = maybe_fold_offset_to_address
6925 (EXPR_LOCATION (*expr_p),
6926 TREE_OPERAND (*expr_p, 0), TREE_OPERAND (*expr_p, 1),
6927 TREE_TYPE (*expr_p))))
6932 /* Convert (void *)&a + 4 into (void *)&a[1]. */
6933 if (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == NOP_EXPR
6934 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
6935 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p,
6937 && (tmp = maybe_fold_offset_to_address
6938 (EXPR_LOCATION (*expr_p),
6939 TREE_OPERAND (TREE_OPERAND (*expr_p, 0), 0),
6940 TREE_OPERAND (*expr_p, 1),
6941 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p, 0),
6944 *expr_p = fold_convert (TREE_TYPE (*expr_p), tmp);
6950 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
6952 case tcc_comparison:
6953 /* Handle comparison of objects of non scalar mode aggregates
6954 with a call to memcmp. It would be nice to only have to do
6955 this for variable-sized objects, but then we'd have to allow
6956 the same nest of reference nodes we allow for MODIFY_EXPR and
6959 Compare scalar mode aggregates as scalar mode values. Using
6960 memcmp for them would be very inefficient at best, and is
6961 plain wrong if bitfields are involved. */
6963 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
6965 if (!AGGREGATE_TYPE_P (type))
6967 else if (TYPE_MODE (type) != BLKmode)
6968 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
6970 ret = gimplify_variable_sized_compare (expr_p);
6975 /* If *EXPR_P does not need to be special-cased, handle it
6976 according to its class. */
6978 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6979 post_p, is_gimple_val, fb_rvalue);
6985 enum gimplify_status r0, r1;
6987 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6988 post_p, is_gimple_val, fb_rvalue);
6989 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
6990 post_p, is_gimple_val, fb_rvalue);
6996 case tcc_declaration:
6999 goto dont_recalculate;
7002 gcc_assert (TREE_CODE (*expr_p) == TRUTH_AND_EXPR
7003 || TREE_CODE (*expr_p) == TRUTH_OR_EXPR
7004 || TREE_CODE (*expr_p) == TRUTH_XOR_EXPR);
7008 recalculate_side_effects (*expr_p);
7014 /* If we replaced *expr_p, gimplify again. */
7015 if (ret == GS_OK && (*expr_p == NULL || *expr_p == save_expr))
7018 while (ret == GS_OK);
7020 /* If we encountered an error_mark somewhere nested inside, either
7021 stub out the statement or propagate the error back out. */
7022 if (ret == GS_ERROR)
7029 /* This was only valid as a return value from the langhook, which
7030 we handled. Make sure it doesn't escape from any other context. */
7031 gcc_assert (ret != GS_UNHANDLED);
7033 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
7035 /* We aren't looking for a value, and we don't have a valid
7036 statement. If it doesn't have side-effects, throw it away. */
7037 if (!TREE_SIDE_EFFECTS (*expr_p))
7039 else if (!TREE_THIS_VOLATILE (*expr_p))
7041 /* This is probably a _REF that contains something nested that
7042 has side effects. Recurse through the operands to find it. */
7043 enum tree_code code = TREE_CODE (*expr_p);
7050 case VIEW_CONVERT_EXPR:
7051 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7052 gimple_test_f, fallback);
7056 case ARRAY_RANGE_REF:
7057 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7058 gimple_test_f, fallback);
7059 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
7060 gimple_test_f, fallback);
7064 /* Anything else with side-effects must be converted to
7065 a valid statement before we get here. */
7071 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
7072 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
7074 /* Historically, the compiler has treated a bare reference
7075 to a non-BLKmode volatile lvalue as forcing a load. */
7076 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
7078 /* Normally, we do not want to create a temporary for a
7079 TREE_ADDRESSABLE type because such a type should not be
7080 copied by bitwise-assignment. However, we make an
7081 exception here, as all we are doing here is ensuring that
7082 we read the bytes that make up the type. We use
7083 create_tmp_var_raw because create_tmp_var will abort when
7084 given a TREE_ADDRESSABLE type. */
7085 tree tmp = create_tmp_var_raw (type, "vol");
7086 gimple_add_tmp_var (tmp);
7087 gimplify_assign (tmp, *expr_p, pre_p);
7091 /* We can't do anything useful with a volatile reference to
7092 an incomplete type, so just throw it away. Likewise for
7093 a BLKmode type, since any implicit inner load should
7094 already have been turned into an explicit one by the
7095 gimplification process. */
7099 /* If we are gimplifying at the statement level, we're done. Tack
7100 everything together and return. */
7101 if (fallback == fb_none || is_statement)
7103 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
7104 it out for GC to reclaim it. */
7105 *expr_p = NULL_TREE;
7107 if (!gimple_seq_empty_p (internal_pre)
7108 || !gimple_seq_empty_p (internal_post))
7110 gimplify_seq_add_seq (&internal_pre, internal_post);
7111 gimplify_seq_add_seq (pre_p, internal_pre);
7114 /* The result of gimplifying *EXPR_P is going to be the last few
7115 statements in *PRE_P and *POST_P. Add location information
7116 to all the statements that were added by the gimplification
7118 if (!gimple_seq_empty_p (*pre_p))
7119 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
7121 if (!gimple_seq_empty_p (*post_p))
7122 annotate_all_with_location_after (*post_p, post_last_gsi,
7128 #ifdef ENABLE_GIMPLE_CHECKING
7131 enum tree_code code = TREE_CODE (*expr_p);
7132 /* These expressions should already be in gimple IR form. */
7133 gcc_assert (code != MODIFY_EXPR
7135 && code != BIND_EXPR
7136 && code != CATCH_EXPR
7137 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
7138 && code != EH_FILTER_EXPR
7139 && code != GOTO_EXPR
7140 && code != LABEL_EXPR
7141 && code != LOOP_EXPR
7142 && code != RESX_EXPR
7143 && code != SWITCH_EXPR
7144 && code != TRY_FINALLY_EXPR
7145 && code != OMP_CRITICAL
7147 && code != OMP_MASTER
7148 && code != OMP_ORDERED
7149 && code != OMP_PARALLEL
7150 && code != OMP_SECTIONS
7151 && code != OMP_SECTION
7152 && code != OMP_SINGLE);
7156 /* Otherwise we're gimplifying a subexpression, so the resulting
7157 value is interesting. If it's a valid operand that matches
7158 GIMPLE_TEST_F, we're done. Unless we are handling some
7159 post-effects internally; if that's the case, we need to copy into
7160 a temporary before adding the post-effects to POST_P. */
7161 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
7164 /* Otherwise, we need to create a new temporary for the gimplified
7167 /* We can't return an lvalue if we have an internal postqueue. The
7168 object the lvalue refers to would (probably) be modified by the
7169 postqueue; we need to copy the value out first, which means an
7171 if ((fallback & fb_lvalue)
7172 && gimple_seq_empty_p (internal_post)
7173 && is_gimple_addressable (*expr_p))
7175 /* An lvalue will do. Take the address of the expression, store it
7176 in a temporary, and replace the expression with an INDIRECT_REF of
7178 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
7179 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
7180 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (tmp)), tmp);
7182 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
7184 /* An rvalue will do. Assign the gimplified expression into a
7185 new temporary TMP and replace the original expression with
7186 TMP. First, make sure that the expression has a type so that
7187 it can be assigned into a temporary. */
7188 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
7190 if (!gimple_seq_empty_p (internal_post) || (fallback & fb_lvalue))
7191 /* The postqueue might change the value of the expression between
7192 the initialization and use of the temporary, so we can't use a
7193 formal temp. FIXME do we care? */
7195 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7196 if (TREE_CODE (TREE_TYPE (*expr_p)) == COMPLEX_TYPE
7197 || TREE_CODE (TREE_TYPE (*expr_p)) == VECTOR_TYPE)
7198 DECL_GIMPLE_REG_P (*expr_p) = 1;
7201 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
7205 #ifdef ENABLE_GIMPLE_CHECKING
7206 if (!(fallback & fb_mayfail))
7208 fprintf (stderr, "gimplification failed:\n");
7209 print_generic_expr (stderr, *expr_p, 0);
7210 debug_tree (*expr_p);
7211 internal_error ("gimplification failed");
7214 gcc_assert (fallback & fb_mayfail);
7216 /* If this is an asm statement, and the user asked for the
7217 impossible, don't die. Fail and let gimplify_asm_expr
7223 /* Make sure the temporary matches our predicate. */
7224 gcc_assert ((*gimple_test_f) (*expr_p));
7226 if (!gimple_seq_empty_p (internal_post))
7228 annotate_all_with_location (internal_post, input_location);
7229 gimplify_seq_add_seq (pre_p, internal_post);
7233 input_location = saved_location;
7237 /* Look through TYPE for variable-sized objects and gimplify each such
7238 size that we find. Add to LIST_P any statements generated. */
7241 gimplify_type_sizes (tree type, gimple_seq *list_p)
7245 if (type == NULL || type == error_mark_node)
7248 /* We first do the main variant, then copy into any other variants. */
7249 type = TYPE_MAIN_VARIANT (type);
7251 /* Avoid infinite recursion. */
7252 if (TYPE_SIZES_GIMPLIFIED (type))
7255 TYPE_SIZES_GIMPLIFIED (type) = 1;
7257 switch (TREE_CODE (type))
7263 case FIXED_POINT_TYPE:
7264 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
7265 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
7267 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
7269 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
7270 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
7275 /* These types may not have declarations, so handle them here. */
7276 gimplify_type_sizes (TREE_TYPE (type), list_p);
7277 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
7278 /* When not optimizing, ensure VLA bounds aren't removed. */
7280 && TYPE_DOMAIN (type)
7281 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
7283 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
7284 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
7285 DECL_IGNORED_P (t) = 0;
7286 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7287 if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
7288 DECL_IGNORED_P (t) = 0;
7294 case QUAL_UNION_TYPE:
7295 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
7296 if (TREE_CODE (field) == FIELD_DECL)
7298 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
7299 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
7300 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
7301 gimplify_type_sizes (TREE_TYPE (field), list_p);
7306 case REFERENCE_TYPE:
7307 /* We used to recurse on the pointed-to type here, which turned out to
7308 be incorrect because its definition might refer to variables not
7309 yet initialized at this point if a forward declaration is involved.
7311 It was actually useful for anonymous pointed-to types to ensure
7312 that the sizes evaluation dominates every possible later use of the
7313 values. Restricting to such types here would be safe since there
7314 is no possible forward declaration around, but would introduce an
7315 undesirable middle-end semantic to anonymity. We then defer to
7316 front-ends the responsibility of ensuring that the sizes are
7317 evaluated both early and late enough, e.g. by attaching artificial
7318 type declarations to the tree. */
7325 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
7326 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
7328 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
7330 TYPE_SIZE (t) = TYPE_SIZE (type);
7331 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
7332 TYPE_SIZES_GIMPLIFIED (t) = 1;
7336 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
7337 a size or position, has had all of its SAVE_EXPRs evaluated.
7338 We add any required statements to *STMT_P. */
7341 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
7343 tree type, expr = *expr_p;
7345 /* We don't do anything if the value isn't there, is constant, or contains
7346 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
7347 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
7348 will want to replace it with a new variable, but that will cause problems
7349 if this type is from outside the function. It's OK to have that here. */
7350 if (expr == NULL_TREE || TREE_CONSTANT (expr)
7351 || TREE_CODE (expr) == VAR_DECL
7352 || CONTAINS_PLACEHOLDER_P (expr))
7355 type = TREE_TYPE (expr);
7356 *expr_p = unshare_expr (expr);
7358 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
7361 /* Verify that we've an exact type match with the original expression.
7362 In particular, we do not wish to drop a "sizetype" in favour of a
7363 type of similar dimensions. We don't want to pollute the generic
7364 type-stripping code with this knowledge because it doesn't matter
7365 for the bulk of GENERIC/GIMPLE. It only matters that TYPE_SIZE_UNIT
7366 and friends retain their "sizetype-ness". */
7367 if (TREE_TYPE (expr) != type
7368 && TREE_CODE (type) == INTEGER_TYPE
7369 && TYPE_IS_SIZETYPE (type))
7374 *expr_p = create_tmp_var (type, NULL);
7375 tmp = build1 (NOP_EXPR, type, expr);
7376 stmt = gimplify_assign (*expr_p, tmp, stmt_p);
7377 if (EXPR_HAS_LOCATION (expr))
7378 gimple_set_location (stmt, EXPR_LOCATION (expr));
7380 gimple_set_location (stmt, input_location);
7385 /* Gimplify the body of statements pointed to by BODY_P and return a
7386 GIMPLE_BIND containing the sequence of GIMPLE statements
7387 corresponding to BODY_P. FNDECL is the function decl containing
7391 gimplify_body (tree *body_p, tree fndecl, bool do_parms)
7393 location_t saved_location = input_location;
7394 gimple_seq parm_stmts, seq;
7396 struct gimplify_ctx gctx;
7398 timevar_push (TV_TREE_GIMPLIFY);
7400 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
7402 default_rtl_profile ();
7404 gcc_assert (gimplify_ctxp == NULL);
7405 push_gimplify_context (&gctx);
7407 /* Unshare most shared trees in the body and in that of any nested functions.
7408 It would seem we don't have to do this for nested functions because
7409 they are supposed to be output and then the outer function gimplified
7410 first, but the g++ front end doesn't always do it that way. */
7411 unshare_body (body_p, fndecl);
7412 unvisit_body (body_p, fndecl);
7414 if (cgraph_node (fndecl)->origin)
7415 nonlocal_vlas = pointer_set_create ();
7417 /* Make sure input_location isn't set to something weird. */
7418 input_location = DECL_SOURCE_LOCATION (fndecl);
7420 /* Resolve callee-copies. This has to be done before processing
7421 the body so that DECL_VALUE_EXPR gets processed correctly. */
7422 parm_stmts = (do_parms) ? gimplify_parameters () : NULL;
7424 /* Gimplify the function's body. */
7426 gimplify_stmt (body_p, &seq);
7427 outer_bind = gimple_seq_first_stmt (seq);
7430 outer_bind = gimple_build_nop ();
7431 gimplify_seq_add_stmt (&seq, outer_bind);
7434 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
7435 not the case, wrap everything in a GIMPLE_BIND to make it so. */
7436 if (gimple_code (outer_bind) == GIMPLE_BIND
7437 && gimple_seq_first (seq) == gimple_seq_last (seq))
7440 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
7442 *body_p = NULL_TREE;
7444 /* If we had callee-copies statements, insert them at the beginning
7446 if (!gimple_seq_empty_p (parm_stmts))
7448 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
7449 gimple_bind_set_body (outer_bind, parm_stmts);
7454 pointer_set_destroy (nonlocal_vlas);
7455 nonlocal_vlas = NULL;
7458 pop_gimplify_context (outer_bind);
7459 gcc_assert (gimplify_ctxp == NULL);
7461 #ifdef ENABLE_TYPES_CHECKING
7462 if (!errorcount && !sorrycount)
7463 verify_types_in_gimple_seq (gimple_bind_body (outer_bind));
7466 timevar_pop (TV_TREE_GIMPLIFY);
7467 input_location = saved_location;
7472 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
7473 node for the function we want to gimplify.
7475 Returns the sequence of GIMPLE statements corresponding to the body
7479 gimplify_function_tree (tree fndecl)
7481 tree oldfn, parm, ret;
7485 oldfn = current_function_decl;
7486 current_function_decl = fndecl;
7487 if (DECL_STRUCT_FUNCTION (fndecl))
7488 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
7490 push_struct_function (fndecl);
7492 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = TREE_CHAIN (parm))
7494 /* Preliminarily mark non-addressed complex variables as eligible
7495 for promotion to gimple registers. We'll transform their uses
7497 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
7498 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
7499 && !TREE_THIS_VOLATILE (parm)
7500 && !needs_to_live_in_memory (parm))
7501 DECL_GIMPLE_REG_P (parm) = 1;
7504 ret = DECL_RESULT (fndecl);
7505 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
7506 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
7507 && !needs_to_live_in_memory (ret))
7508 DECL_GIMPLE_REG_P (ret) = 1;
7510 bind = gimplify_body (&DECL_SAVED_TREE (fndecl), fndecl, true);
7512 /* The tree body of the function is no longer needed, replace it
7513 with the new GIMPLE body. */
7514 seq = gimple_seq_alloc ();
7515 gimple_seq_add_stmt (&seq, bind);
7516 gimple_set_body (fndecl, seq);
7518 /* If we're instrumenting function entry/exit, then prepend the call to
7519 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
7520 catch the exit hook. */
7521 /* ??? Add some way to ignore exceptions for this TFE. */
7522 if (flag_instrument_function_entry_exit
7523 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
7524 && !flag_instrument_functions_exclude_p (fndecl))
7529 gimple_seq cleanup = NULL, body = NULL;
7531 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_EXIT];
7532 gimplify_seq_add_stmt (&cleanup, gimple_build_call (x, 0));
7533 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
7535 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_ENTER];
7536 gimplify_seq_add_stmt (&body, gimple_build_call (x, 0));
7537 gimplify_seq_add_stmt (&body, tf);
7538 new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
7539 /* Clear the block for BIND, since it is no longer directly inside
7540 the function, but within a try block. */
7541 gimple_bind_set_block (bind, NULL);
7543 /* Replace the current function body with the body
7544 wrapped in the try/finally TF. */
7545 seq = gimple_seq_alloc ();
7546 gimple_seq_add_stmt (&seq, new_bind);
7547 gimple_set_body (fndecl, seq);
7550 DECL_SAVED_TREE (fndecl) = NULL_TREE;
7552 current_function_decl = oldfn;
7557 /* Some transformations like inlining may invalidate the GIMPLE form
7558 for operands. This function traverses all the operands in STMT and
7559 gimplifies anything that is not a valid gimple operand. Any new
7560 GIMPLE statements are inserted before *GSI_P. */
7563 gimple_regimplify_operands (gimple stmt, gimple_stmt_iterator *gsi_p)
7566 tree orig_lhs = NULL_TREE, lhs, t;
7567 gimple_seq pre = NULL;
7568 gimple post_stmt = NULL;
7569 struct gimplify_ctx gctx;
7571 push_gimplify_context (&gctx);
7572 gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
7574 switch (gimple_code (stmt))
7577 gimplify_expr (gimple_cond_lhs_ptr (stmt), &pre, NULL,
7578 is_gimple_val, fb_rvalue);
7579 gimplify_expr (gimple_cond_rhs_ptr (stmt), &pre, NULL,
7580 is_gimple_val, fb_rvalue);
7583 gimplify_expr (gimple_switch_index_ptr (stmt), &pre, NULL,
7584 is_gimple_val, fb_rvalue);
7586 case GIMPLE_OMP_ATOMIC_LOAD:
7587 gimplify_expr (gimple_omp_atomic_load_rhs_ptr (stmt), &pre, NULL,
7588 is_gimple_val, fb_rvalue);
7592 size_t i, noutputs = gimple_asm_noutputs (stmt);
7593 const char *constraint, **oconstraints;
7594 bool allows_mem, allows_reg, is_inout;
7597 = (const char **) alloca ((noutputs) * sizeof (const char *));
7598 for (i = 0; i < noutputs; i++)
7600 tree op = gimple_asm_output_op (stmt, i);
7601 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
7602 oconstraints[i] = constraint;
7603 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
7604 &allows_reg, &is_inout);
7605 gimplify_expr (&TREE_VALUE (op), &pre, NULL,
7606 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
7607 fb_lvalue | fb_mayfail);
7609 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
7611 tree op = gimple_asm_input_op (stmt, i);
7612 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
7613 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
7614 oconstraints, &allows_mem, &allows_reg);
7615 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (op))) && allows_mem)
7617 if (!allows_reg && allows_mem)
7618 gimplify_expr (&TREE_VALUE (op), &pre, NULL,
7619 is_gimple_lvalue, fb_lvalue | fb_mayfail);
7621 gimplify_expr (&TREE_VALUE (op), &pre, NULL,
7622 is_gimple_asm_val, fb_rvalue);
7627 /* NOTE: We start gimplifying operands from last to first to
7628 make sure that side-effects on the RHS of calls, assignments
7629 and ASMs are executed before the LHS. The ordering is not
7630 important for other statements. */
7631 num_ops = gimple_num_ops (stmt);
7632 orig_lhs = gimple_get_lhs (stmt);
7633 for (i = num_ops; i > 0; i--)
7635 tree op = gimple_op (stmt, i - 1);
7636 if (op == NULL_TREE)
7638 if (i == 1 && (is_gimple_call (stmt) || is_gimple_assign (stmt)))
7639 gimplify_expr (&op, &pre, NULL, is_gimple_lvalue, fb_lvalue);
7641 && is_gimple_assign (stmt)
7643 && get_gimple_rhs_class (gimple_expr_code (stmt))
7644 == GIMPLE_SINGLE_RHS)
7645 gimplify_expr (&op, &pre, NULL,
7646 rhs_predicate_for (gimple_assign_lhs (stmt)),
7648 else if (i == 2 && is_gimple_call (stmt))
7650 if (TREE_CODE (op) == FUNCTION_DECL)
7652 gimplify_expr (&op, &pre, NULL, is_gimple_call_addr, fb_rvalue);
7655 gimplify_expr (&op, &pre, NULL, is_gimple_val, fb_rvalue);
7656 gimple_set_op (stmt, i - 1, op);
7659 lhs = gimple_get_lhs (stmt);
7660 /* If the LHS changed it in a way that requires a simple RHS,
7661 create temporary. */
7662 if (lhs && !is_gimple_reg (lhs))
7664 bool need_temp = false;
7666 if (is_gimple_assign (stmt)
7668 && get_gimple_rhs_class (gimple_expr_code (stmt))
7669 == GIMPLE_SINGLE_RHS)
7670 gimplify_expr (gimple_assign_rhs1_ptr (stmt), &pre, NULL,
7671 rhs_predicate_for (gimple_assign_lhs (stmt)),
7673 else if (is_gimple_reg (lhs))
7675 if (is_gimple_reg_type (TREE_TYPE (lhs)))
7677 if (is_gimple_call (stmt))
7679 i = gimple_call_flags (stmt);
7680 if ((i & ECF_LOOPING_CONST_OR_PURE)
7681 || !(i & (ECF_CONST | ECF_PURE)))
7684 if (stmt_can_throw_internal (stmt))
7690 if (is_gimple_reg_type (TREE_TYPE (lhs)))
7692 else if (TYPE_MODE (TREE_TYPE (lhs)) != BLKmode)
7694 if (is_gimple_call (stmt))
7696 tree fndecl = gimple_call_fndecl (stmt);
7698 if (!aggregate_value_p (TREE_TYPE (lhs), fndecl)
7699 && !(fndecl && DECL_RESULT (fndecl)
7700 && DECL_BY_REFERENCE (DECL_RESULT (fndecl))))
7709 tree temp = create_tmp_var (TREE_TYPE (lhs), NULL);
7711 if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE
7712 || TREE_CODE (TREE_TYPE (lhs)) == VECTOR_TYPE)
7713 DECL_GIMPLE_REG_P (temp) = 1;
7714 if (TREE_CODE (orig_lhs) == SSA_NAME)
7715 orig_lhs = SSA_NAME_VAR (orig_lhs);
7717 if (gimple_in_ssa_p (cfun))
7718 temp = make_ssa_name (temp, NULL);
7719 gimple_set_lhs (stmt, temp);
7720 post_stmt = gimple_build_assign (lhs, temp);
7721 if (TREE_CODE (lhs) == SSA_NAME)
7722 SSA_NAME_DEF_STMT (lhs) = post_stmt;
7728 if (gimple_referenced_vars (cfun))
7729 for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
7730 add_referenced_var (t);
7732 if (!gimple_seq_empty_p (pre))
7734 if (gimple_in_ssa_p (cfun))
7736 gimple_stmt_iterator i;
7738 for (i = gsi_start (pre); !gsi_end_p (i); gsi_next (&i))
7739 mark_symbols_for_renaming (gsi_stmt (i));
7741 gsi_insert_seq_before (gsi_p, pre, GSI_SAME_STMT);
7744 gsi_insert_after (gsi_p, post_stmt, GSI_NEW_STMT);
7746 pop_gimplify_context (NULL);
7750 /* Expands EXPR to list of gimple statements STMTS. If SIMPLE is true,
7751 force the result to be either ssa_name or an invariant, otherwise
7752 just force it to be a rhs expression. If VAR is not NULL, make the
7753 base variable of the final destination be VAR if suitable. */
7756 force_gimple_operand (tree expr, gimple_seq *stmts, bool simple, tree var)
7759 enum gimplify_status ret;
7760 gimple_predicate gimple_test_f;
7761 struct gimplify_ctx gctx;
7765 if (is_gimple_val (expr))
7768 gimple_test_f = simple ? is_gimple_val : is_gimple_reg_rhs;
7770 push_gimplify_context (&gctx);
7771 gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
7772 gimplify_ctxp->allow_rhs_cond_expr = true;
7775 expr = build2 (MODIFY_EXPR, TREE_TYPE (var), var, expr);
7777 if (TREE_CODE (expr) != MODIFY_EXPR
7778 && TREE_TYPE (expr) == void_type_node)
7780 gimplify_and_add (expr, stmts);
7785 ret = gimplify_expr (&expr, stmts, NULL, gimple_test_f, fb_rvalue);
7786 gcc_assert (ret != GS_ERROR);
7789 if (gimple_referenced_vars (cfun))
7790 for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
7791 add_referenced_var (t);
7793 pop_gimplify_context (NULL);
7798 /* Invokes force_gimple_operand for EXPR with parameters SIMPLE_P and VAR. If
7799 some statements are produced, emits them at GSI. If BEFORE is true.
7800 the statements are appended before GSI, otherwise they are appended after
7801 it. M specifies the way GSI moves after insertion (GSI_SAME_STMT or
7802 GSI_CONTINUE_LINKING are the usual values). */
7805 force_gimple_operand_gsi (gimple_stmt_iterator *gsi, tree expr,
7806 bool simple_p, tree var, bool before,
7807 enum gsi_iterator_update m)
7811 expr = force_gimple_operand (expr, &stmts, simple_p, var);
7813 if (!gimple_seq_empty_p (stmts))
7815 if (gimple_in_ssa_p (cfun))
7817 gimple_stmt_iterator i;
7819 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
7820 mark_symbols_for_renaming (gsi_stmt (i));
7824 gsi_insert_seq_before (gsi, stmts, m);
7826 gsi_insert_seq_after (gsi, stmts, m);
7832 #include "gt-gimplify.h"