1 /* Gimple IR support functions.
3 Copyright 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
4 Contributed by Aldy Hernandez <aldyh@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
32 #include "diagnostic.h"
33 #include "tree-flow.h"
34 #include "value-prof.h"
38 #include "langhooks.h"
40 /* Global type table. FIXME lto, it should be possible to re-use some
41 of the type hashing routines in tree.c (type_hash_canon, type_hash_lookup,
42 etc), but those assume that types were built with the various
43 build_*_type routines which is not the case with the streamer. */
44 static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
46 static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
47 htab_t gimple_canonical_types;
48 static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
49 htab_t type_hash_cache;
50 static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
51 htab_t canonical_type_hash_cache;
53 /* Global type comparison cache.  This is indexed by TYPE_UID for space
54 efficiency and thus cannot use and does not need GC. */
55 static htab_t gtc_visited;
56 static struct obstack gtc_ob;
58 /* All the tuples have their operand vector (if present) at the very bottom
59 of the structure. Therefore, the offset required to find the
60 operands vector is the size of the structure minus the size of the 1
61 element tree array at the end (see gimple_ops). */
62 #define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
63 (HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
64 EXPORTED_CONST size_t gimple_ops_offset_[] = {
65 #include "gsstruct.def"
69 #define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof(struct STRUCT),
70 static const size_t gsstruct_code_size[] = {
71 #include "gsstruct.def"
75 #define DEFGSCODE(SYM, NAME, GSSCODE) NAME,
76 const char *const gimple_code_name[] = {
81 #define DEFGSCODE(SYM, NAME, GSSCODE) GSSCODE,
82 EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
87 #ifdef GATHER_STATISTICS
90 int gimple_alloc_counts[(int) gimple_alloc_kind_all];
91 int gimple_alloc_sizes[(int) gimple_alloc_kind_all];
93 /* Keep in sync with gimple.h:enum gimple_alloc_kind. */
94 static const char * const gimple_alloc_kind_names[] = {
102 #endif /* GATHER_STATISTICS */
104 /* A cache of gimple_seq objects. Sequences are created and destroyed
105 fairly often during gimplification. */
106 static GTY ((deletable)) struct gimple_seq_d *gimple_seq_cache;
108 /* Private API manipulation functions shared only with some
110 extern void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
111 extern void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);
113 /* Gimple tuple constructors.
114 Note: Any constructor taking a ``gimple_seq'' as a parameter, can
115 be passed a NULL to start with an empty sequence. */
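/* As an illustration of how these constructors are typically used, the
   following sketch (not part of the original sources; DST, SRC and RET are
   assumed to be pre-existing, GIMPLE-valid trees) builds a two-statement
   sequence:

     gimple_seq seq = NULL;
     gimple_seq_add_stmt (&seq, gimple_build_assign (dst, src));
     gimple_seq_add_stmt (&seq, gimple_build_return (ret));

   Passing the address of a NULL gimple_seq lets gimple_seq_add_stmt
   allocate the sequence on first use, as noted above.  */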
117 /* Set the code for statement G to CODE. */
120 gimple_set_code (gimple g, enum gimple_code code)
122 g->gsbase.code = code;
125 /* Return the number of bytes needed to hold a GIMPLE statement with
129 gimple_size (enum gimple_code code)
131 return gsstruct_code_size[gss_for_code (code)];
134 /* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
138 gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
143 size = gimple_size (code);
145 size += sizeof (tree) * (num_ops - 1);
147 #ifdef GATHER_STATISTICS
149 enum gimple_alloc_kind kind = gimple_alloc_kind (code);
150 gimple_alloc_counts[(int) kind]++;
151 gimple_alloc_sizes[(int) kind] += size;
155 stmt = ggc_alloc_cleared_gimple_statement_d_stat (size PASS_MEM_STAT);
156 gimple_set_code (stmt, code);
157 gimple_set_num_ops (stmt, num_ops);
159 /* Do not call gimple_set_modified here as it has other side
160 effects and this tuple is still not completely built. */
161 stmt->gsbase.modified = 1;
166 /* Set SUBCODE to be the code of the expression computed by statement G. */
169 gimple_set_subcode (gimple g, unsigned subcode)
171 /* We only have 16 bits for the RHS code. Assert that we are not
173 gcc_assert (subcode < (1 << 16));
174 g->gsbase.subcode = subcode;
179 /* Build a tuple with operands. CODE is the statement to build (which
180 must be one of the GIMPLE_WITH_OPS tuples). SUBCODE is the sub-code
181 for the new tuple. NUM_OPS is the number of operands to allocate. */
183 #define gimple_build_with_ops(c, s, n) \
184 gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)
187 gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
188 unsigned num_ops MEM_STAT_DECL)
190 gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
191 gimple_set_subcode (s, subcode);
197 /* Build a GIMPLE_RETURN statement returning RETVAL. */
200 gimple_build_return (tree retval)
202 gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 1);
204 gimple_return_set_retval (s, retval);
208 /* Reset alias information on call S. */
211 gimple_call_reset_alias_info (gimple s)
213 if (gimple_call_flags (s) & ECF_CONST)
214 memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
216 pt_solution_reset (gimple_call_use_set (s));
217 if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
218 memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
220 pt_solution_reset (gimple_call_clobber_set (s));
223 /* Helper for gimple_build_call, gimple_build_call_vec and
224 gimple_build_call_from_tree. Build the basic components of a
225 GIMPLE_CALL statement to function FN with NARGS arguments. */
228 gimple_build_call_1 (tree fn, unsigned nargs)
230 gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
231 if (TREE_CODE (fn) == FUNCTION_DECL)
232 fn = build_fold_addr_expr (fn);
233 gimple_set_op (s, 1, fn);
234 gimple_call_set_fntype (s, TREE_TYPE (TREE_TYPE (fn)));
235 gimple_call_reset_alias_info (s);
240 /* Build a GIMPLE_CALL statement to function FN with the arguments
241 specified in vector ARGS. */
244 gimple_build_call_vec (tree fn, VEC(tree, heap) *args)
247 unsigned nargs = VEC_length (tree, args);
248 gimple call = gimple_build_call_1 (fn, nargs);
250 for (i = 0; i < nargs; i++)
251 gimple_call_set_arg (call, i, VEC_index (tree, args, i));
257 /* Build a GIMPLE_CALL statement to function FN. NARGS is the number of
258 arguments. The ... are the arguments. */
261 gimple_build_call (tree fn, unsigned nargs, ...)
267 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
269 call = gimple_build_call_1 (fn, nargs);
271 va_start (ap, nargs);
272 for (i = 0; i < nargs; i++)
273 gimple_call_set_arg (call, i, va_arg (ap, tree));
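/* Usage sketch (illustrative only; FNDECL, ARG0, ARG1 and LHS are
   hypothetical, already-gimplified trees):

     gimple call = gimple_build_call (fndecl, 2, arg0, arg1);
     gimple_call_set_lhs (call, lhs);

   The variadic arguments must already be valid GIMPLE operands; no
   gimplification is performed here.  */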
280 /* Helper for gimple_build_call_internal and gimple_build_call_internal_vec.
281 Build the basic components of a GIMPLE_CALL statement to internal
282 function FN with NARGS arguments. */
285 gimple_build_call_internal_1 (enum internal_fn fn, unsigned nargs)
287 gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
288 s->gsbase.subcode |= GF_CALL_INTERNAL;
289 gimple_call_set_internal_fn (s, fn);
290 gimple_call_reset_alias_info (s);
295 /* Build a GIMPLE_CALL statement to internal function FN. NARGS is
296 the number of arguments. The ... are the arguments. */
299 gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
305 call = gimple_build_call_internal_1 (fn, nargs);
306 va_start (ap, nargs);
307 for (i = 0; i < nargs; i++)
308 gimple_call_set_arg (call, i, va_arg (ap, tree));
315 /* Build a GIMPLE_CALL statement to internal function FN with the arguments
316 specified in vector ARGS. */
319 gimple_build_call_internal_vec (enum internal_fn fn, VEC(tree, heap) *args)
324 nargs = VEC_length (tree, args);
325 call = gimple_build_call_internal_1 (fn, nargs);
326 for (i = 0; i < nargs; i++)
327 gimple_call_set_arg (call, i, VEC_index (tree, args, i));
333 /* Build a GIMPLE_CALL statement from CALL_EXPR T. Note that T is
334 assumed to be in GIMPLE form already. Minimal checking is done of
338 gimple_build_call_from_tree (tree t)
342 tree fndecl = get_callee_fndecl (t);
344 gcc_assert (TREE_CODE (t) == CALL_EXPR);
346 nargs = call_expr_nargs (t);
347 call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);
349 for (i = 0; i < nargs; i++)
350 gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));
352 gimple_set_block (call, TREE_BLOCK (t));
354 /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL. */
355 gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
356 gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
357 gimple_call_set_cannot_inline (call, CALL_CANNOT_INLINE_P (t));
358 gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
360 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
361 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
362 gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t));
364 gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
365 gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
366 gimple_call_set_nothrow (call, TREE_NOTHROW (t));
367 gimple_set_no_warning (call, TREE_NO_WARNING (t));
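/* Illustrative sketch of converting a CALL_EXPR that is already in GIMPLE
   form (FNDECL and ARG are hypothetical names):

     tree expr = build_call_expr (fndecl, 1, arg);
     gimple call = gimple_build_call_from_tree (expr);

   All of the CALL_EXPR flags handled above are carried over to the new
   tuple; the argument list itself is copied verbatim.  */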
373 /* Extract the operands and code for expression EXPR into *SUBCODE_P,
374 *OP1_P, *OP2_P and *OP3_P respectively. */
377 extract_ops_from_tree_1 (tree expr, enum tree_code *subcode_p, tree *op1_p,
378 tree *op2_p, tree *op3_p)
380 enum gimple_rhs_class grhs_class;
382 *subcode_p = TREE_CODE (expr);
383 grhs_class = get_gimple_rhs_class (*subcode_p);
385 if (grhs_class == GIMPLE_TERNARY_RHS)
387 *op1_p = TREE_OPERAND (expr, 0);
388 *op2_p = TREE_OPERAND (expr, 1);
389 *op3_p = TREE_OPERAND (expr, 2);
391 else if (grhs_class == GIMPLE_BINARY_RHS)
393 *op1_p = TREE_OPERAND (expr, 0);
394 *op2_p = TREE_OPERAND (expr, 1);
397 else if (grhs_class == GIMPLE_UNARY_RHS)
399 *op1_p = TREE_OPERAND (expr, 0);
403 else if (grhs_class == GIMPLE_SINGLE_RHS)
414 /* Build a GIMPLE_ASSIGN statement.
416 LHS is the left-hand side of the assignment.
417 RHS is the right-hand side of the assignment, which can be a unary or binary expression. */
420 gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
422 enum tree_code subcode;
425 extract_ops_from_tree_1 (rhs, &subcode, &op1, &op2, &op3);
426 return gimple_build_assign_with_ops_stat (subcode, lhs, op1, op2, op3
431 /* Build a GIMPLE_ASSIGN statement with sub-code SUBCODE and operands
432 OP1 and OP2. If OP2 is NULL then SUBCODE must be of class
433 GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS. */
436 gimple_build_assign_with_ops_stat (enum tree_code subcode, tree lhs, tree op1,
437 tree op2, tree op3 MEM_STAT_DECL)
442 /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
444 num_ops = get_gimple_rhs_num_ops (subcode) + 1;
446 p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
448 gimple_assign_set_lhs (p, lhs);
449 gimple_assign_set_rhs1 (p, op1);
452 gcc_assert (num_ops > 2);
453 gimple_assign_set_rhs2 (p, op2);
458 gcc_assert (num_ops > 3);
459 gimple_assign_set_rhs3 (p, op3);
466 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
468 DST/SRC are the destination and source respectively. You can pass
469 ungimplified trees in DST or SRC, in which case they will be
470 converted to a gimple operand if necessary.
472 This function returns the newly created GIMPLE_ASSIGN tuple. */
475 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
477 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
478 gimplify_and_add (t, seq_p);
480 return gimple_seq_last_stmt (*seq_p);
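/* For example (sketch only; DST and EXPR are hypothetical trees, and EXPR
   may still contain non-GIMPLE sub-expressions):

     gimple_seq seq = NULL;
     gimple last = gimplify_assign (dst, expr, &seq);

   SEQ then holds any statements created while gimplifying EXPR, and the
   final assignment to DST is returned in LAST.  */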
484 /* Build a GIMPLE_COND statement.
486 PRED_CODE is the tree code of the comparison used to compare LHS and RHS.
487 T_LABEL is the label to jump to if the condition is true.
488 F_LABEL is the label to jump to otherwise. */
491 gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
492 tree t_label, tree f_label)
496 gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
497 p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
498 gimple_cond_set_lhs (p, lhs);
499 gimple_cond_set_rhs (p, rhs);
500 gimple_cond_set_true_label (p, t_label);
501 gimple_cond_set_false_label (p, f_label);
506 /* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND. */
509 gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
510 tree *lhs_p, tree *rhs_p)
512 gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
513 || TREE_CODE (cond) == TRUTH_NOT_EXPR
514 || is_gimple_min_invariant (cond)
515 || SSA_VAR_P (cond));
517 extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);
519 /* Canonicalize conditionals of the form 'if (!VAL)'. */
520 if (*code_p == TRUTH_NOT_EXPR)
523 gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
524 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
526 /* Canonicalize conditionals of the form 'if (VAL)' */
527 else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
530 gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
531 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
536 /* Build a GIMPLE_COND statement from the conditional expression tree
537 COND. T_LABEL and F_LABEL are as in gimple_build_cond. */
540 gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
545 gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
546 return gimple_build_cond (code, lhs, rhs, t_label, f_label);
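/* Sketch of direct use of gimple_build_cond (hypothetical names; X is an
   SSA name or invariant, L_TRUE and L_FALSE are LABEL_DECLs):

     gimple cond = gimple_build_cond (NE_EXPR, x,
                                      build_int_cst (TREE_TYPE (x), 0),
                                      l_true, l_false);

   gimple_build_cond_from_tree performs the same canonicalization as
   gimple_cond_get_ops_from_tree before building the tuple.  */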
549 /* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
550 boolean expression tree COND. */
553 gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
558 gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
559 gimple_cond_set_condition (stmt, code, lhs, rhs);
562 /* Build a GIMPLE_LABEL statement for LABEL. */
565 gimple_build_label (tree label)
567 gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
568 gimple_label_set_label (p, label);
572 /* Build a GIMPLE_GOTO statement to label DEST. */
575 gimple_build_goto (tree dest)
577 gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
578 gimple_goto_set_dest (p, dest);
583 /* Build a GIMPLE_NOP statement. */
586 gimple_build_nop (void)
588 return gimple_alloc (GIMPLE_NOP, 0);
592 /* Build a GIMPLE_BIND statement.
593 VARS are the variables in BODY.
594 BLOCK is the containing block. */
597 gimple_build_bind (tree vars, gimple_seq body, tree block)
599 gimple p = gimple_alloc (GIMPLE_BIND, 0);
600 gimple_bind_set_vars (p, vars);
602 gimple_bind_set_body (p, body);
604 gimple_bind_set_block (p, block);
608 /* Helper function to set the simple fields of an asm stmt.
610 STRING is a pointer to a string that is the asm block's assembly code.
611 NINPUTS is the number of register inputs.
612 NOUTPUTS is the number of register outputs.
613 NCLOBBERS is the number of clobbered registers.
617 gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
618 unsigned nclobbers, unsigned nlabels)
621 int size = strlen (string);
623 /* ASMs with labels cannot have outputs. This should have been
624 enforced by the front end. */
625 gcc_assert (nlabels == 0 || noutputs == 0);
627 p = gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
628 ninputs + noutputs + nclobbers + nlabels);
630 p->gimple_asm.ni = ninputs;
631 p->gimple_asm.no = noutputs;
632 p->gimple_asm.nc = nclobbers;
633 p->gimple_asm.nl = nlabels;
634 p->gimple_asm.string = ggc_alloc_string (string, size);
636 #ifdef GATHER_STATISTICS
637 gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;
643 /* Build a GIMPLE_ASM statement.
645 STRING is the assembly code.
646 NINPUTS is the number of register inputs.
647 NOUTPUTS is the number of register outputs.
648 NCLOBBERS is the number of clobbered registers.
649 INPUTS is a vector of the input register parameters.
650 OUTPUTS is a vector of the output register parameters.
651 CLOBBERS is a vector of the clobbered register parameters.
652 LABELS is a vector of destination labels. */
655 gimple_build_asm_vec (const char *string, VEC(tree,gc)* inputs,
656 VEC(tree,gc)* outputs, VEC(tree,gc)* clobbers,
657 VEC(tree,gc)* labels)
662 p = gimple_build_asm_1 (string,
663 VEC_length (tree, inputs),
664 VEC_length (tree, outputs),
665 VEC_length (tree, clobbers),
666 VEC_length (tree, labels));
668 for (i = 0; i < VEC_length (tree, inputs); i++)
669 gimple_asm_set_input_op (p, i, VEC_index (tree, inputs, i));
671 for (i = 0; i < VEC_length (tree, outputs); i++)
672 gimple_asm_set_output_op (p, i, VEC_index (tree, outputs, i));
674 for (i = 0; i < VEC_length (tree, clobbers); i++)
675 gimple_asm_set_clobber_op (p, i, VEC_index (tree, clobbers, i));
677 for (i = 0; i < VEC_length (tree, labels); i++)
678 gimple_asm_set_label_op (p, i, VEC_index (tree, labels, i));
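/* Illustrative sketch: an empty volatile asm acting as an optimization
   barrier (this assumes the usual TREE_LIST-of-STRING_CST encoding the
   front ends use for clobber operands):

     VEC(tree,gc) *clobbers = NULL;
     VEC_safe_push (tree, gc, clobbers,
                    build_tree_list (NULL_TREE, build_string (6, "memory")));
     gimple barrier = gimple_build_asm_vec ("", NULL, NULL, clobbers, NULL);
     gimple_asm_set_volatile (barrier, true);

   NULL input, output and label vectors are treated as empty.  */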
683 /* Build a GIMPLE_CATCH statement.
685 TYPES are the catch types.
686 HANDLER is the exception handler. */
689 gimple_build_catch (tree types, gimple_seq handler)
691 gimple p = gimple_alloc (GIMPLE_CATCH, 0);
692 gimple_catch_set_types (p, types);
694 gimple_catch_set_handler (p, handler);
699 /* Build a GIMPLE_EH_FILTER statement.
701 TYPES are the filter's types.
702 FAILURE is the filter's failure action. */
705 gimple_build_eh_filter (tree types, gimple_seq failure)
707 gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
708 gimple_eh_filter_set_types (p, types);
710 gimple_eh_filter_set_failure (p, failure);
715 /* Build a GIMPLE_EH_MUST_NOT_THROW statement. */
718 gimple_build_eh_must_not_throw (tree decl)
720 gimple p = gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0);
722 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
723 gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
724 gimple_eh_must_not_throw_set_fndecl (p, decl);
729 /* Build a GIMPLE_TRY statement.
731 EVAL is the expression to evaluate.
732 CLEANUP is the cleanup expression.
733 KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
734 whether this is a try/catch or a try/finally respectively. */
737 gimple_build_try (gimple_seq eval, gimple_seq cleanup,
738 enum gimple_try_flags kind)
742 gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
743 p = gimple_alloc (GIMPLE_TRY, 0);
744 gimple_set_subcode (p, kind);
746 gimple_try_set_eval (p, eval);
748 gimple_try_set_cleanup (p, cleanup);
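/* Typical usage sketch (BODY_SEQ and CLEANUP_SEQ are hypothetical
   sequences built elsewhere):

     gimple try_stmt = gimple_build_try (body_seq, cleanup_seq,
                                         GIMPLE_TRY_FINALLY);

   This ensures CLEANUP_SEQ runs whether or not BODY_SEQ exits normally;
   use GIMPLE_TRY_CATCH for an exception handler region instead.  */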
753 /* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.
755 CLEANUP is the cleanup expression. */
758 gimple_build_wce (gimple_seq cleanup)
760 gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
762 gimple_wce_set_cleanup (p, cleanup);
768 /* Build a GIMPLE_RESX statement. */
771 gimple_build_resx (int region)
773 gimple p = gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0);
774 p->gimple_eh_ctrl.region = region;
779 /* The helper for constructing a gimple switch statement.
780 INDEX is the switch's index.
781 NLABELS is the number of labels in the switch excluding the default.
782 DEFAULT_LABEL is the default label for the switch statement. */
785 gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
787 /* nlabels + 1 default label + 1 index. */
788 gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
789 1 + (default_label != NULL) + nlabels);
790 gimple_switch_set_index (p, index);
792 gimple_switch_set_default_label (p, default_label);
797 /* Build a GIMPLE_SWITCH statement.
799 INDEX is the switch's index.
800 NLABELS is the number of labels in the switch excluding the DEFAULT_LABEL.
801 ... are the labels excluding the default. */
804 gimple_build_switch (unsigned nlabels, tree index, tree default_label, ...)
808 gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);
810 /* Store the rest of the labels. */
811 va_start (al, default_label);
812 offset = (default_label != NULL);
813 for (i = 0; i < nlabels; i++)
814 gimple_switch_set_label (p, i + offset, va_arg (al, tree));
821 /* Build a GIMPLE_SWITCH statement.
823 INDEX is the switch's index.
824 DEFAULT_LABEL is the default label.
825 ARGS is a vector of labels excluding the default. */
828 gimple_build_switch_vec (tree index, tree default_label, VEC(tree, heap) *args)
830 unsigned i, offset, nlabels = VEC_length (tree, args);
831 gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);
833 /* Copy the labels from the vector to the switch statement. */
834 offset = (default_label != NULL);
835 for (i = 0; i < nlabels; i++)
836 gimple_switch_set_label (p, i + offset, VEC_index (tree, args, i));
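/* Sketch only (CASE_A, CASE_B and DEFAULT_CASE are assumed to be
   CASE_LABEL_EXPRs built elsewhere, and INDEX_VAR the controlling value):

     VEC(tree,heap) *labels = VEC_alloc (tree, heap, 2);
     VEC_safe_push (tree, heap, labels, case_a);
     VEC_safe_push (tree, heap, labels, case_b);
     gimple sw = gimple_build_switch_vec (index_var, default_case, labels);
     VEC_free (tree, heap, labels);

   The labels are copied out of the vector, and the default label, when
   present, occupies label slot 0 of the new statement.  */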
841 /* Build a GIMPLE_EH_DISPATCH statement. */
844 gimple_build_eh_dispatch (int region)
846 gimple p = gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0);
847 p->gimple_eh_ctrl.region = region;
851 /* Build a new GIMPLE_DEBUG_BIND statement.
853 VAR is bound to VALUE; block and location are taken from STMT. */
856 gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
858 gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
859 (unsigned)GIMPLE_DEBUG_BIND, 2
862 gimple_debug_bind_set_var (p, var);
863 gimple_debug_bind_set_value (p, value);
866 gimple_set_block (p, gimple_block (stmt));
867 gimple_set_location (p, gimple_location (stmt));
874 /* Build a GIMPLE_OMP_CRITICAL statement.
876 BODY is the sequence of statements for which only one thread can execute.
877 NAME is optional identifier for this critical block. */
880 gimple_build_omp_critical (gimple_seq body, tree name)
882 gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
883 gimple_omp_critical_set_name (p, name);
885 gimple_omp_set_body (p, body);
890 /* Build a GIMPLE_OMP_FOR statement.
892 BODY is the sequence of statements inside the for loop.
893 CLAUSES are any of the OMP loop construct's clauses: private, firstprivate,
894 lastprivate, reductions, ordered, schedule, and nowait.
895 COLLAPSE is the collapse count.
896 PRE_BODY is the sequence of statements that are loop invariant. */
899 gimple_build_omp_for (gimple_seq body, tree clauses, size_t collapse,
902 gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
904 gimple_omp_set_body (p, body);
905 gimple_omp_for_set_clauses (p, clauses);
906 p->gimple_omp_for.collapse = collapse;
907 p->gimple_omp_for.iter
908 = ggc_alloc_cleared_vec_gimple_omp_for_iter (collapse);
910 gimple_omp_for_set_pre_body (p, pre_body);
916 /* Build a GIMPLE_OMP_PARALLEL statement.
918 BODY is the sequence of statements which are executed in parallel.
919 CLAUSES are the OMP parallel construct's clauses.
920 CHILD_FN is the function created for the parallel threads to execute.
921 DATA_ARG is the shared data argument(s).
924 gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
927 gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
929 gimple_omp_set_body (p, body);
930 gimple_omp_parallel_set_clauses (p, clauses);
931 gimple_omp_parallel_set_child_fn (p, child_fn);
932 gimple_omp_parallel_set_data_arg (p, data_arg);
938 /* Build a GIMPLE_OMP_TASK statement.
940 BODY is the sequence of statements which are executed by the explicit task.
941 CLAUSES are the OMP task construct's clauses.
942 CHILD_FN is the function created for the task to execute.
943 DATA_ARG is the shared data argument(s).
944 COPY_FN is the optional function for firstprivate initialization.
945 ARG_SIZE and ARG_ALIGN are size and alignment of the data block. */
948 gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
949 tree data_arg, tree copy_fn, tree arg_size,
952 gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
954 gimple_omp_set_body (p, body);
955 gimple_omp_task_set_clauses (p, clauses);
956 gimple_omp_task_set_child_fn (p, child_fn);
957 gimple_omp_task_set_data_arg (p, data_arg);
958 gimple_omp_task_set_copy_fn (p, copy_fn);
959 gimple_omp_task_set_arg_size (p, arg_size);
960 gimple_omp_task_set_arg_align (p, arg_align);
966 /* Build a GIMPLE_OMP_SECTION statement for a sections statement.
968 BODY is the sequence of statements in the section. */
971 gimple_build_omp_section (gimple_seq body)
973 gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
975 gimple_omp_set_body (p, body);
981 /* Build a GIMPLE_OMP_MASTER statement.
983 BODY is the sequence of statements to be executed by just the master. */
986 gimple_build_omp_master (gimple_seq body)
988 gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
990 gimple_omp_set_body (p, body);
996 /* Build a GIMPLE_OMP_CONTINUE statement.
998 CONTROL_DEF is the definition of the control variable.
999 CONTROL_USE is the use of the control variable. */
1002 gimple_build_omp_continue (tree control_def, tree control_use)
1004 gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
1005 gimple_omp_continue_set_control_def (p, control_def);
1006 gimple_omp_continue_set_control_use (p, control_use);
1010 /* Build a GIMPLE_OMP_ORDERED statement.
1012 BODY is the sequence of statements inside a loop that will be executed in
1016 gimple_build_omp_ordered (gimple_seq body)
1018 gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
1020 gimple_omp_set_body (p, body);
1026 /* Build a GIMPLE_OMP_RETURN statement.
1027 WAIT_P is true if this is a non-waiting return. */
1030 gimple_build_omp_return (bool wait_p)
1032 gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
1034 gimple_omp_return_set_nowait (p);
1040 /* Build a GIMPLE_OMP_SECTIONS statement.
1042 BODY is a sequence of section statements.
1043 CLAUSES are any of the OMP sections construct's clauses: private,
1044 firstprivate, lastprivate, reduction, and nowait. */
1047 gimple_build_omp_sections (gimple_seq body, tree clauses)
1049 gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
1051 gimple_omp_set_body (p, body);
1052 gimple_omp_sections_set_clauses (p, clauses);
1058 /* Build a GIMPLE_OMP_SECTIONS_SWITCH. */
1061 gimple_build_omp_sections_switch (void)
1063 return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
1067 /* Build a GIMPLE_OMP_SINGLE statement.
1069 BODY is the sequence of statements that will be executed once.
1070 CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
1071 copyprivate, nowait. */
1074 gimple_build_omp_single (gimple_seq body, tree clauses)
1076 gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
1078 gimple_omp_set_body (p, body);
1079 gimple_omp_single_set_clauses (p, clauses);
1085 /* Build a GIMPLE_OMP_ATOMIC_LOAD statement. */
1088 gimple_build_omp_atomic_load (tree lhs, tree rhs)
1090 gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
1091 gimple_omp_atomic_load_set_lhs (p, lhs);
1092 gimple_omp_atomic_load_set_rhs (p, rhs);
1096 /* Build a GIMPLE_OMP_ATOMIC_STORE statement.
1098 VAL is the value we are storing. */
1101 gimple_build_omp_atomic_store (tree val)
1103 gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
1104 gimple_omp_atomic_store_set_val (p, val);
1108 /* Build a GIMPLE_PREDICT statement. PREDICTOR is one of the predictors from
1109 predict.def; OUTCOME is NOT_TAKEN or TAKEN. */
1112 gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
1114 gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
1115 /* Ensure all the predictors fit into the lower bits of the subcode. */
1116 gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
1117 gimple_predict_set_predictor (p, predictor);
1118 gimple_predict_set_outcome (p, outcome);
1122 #if defined ENABLE_GIMPLE_CHECKING
1123 /* Complain of a gimple type mismatch and die. */
1126 gimple_check_failed (const_gimple gs, const char *file, int line,
1127 const char *function, enum gimple_code code,
1128 enum tree_code subcode)
1130 internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
1131 gimple_code_name[code],
1132 tree_code_name[subcode],
1133 gimple_code_name[gimple_code (gs)],
1134 gs->gsbase.subcode > 0
1135 ? tree_code_name[gs->gsbase.subcode]
1137 function, trim_filename (file), line);
1139 #endif /* ENABLE_GIMPLE_CHECKING */
1142 /* Allocate a new GIMPLE sequence in GC memory and return it. If
1143 there are free sequences in GIMPLE_SEQ_CACHE return one of those
1147 gimple_seq_alloc (void)
1149 gimple_seq seq = gimple_seq_cache;
1152 gimple_seq_cache = gimple_seq_cache->next_free;
1153 gcc_assert (gimple_seq_cache != seq);
1154 memset (seq, 0, sizeof (*seq));
1158 seq = ggc_alloc_cleared_gimple_seq_d ();
1159 #ifdef GATHER_STATISTICS
1160 gimple_alloc_counts[(int) gimple_alloc_kind_seq]++;
1161 gimple_alloc_sizes[(int) gimple_alloc_kind_seq] += sizeof (*seq);
1168 /* Return SEQ to the free pool of GIMPLE sequences. */
1171 gimple_seq_free (gimple_seq seq)
1176 gcc_assert (gimple_seq_first (seq) == NULL);
1177 gcc_assert (gimple_seq_last (seq) == NULL);
1179 /* If this triggers, it's a sign that the same list is being freed
1181 gcc_assert (seq != gimple_seq_cache || gimple_seq_cache == NULL);
1183 /* Add SEQ to the pool of free sequences. */
1184 seq->next_free = gimple_seq_cache;
1185 gimple_seq_cache = seq;
1189 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
1190 *SEQ_P is NULL, a new sequence is allocated. */
1193 gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
1195 gimple_stmt_iterator si;
1201 *seq_p = gimple_seq_alloc ();
1203 si = gsi_last (*seq_p);
1204 gsi_insert_after (&si, gs, GSI_NEW_STMT);
1208 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
1209 NULL, a new sequence is allocated. */
1212 gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
1214 gimple_stmt_iterator si;
1220 *dst_p = gimple_seq_alloc ();
1222 si = gsi_last (*dst_p);
1223 gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
1227 /* Helper function of empty_body_p. Return true if STMT is an empty
1231 empty_stmt_p (gimple stmt)
1233 if (gimple_code (stmt) == GIMPLE_NOP)
1235 if (gimple_code (stmt) == GIMPLE_BIND)
1236 return empty_body_p (gimple_bind_body (stmt));
1241 /* Return true if BODY contains nothing but empty statements. */
1244 empty_body_p (gimple_seq body)
1246 gimple_stmt_iterator i;
1248 if (gimple_seq_empty_p (body))
1250 for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
1251 if (!empty_stmt_p (gsi_stmt (i))
1252 && !is_gimple_debug (gsi_stmt (i)))
1259 /* Perform a deep copy of sequence SRC and return the result. */
1262 gimple_seq_copy (gimple_seq src)
1264 gimple_stmt_iterator gsi;
1265 gimple_seq new_seq = gimple_seq_alloc ();
1268 for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
1270 stmt = gimple_copy (gsi_stmt (gsi));
1271 gimple_seq_add_stmt (&new_seq, stmt);
1278 /* Walk all the statements in the sequence SEQ calling walk_gimple_stmt
1279 on each one. WI is as in walk_gimple_stmt.
1281 If walk_gimple_stmt returns non-NULL, the walk is stopped, the
1282 value is stored in WI->CALLBACK_RESULT and the statement that
1283 produced the value is returned.
1285 Otherwise, all the statements are walked and NULL returned. */
1288 walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
1289 walk_tree_fn callback_op, struct walk_stmt_info *wi)
1291 gimple_stmt_iterator gsi;
1293 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
1295 tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
1298 /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
1301 wi->callback_result = ret;
1302 return gsi_stmt (gsi);
1307 wi->callback_result = NULL_TREE;
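/* A minimal sketch of a statement callback and its invocation (the
   callback name and the NCALLS counter are hypothetical):

     static tree
     count_calls_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
                       struct walk_stmt_info *wi)
     {
       if (is_gimple_call (gsi_stmt (*gsi)))
         ++*(unsigned *) wi->info;
       *handled_ops_p = false;
       return NULL_TREE;
     }

     unsigned ncalls = 0;
     struct walk_stmt_info wi;
     memset (&wi, 0, sizeof (wi));
     wi.info = &ncalls;
     walk_gimple_seq (seq, count_calls_stmt, NULL, &wi);

   Returning NULL_TREE from the callback lets the walk continue; a non-NULL
   return value stops it as described above.  */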
1313 /* Helper function for walk_gimple_stmt. Walk operands of a GIMPLE_ASM. */
1316 walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
1317 struct walk_stmt_info *wi)
1321 const char **oconstraints;
1323 const char *constraint;
1324 bool allows_mem, allows_reg, is_inout;
1326 noutputs = gimple_asm_noutputs (stmt);
1327 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
1332 for (i = 0; i < noutputs; i++)
1334 op = gimple_asm_output_op (stmt, i);
1335 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
1336 oconstraints[i] = constraint;
1337 parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg,
1340 wi->val_only = (allows_reg || !allows_mem);
1341 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
1346 n = gimple_asm_ninputs (stmt);
1347 for (i = 0; i < n; i++)
1349 op = gimple_asm_input_op (stmt, i);
1350 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
1351 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1352 oconstraints, &allows_mem, &allows_reg);
1355 wi->val_only = (allows_reg || !allows_mem);
1356 /* Although input "m" is not really an LHS, we need an lvalue. */
1357 wi->is_lhs = !wi->val_only;
1359 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
1367 wi->val_only = true;
1370 n = gimple_asm_nlabels (stmt);
1371 for (i = 0; i < n; i++)
1373 op = gimple_asm_label_op (stmt, i);
1374 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
1383 /* Helper function of WALK_GIMPLE_STMT. Walk every tree operand in
1384 STMT. CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.
1386 CALLBACK_OP is called on each operand of STMT via walk_tree.
1387 Additional parameters to walk_tree must be stored in WI. For each operand
1388 OP, walk_tree is called as:
1390 walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)
1392 If CALLBACK_OP returns non-NULL for an operand, the remaining
1393 operands are not scanned.
1395 The return value is that returned by the last call to walk_tree, or
1396 NULL_TREE if no CALLBACK_OP is specified. */
1399 walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
1400 struct walk_stmt_info *wi)
1402 struct pointer_set_t *pset = (wi) ? wi->pset : NULL;
1404 tree ret = NULL_TREE;
1406 switch (gimple_code (stmt))
1409 /* Walk the RHS operands. If the LHS is of a non-renamable type or
1410 is a register variable, we may use a COMPONENT_REF on the RHS. */
1413 tree lhs = gimple_assign_lhs (stmt);
1415 = (is_gimple_reg_type (TREE_TYPE (lhs)) && !is_gimple_reg (lhs))
1416 || !gimple_assign_single_p (stmt);
1419 for (i = 1; i < gimple_num_ops (stmt); i++)
1421 ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
1427 /* Walk the LHS. If the RHS is appropriate for a memory, we
1428 may use a COMPONENT_REF on the LHS. */
1431 /* If the RHS has more than 1 operand, it is not appropriate
1433 wi->val_only = !is_gimple_mem_rhs (gimple_assign_rhs1 (stmt))
1434 || !gimple_assign_single_p (stmt);
1438 ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
1444 wi->val_only = true;
1453 wi->val_only = true;
1456 ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
1460 ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
1464 for (i = 0; i < gimple_call_num_args (stmt); i++)
1468 = is_gimple_reg_type (TREE_TYPE (gimple_call_arg (stmt, i)));
1469 ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
1475 if (gimple_call_lhs (stmt))
1481 = is_gimple_reg_type (TREE_TYPE (gimple_call_lhs (stmt)));
1484 ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
1492 wi->val_only = true;
1497 ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
1503 case GIMPLE_EH_FILTER:
1504 ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
1511 ret = walk_gimple_asm (stmt, callback_op, wi);
1516 case GIMPLE_OMP_CONTINUE:
1517 ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
1518 callback_op, wi, pset);
1522 ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
1523 callback_op, wi, pset);
1528 case GIMPLE_OMP_CRITICAL:
1529 ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
1535 case GIMPLE_OMP_FOR:
1536 ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
1540 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1542 ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
1546 ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
1550 ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
1554 ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
1561 case GIMPLE_OMP_PARALLEL:
1562 ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
1566 ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
1570 ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
1576 case GIMPLE_OMP_TASK:
1577 ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
1581 ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
1585 ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
1589 ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
1593 ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
1597 ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
1603 case GIMPLE_OMP_SECTIONS:
1604 ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
1609 ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
1616 case GIMPLE_OMP_SINGLE:
1617 ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
1623 case GIMPLE_OMP_ATOMIC_LOAD:
1624 ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
1629 ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
1635 case GIMPLE_OMP_ATOMIC_STORE:
1636 ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
1642 /* Tuples that do not have operands. */
1645 case GIMPLE_OMP_RETURN:
1646 case GIMPLE_PREDICT:
1651 enum gimple_statement_structure_enum gss;
1652 gss = gimple_statement_structure (stmt);
1653 if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
1654 for (i = 0; i < gimple_num_ops (stmt); i++)
1656 ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
1668 /* Walk the current statement in GSI (optionally using traversal state
1669 stored in WI). If WI is NULL, no state is kept during traversal.
1670 The callback CALLBACK_STMT is called. If CALLBACK_STMT indicates
1671 that it has handled all the operands of the statement, its return
1672 value is returned. Otherwise, the return value from CALLBACK_STMT
1673 is discarded and its operands are scanned.
1675 If CALLBACK_STMT is NULL or it didn't handle the operands,
1676 CALLBACK_OP is called on each operand of the statement via
1677 walk_gimple_op. If walk_gimple_op returns non-NULL for any
1678 operand, the remaining operands are not scanned. In this case, the
1679 return value from CALLBACK_OP is returned.
1681 In any other case, NULL_TREE is returned. */
1684 walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
1685 walk_tree_fn callback_op, struct walk_stmt_info *wi)
1689 gimple stmt = gsi_stmt (*gsi);
1694 if (wi && wi->want_locations && gimple_has_location (stmt))
1695 input_location = gimple_location (stmt);
1699 /* Invoke the statement callback. Return if the callback handled
1700 all of STMT operands by itself. */
1703 bool handled_ops = false;
1704 tree_ret = callback_stmt (gsi, &handled_ops, wi);
1708 /* If CALLBACK_STMT did not handle operands, it should not have
1709 a value to return. */
1710 gcc_assert (tree_ret == NULL);
1712 /* Re-read stmt in case the callback changed it. */
1713 stmt = gsi_stmt (*gsi);
1716 /* If CALLBACK_OP is defined, invoke it on every operand of STMT. */
1719 tree_ret = walk_gimple_op (stmt, callback_op, wi);
1724 /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them. */
1725 switch (gimple_code (stmt))
1728 ret = walk_gimple_seq (gimple_bind_body (stmt), callback_stmt,
1731 return wi->callback_result;
1735 ret = walk_gimple_seq (gimple_catch_handler (stmt), callback_stmt,
1738 return wi->callback_result;
1741 case GIMPLE_EH_FILTER:
1742 ret = walk_gimple_seq (gimple_eh_filter_failure (stmt), callback_stmt,
1745 return wi->callback_result;
1749 ret = walk_gimple_seq (gimple_try_eval (stmt), callback_stmt, callback_op,
1752 return wi->callback_result;
1754 ret = walk_gimple_seq (gimple_try_cleanup (stmt), callback_stmt,
1757 return wi->callback_result;
1760 case GIMPLE_OMP_FOR:
1761 ret = walk_gimple_seq (gimple_omp_for_pre_body (stmt), callback_stmt,
1764 return wi->callback_result;
1767 case GIMPLE_OMP_CRITICAL:
1768 case GIMPLE_OMP_MASTER:
1769 case GIMPLE_OMP_ORDERED:
1770 case GIMPLE_OMP_SECTION:
1771 case GIMPLE_OMP_PARALLEL:
1772 case GIMPLE_OMP_TASK:
1773 case GIMPLE_OMP_SECTIONS:
1774 case GIMPLE_OMP_SINGLE:
1775 ret = walk_gimple_seq (gimple_omp_body (stmt), callback_stmt, callback_op,
1778 return wi->callback_result;
1781 case GIMPLE_WITH_CLEANUP_EXPR:
1782 ret = walk_gimple_seq (gimple_wce_cleanup (stmt), callback_stmt,
1785 return wi->callback_result;
1789 gcc_assert (!gimple_has_substatements (stmt));
1797 /* Set sequence SEQ to be the GIMPLE body for function FN. */
1800 gimple_set_body (tree fndecl, gimple_seq seq)
1802 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
1805 /* If FNDECL still does not have a function structure associated
1806 with it, then it does not make sense for it to receive a
1808 gcc_assert (seq == NULL);
1811 fn->gimple_body = seq;
1815 /* Return the body of GIMPLE statements for function FN. After the
1816 CFG pass, the function body doesn't exist anymore because it has
1817 been split up into basic blocks. In this case, it returns
1821 gimple_body (tree fndecl)
1823 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
1824 return fn ? fn->gimple_body : NULL;
1827 /* Return true when FNDECL has a GIMPLE body either in unlowered
1830 gimple_has_body_p (tree fndecl)
1832 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
1833 return (gimple_body (fndecl) || (fn && fn->cfg));
1836 /* Return true if calls C1 and C2 are known to go to the same function. */
1839 gimple_call_same_target_p (const_gimple c1, const_gimple c2)
1841 if (gimple_call_internal_p (c1))
1842 return (gimple_call_internal_p (c2)
1843 && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2));
1845 return (gimple_call_fn (c1) == gimple_call_fn (c2)
1846 || (gimple_call_fndecl (c1)
1847 && gimple_call_fndecl (c1) == gimple_call_fndecl (c2)));
1850 /* Detect flags from a GIMPLE_CALL. This is just like
1851 call_expr_flags, but for gimple tuples. */
1854 gimple_call_flags (const_gimple stmt)
1857 tree decl = gimple_call_fndecl (stmt);
1860 flags = flags_from_decl_or_type (decl);
1861 else if (gimple_call_internal_p (stmt))
1862 flags = internal_fn_flags (gimple_call_internal_fn (stmt));
1864 flags = flags_from_decl_or_type (gimple_call_fntype (stmt));
1866 if (stmt->gsbase.subcode & GF_CALL_NOTHROW)
1867 flags |= ECF_NOTHROW;
1872 /* Return the "fn spec" string for call STMT. */
1875 gimple_call_fnspec (const_gimple stmt)
1879 type = gimple_call_fntype (stmt);
1883 attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
1887 return TREE_VALUE (TREE_VALUE (attr));
1890 /* Detects argument flags for argument number ARG on call STMT. */
1893 gimple_call_arg_flags (const_gimple stmt, unsigned arg)
1895 tree attr = gimple_call_fnspec (stmt);
1897 if (!attr || 1 + arg >= (unsigned) TREE_STRING_LENGTH (attr))
1900 switch (TREE_STRING_POINTER (attr)[1 + arg])
1907 return EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE;
1910 return EAF_NOCLOBBER | EAF_NOESCAPE;
1913 return EAF_DIRECT | EAF_NOESCAPE;
1916 return EAF_NOESCAPE;
1924 /* Detects return flags for the call STMT. */
1927 gimple_call_return_flags (const_gimple stmt)
1931 if (gimple_call_flags (stmt) & ECF_MALLOC)
1934 attr = gimple_call_fnspec (stmt);
1935 if (!attr || TREE_STRING_LENGTH (attr) < 1)
1938 switch (TREE_STRING_POINTER (attr)[0])
1944 return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');
1956 /* Return true if GS is a copy assignment. */
1959 gimple_assign_copy_p (gimple gs)
1961 return (gimple_assign_single_p (gs)
1962 && is_gimple_val (gimple_op (gs, 1)));
1966 /* Return true if GS is an SSA_NAME copy assignment. */
1969 gimple_assign_ssa_name_copy_p (gimple gs)
1971 return (gimple_assign_single_p (gs)
1972 && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
1973 && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
1977 /* Return true if GS is an assignment with a unary RHS, but the
1978 operator has no effect on the assigned value. The logic is adapted
1979 from STRIP_NOPS. This predicate is intended to be used in tuplifying
1980 instances in which STRIP_NOPS was previously applied to the RHS of
1983 NOTE: In the use cases that led to the creation of this function
1984 and of gimple_assign_single_p, it is typical to test for either
1985 condition and to proceed in the same manner. In each case, the
1986 assigned value is represented by the single RHS operand of the
1987 assignment. I suspect there may be cases where gimple_assign_copy_p,
1988 gimple_assign_single_p, or equivalent logic is used where a similar
1989 treatment of unary NOPs is appropriate. */
1992 gimple_assign_unary_nop_p (gimple gs)
1994 return (is_gimple_assign (gs)
1995 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
1996 || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
1997 && gimple_assign_rhs1 (gs) != error_mark_node
1998 && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
1999 == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
2002 /* Set BB to be the basic block holding G. */
2005 gimple_set_bb (gimple stmt, basic_block bb)
2007 stmt->gsbase.bb = bb;
2009 /* If the statement is a label, add the label to the block-to-labels map
2010 so that we can speed up edge creation for GIMPLE_GOTOs. */
2011 if (cfun->cfg && gimple_code (stmt) == GIMPLE_LABEL)
2016 t = gimple_label_label (stmt);
2017 uid = LABEL_DECL_UID (t);
2020 unsigned old_len = VEC_length (basic_block, label_to_block_map);
2021 LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
2022 if (old_len <= (unsigned) uid)
2024 unsigned new_len = 3 * uid / 2 + 1;
2026 VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
2031 VEC_replace (basic_block, label_to_block_map, uid, bb);
2036 /* Modify the RHS of the assignment pointed-to by GSI using the
2037 operands in the expression tree EXPR.
2039 NOTE: The statement pointed-to by GSI may be reallocated if it
2040 did not have enough operand slots.
2042 This function is useful to convert an existing tree expression into
2043 the flat representation used for the RHS of a GIMPLE assignment.
2044 It will reallocate memory as needed to expand or shrink the number
2045 of operand slots needed to represent EXPR.
2047 NOTE: If you find yourself building a tree and then calling this
2048 function, you are most certainly doing it the slow way. It is much
2049 better to build a new assignment or to use the function
2050 gimple_assign_set_rhs_with_ops, which does not require an
2051 expression tree to be built. */
2054 gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
2056 enum tree_code subcode;
2059 extract_ops_from_tree_1 (expr, &subcode, &op1, &op2, &op3);
2060 gimple_assign_set_rhs_with_ops_1 (gsi, subcode, op1, op2, op3);
2064 /* Set the RHS of assignment statement pointed-to by GSI to CODE with
2065 operands OP1, OP2 and OP3.
2067 NOTE: The statement pointed-to by GSI may be reallocated if it
2068 did not have enough operand slots. */
2071 gimple_assign_set_rhs_with_ops_1 (gimple_stmt_iterator *gsi, enum tree_code code,
2072 tree op1, tree op2, tree op3)
2074 unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
2075 gimple stmt = gsi_stmt (*gsi);
2077 /* If the new CODE needs more operands, allocate a new statement. */
2078 if (gimple_num_ops (stmt) < new_rhs_ops + 1)
2080 tree lhs = gimple_assign_lhs (stmt);
2081 gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
2082 memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
2083 gsi_replace (gsi, new_stmt, true);
2086 /* The LHS needs to be reset as this also changes the SSA name
2088 gimple_assign_set_lhs (stmt, lhs);
2091 gimple_set_num_ops (stmt, new_rhs_ops + 1);
2092 gimple_set_subcode (stmt, code);
2093 gimple_assign_set_rhs1 (stmt, op1);
2094 if (new_rhs_ops > 1)
2095 gimple_assign_set_rhs2 (stmt, op2);
2096 if (new_rhs_ops > 2)
2097 gimple_assign_set_rhs3 (stmt, op3);
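/* For instance, to rewrite "x = y * 8" in place as "x = y << 3" (sketch
   only; GSI and Y are hypothetical, and NULL_TREE is passed for the unused
   third operand):

     gimple_assign_set_rhs_with_ops_1 (&gsi, LSHIFT_EXPR, y,
                                       build_int_cst (TREE_TYPE (y), 3),
                                       NULL_TREE);
     update_stmt (gsi_stmt (gsi));
*/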
2101 /* Return the LHS of a statement that performs an assignment,
2102 either a GIMPLE_ASSIGN or a GIMPLE_CALL. Returns NULL_TREE
2103 for a call to a function that returns no value, or for a
2104 statement other than an assignment or a call. */
2107 gimple_get_lhs (const_gimple stmt)
2109 enum gimple_code code = gimple_code (stmt);
2111 if (code == GIMPLE_ASSIGN)
2112 return gimple_assign_lhs (stmt);
2113 else if (code == GIMPLE_CALL)
2114 return gimple_call_lhs (stmt);
2120 /* Set the LHS of a statement that performs an assignment,
2121 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
2124 gimple_set_lhs (gimple stmt, tree lhs)
2126 enum gimple_code code = gimple_code (stmt);
2128 if (code == GIMPLE_ASSIGN)
2129 gimple_assign_set_lhs (stmt, lhs);
2130 else if (code == GIMPLE_CALL)
2131 gimple_call_set_lhs (stmt, lhs);
2136 /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
2137 GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
2138 expression with a different value.
2140 This will update any annotations (say debug bind stmts) referring
2141 to the original LHS, so that they use the RHS instead. This is
2142 done even if NLHS and LHS are the same, for it is understood that
2143 the RHS will be modified afterwards, and NLHS will not be assigned
2144 an equivalent value.
2146 Adjusting any non-annotation uses of the LHS, if needed, is a
2147 responsibility of the caller.
2149 The effect of this call should be pretty much the same as that of
2150 inserting a copy of STMT before STMT, and then removing the
2151 original stmt, at which time gsi_remove() would have updated the
2152 annotations, but using this function saves all the inserting,
2153 copying and removing. */
2156 gimple_replace_lhs (gimple stmt, tree nlhs)
2158 if (MAY_HAVE_DEBUG_STMTS)
2160 tree lhs = gimple_get_lhs (stmt);
2162 gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);
2164 insert_debug_temp_for_var_def (NULL, lhs);
2167 gimple_set_lhs (stmt, nlhs);
2170 /* Return a deep copy of statement STMT. All the operands from STMT
2171 are reallocated and copied using unshare_expr. The DEF, USE, VDEF
2172 and VUSE operand arrays are set to empty in the new copy. */
2175 gimple_copy (gimple stmt)
2177 enum gimple_code code = gimple_code (stmt);
2178 unsigned num_ops = gimple_num_ops (stmt);
2179 gimple copy = gimple_alloc (code, num_ops);
2182 /* Shallow copy all the fields from STMT. */
2183 memcpy (copy, stmt, gimple_size (code));
2185 /* If STMT has sub-statements, deep-copy them as well. */
2186 if (gimple_has_substatements (stmt))
2191 switch (gimple_code (stmt))
2194 new_seq = gimple_seq_copy (gimple_bind_body (stmt));
2195 gimple_bind_set_body (copy, new_seq);
2196 gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
2197 gimple_bind_set_block (copy, gimple_bind_block (stmt));
2201 new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
2202 gimple_catch_set_handler (copy, new_seq);
2203 t = unshare_expr (gimple_catch_types (stmt));
2204 gimple_catch_set_types (copy, t);
2207 case GIMPLE_EH_FILTER:
2208 new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
2209 gimple_eh_filter_set_failure (copy, new_seq);
2210 t = unshare_expr (gimple_eh_filter_types (stmt));
2211 gimple_eh_filter_set_types (copy, t);
2215 new_seq = gimple_seq_copy (gimple_try_eval (stmt));
2216 gimple_try_set_eval (copy, new_seq);
2217 new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
2218 gimple_try_set_cleanup (copy, new_seq);
2221 case GIMPLE_OMP_FOR:
2222 new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
2223 gimple_omp_for_set_pre_body (copy, new_seq);
2224 t = unshare_expr (gimple_omp_for_clauses (stmt));
2225 gimple_omp_for_set_clauses (copy, t);
2226 copy->gimple_omp_for.iter
2227 = ggc_alloc_vec_gimple_omp_for_iter
2228 (gimple_omp_for_collapse (stmt));
2229 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2231 gimple_omp_for_set_cond (copy, i,
2232 gimple_omp_for_cond (stmt, i));
2233 gimple_omp_for_set_index (copy, i,
2234 gimple_omp_for_index (stmt, i));
2235 t = unshare_expr (gimple_omp_for_initial (stmt, i));
2236 gimple_omp_for_set_initial (copy, i, t);
2237 t = unshare_expr (gimple_omp_for_final (stmt, i));
2238 gimple_omp_for_set_final (copy, i, t);
2239 t = unshare_expr (gimple_omp_for_incr (stmt, i));
2240 gimple_omp_for_set_incr (copy, i, t);
2244 case GIMPLE_OMP_PARALLEL:
2245 t = unshare_expr (gimple_omp_parallel_clauses (stmt));
2246 gimple_omp_parallel_set_clauses (copy, t);
2247 t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
2248 gimple_omp_parallel_set_child_fn (copy, t);
2249 t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
2250 gimple_omp_parallel_set_data_arg (copy, t);
2253 case GIMPLE_OMP_TASK:
2254 t = unshare_expr (gimple_omp_task_clauses (stmt));
2255 gimple_omp_task_set_clauses (copy, t);
2256 t = unshare_expr (gimple_omp_task_child_fn (stmt));
2257 gimple_omp_task_set_child_fn (copy, t);
2258 t = unshare_expr (gimple_omp_task_data_arg (stmt));
2259 gimple_omp_task_set_data_arg (copy, t);
2260 t = unshare_expr (gimple_omp_task_copy_fn (stmt));
2261 gimple_omp_task_set_copy_fn (copy, t);
2262 t = unshare_expr (gimple_omp_task_arg_size (stmt));
2263 gimple_omp_task_set_arg_size (copy, t);
2264 t = unshare_expr (gimple_omp_task_arg_align (stmt));
2265 gimple_omp_task_set_arg_align (copy, t);
2268 case GIMPLE_OMP_CRITICAL:
2269 t = unshare_expr (gimple_omp_critical_name (stmt));
2270 gimple_omp_critical_set_name (copy, t);
2273 case GIMPLE_OMP_SECTIONS:
2274 t = unshare_expr (gimple_omp_sections_clauses (stmt));
2275 gimple_omp_sections_set_clauses (copy, t);
2276 t = unshare_expr (gimple_omp_sections_control (stmt));
2277 gimple_omp_sections_set_control (copy, t);
2280 case GIMPLE_OMP_SINGLE:
2281 case GIMPLE_OMP_SECTION:
2282 case GIMPLE_OMP_MASTER:
2283 case GIMPLE_OMP_ORDERED:
2285 new_seq = gimple_seq_copy (gimple_omp_body (stmt));
2286 gimple_omp_set_body (copy, new_seq);
2289 case GIMPLE_WITH_CLEANUP_EXPR:
2290 new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
2291 gimple_wce_set_cleanup (copy, new_seq);
2299 /* Make copy of operands. */
2302 for (i = 0; i < num_ops; i++)
2303 gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));
2305 /* Clear out SSA operand vectors on COPY. */
2306 if (gimple_has_ops (stmt))
2308 gimple_set_def_ops (copy, NULL);
2309 gimple_set_use_ops (copy, NULL);
2312 if (gimple_has_mem_ops (stmt))
2314 gimple_set_vdef (copy, gimple_vdef (stmt));
2315 gimple_set_vuse (copy, gimple_vuse (stmt));
2318 /* SSA operands need to be updated. */
2319 gimple_set_modified (copy, true);
2326 /* Set the MODIFIED flag to MODIFIEDP, iff the gimple statement G has
2327 a MODIFIED field. */
2330 gimple_set_modified (gimple s, bool modifiedp)
2332 if (gimple_has_ops (s))
2333 s->gsbase.modified = (unsigned) modifiedp;
2337 /* Return true if statement S has side-effects. We consider a
2338 statement to have side effects if:
2340 - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
2341 - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS. */
2344 gimple_has_side_effects (const_gimple s)
2348 if (is_gimple_debug (s))
2351 /* We don't have to scan the arguments to check for
2352 volatile arguments, though, at present, we still
2353 do a scan to check for TREE_SIDE_EFFECTS. */
2354 if (gimple_has_volatile_ops (s))
2357 if (is_gimple_call (s))
2359 unsigned nargs = gimple_call_num_args (s);
2362 if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
2364 else if (gimple_call_flags (s) & ECF_LOOPING_CONST_OR_PURE)
2365 /* An infinite loop is considered a side effect. */
2368 if (gimple_call_lhs (s)
2369 && TREE_SIDE_EFFECTS (gimple_call_lhs (s)))
2371 gcc_assert (gimple_has_volatile_ops (s));
2375 fn = gimple_call_fn (s);
2376 if (fn && TREE_SIDE_EFFECTS (fn))
2379 for (i = 0; i < nargs; i++)
2380 if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i)))
2382 gcc_assert (gimple_has_volatile_ops (s));
2390 for (i = 0; i < gimple_num_ops (s); i++)
2391 if (TREE_SIDE_EFFECTS (gimple_op (s, i)))
2393 gcc_assert (gimple_has_volatile_ops (s));
2401 /* Return true if the RHS of statement S has side effects.
2402 We may use it to determine if it is admissible to replace
2403 an assignment or call with a copy of a previously-computed
2404 value. In such cases, side-effects due to the LHS are
2408 gimple_rhs_has_side_effects (const_gimple s)
2412 if (is_gimple_call (s))
2414 unsigned nargs = gimple_call_num_args (s);
2417 if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
2420 /* We cannot use gimple_has_volatile_ops here,
2421 because we must ignore a volatile LHS. */
2422 fn = gimple_call_fn (s);
2423 if (fn && (TREE_SIDE_EFFECTS (fn) || TREE_THIS_VOLATILE (fn)))
2425 gcc_assert (gimple_has_volatile_ops (s));
2429 for (i = 0; i < nargs; i++)
2430 if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i))
2431 || TREE_THIS_VOLATILE (gimple_call_arg (s, i)))
2436 else if (is_gimple_assign (s))
2438 /* Skip the first operand, the LHS. */
2439 for (i = 1; i < gimple_num_ops (s); i++)
2440 if (TREE_SIDE_EFFECTS (gimple_op (s, i))
2441 || TREE_THIS_VOLATILE (gimple_op (s, i)))
2443 gcc_assert (gimple_has_volatile_ops (s));
2447 else if (is_gimple_debug (s))
2451 /* For statements without an LHS, examine all arguments. */
2452 for (i = 0; i < gimple_num_ops (s); i++)
2453 if (TREE_SIDE_EFFECTS (gimple_op (s, i))
2454 || TREE_THIS_VOLATILE (gimple_op (s, i)))
2456 gcc_assert (gimple_has_volatile_ops (s));
2464 /* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
2465 Return true if S can trap. When INCLUDE_MEM is true, check whether
2466 the memory operations could trap. When INCLUDE_STORES is true and
2467 S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked. */
2470 gimple_could_trap_p_1 (gimple s, bool include_mem, bool include_stores)
2472 tree t, div = NULL_TREE;
2477 unsigned i, start = (is_gimple_assign (s) && !include_stores) ? 1 : 0;
2479 for (i = start; i < gimple_num_ops (s); i++)
2480 if (tree_could_trap_p (gimple_op (s, i)))
2484 switch (gimple_code (s))
2487 return gimple_asm_volatile_p (s);
2490 t = gimple_call_fndecl (s);
2491 /* Assume that calls to weak functions may trap. */
2492 if (!t || !DECL_P (t) || DECL_WEAK (t))
2497 t = gimple_expr_type (s);
2498 op = gimple_assign_rhs_code (s);
2499 if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
2500 div = gimple_assign_rhs2 (s);
2501 return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
2502 (INTEGRAL_TYPE_P (t)
2503 && TYPE_OVERFLOW_TRAPS (t)),
2513 /* Return true if statement S can trap. */
2516 gimple_could_trap_p (gimple s)
2518 return gimple_could_trap_p_1 (s, true, true);
2521 /* Return true if RHS of a GIMPLE_ASSIGN S can trap. */
2524 gimple_assign_rhs_could_trap_p (gimple s)
2526 gcc_assert (is_gimple_assign (s));
2527 return gimple_could_trap_p_1 (s, true, false);
2531 /* Dump allocation statistics for the GIMPLE statements generated. */
2534 dump_gimple_statistics (void)
2536 #ifdef GATHER_STATISTICS
2537 int i, total_tuples = 0, total_bytes = 0;
2539 fprintf (stderr, "\nGIMPLE statements\n");
2540 fprintf (stderr, "Kind Stmts Bytes\n");
2541 fprintf (stderr, "---------------------------------------\n");
2542 for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
2544 fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
2545 gimple_alloc_counts[i], gimple_alloc_sizes[i]);
2546 total_tuples += gimple_alloc_counts[i];
2547 total_bytes += gimple_alloc_sizes[i];
2549 fprintf (stderr, "---------------------------------------\n");
2550 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
2551 fprintf (stderr, "---------------------------------------\n");
2553 fprintf (stderr, "No gimple statistics\n");
2558 /* Return the number of operands needed on the RHS of a GIMPLE
2559 assignment for an expression with tree code CODE. */
2562 get_gimple_rhs_num_ops (enum tree_code code)
2564 enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
2566 if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
2568 else if (rhs_class == GIMPLE_BINARY_RHS)
2570 else if (rhs_class == GIMPLE_TERNARY_RHS)
2576 #define DEFTREECODE(SYM, STRING, TYPE, NARGS) \
2578 ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS \
2579 : ((TYPE) == tcc_binary \
2580 || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS \
2581 : ((TYPE) == tcc_constant \
2582 || (TYPE) == tcc_declaration \
2583 || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS \
2584 : ((SYM) == TRUTH_AND_EXPR \
2585 || (SYM) == TRUTH_OR_EXPR \
2586 || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS \
2587 : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS \
2588 : ((SYM) == WIDEN_MULT_PLUS_EXPR \
2589 || (SYM) == WIDEN_MULT_MINUS_EXPR \
2590 || (SYM) == DOT_PROD_EXPR \
2591 || (SYM) == REALIGN_LOAD_EXPR \
2592 || (SYM) == FMA_EXPR) ? GIMPLE_TERNARY_RHS \
2593 : ((SYM) == COND_EXPR \
2594 || (SYM) == CONSTRUCTOR \
2595 || (SYM) == OBJ_TYPE_REF \
2596 || (SYM) == ASSERT_EXPR \
2597 || (SYM) == ADDR_EXPR \
2598 || (SYM) == WITH_SIZE_EXPR \
2599 || (SYM) == SSA_NAME \
2600 || (SYM) == VEC_COND_EXPR) ? GIMPLE_SINGLE_RHS \
2601 : GIMPLE_INVALID_RHS),
2602 #define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,
2604 const unsigned char gimple_rhs_class_table[] = {
2605 #include "all-tree.def"
2609 #undef END_OF_BASE_TREE_CODES
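/* A few illustrative entries of the mapping defined above (examples
   chosen for illustration, not an exhaustive list):

     PLUS_EXPR   (tcc_binary)  -> GIMPLE_BINARY_RHS  -> 2 RHS operands
     NEGATE_EXPR (tcc_unary)   -> GIMPLE_UNARY_RHS   -> 1 RHS operand
     SSA_NAME                  -> GIMPLE_SINGLE_RHS  -> 1 RHS operand
     FMA_EXPR                  -> GIMPLE_TERNARY_RHS -> 3 RHS operands

   where the operand counts are the values returned by
   get_gimple_rhs_num_ops.  */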
2611 /* For the definitive definition of GIMPLE, see doc/tree-ssa.texi. */
2613 /* Validation of GIMPLE expressions. */
2615 /* Returns true iff T is a valid RHS for an assignment to a renamed
2616 user -- or front-end generated artificial -- variable. */
2619 is_gimple_reg_rhs (tree t)
2621 return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
2624 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
2625 LHS, or for a call argument. */
2628 is_gimple_mem_rhs (tree t)
2630 /* If we're dealing with a renamable type, either source or dest must be
2631 a renamed variable. */
2632 if (is_gimple_reg_type (TREE_TYPE (t)))
2633 return is_gimple_val (t);
2635 return is_gimple_val (t) || is_gimple_lvalue (t);
2638 /* Return true if T is a valid LHS for a GIMPLE assignment expression. */
2641 is_gimple_lvalue (tree t)
2643 return (is_gimple_addressable (t)
2644 || TREE_CODE (t) == WITH_SIZE_EXPR
2645 /* These are complex lvalues, but don't have addresses, so they
2647 || TREE_CODE (t) == BIT_FIELD_REF);
2650 /* Return true if T is a GIMPLE condition. */
2653 is_gimple_condexpr (tree t)
2655 return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
2656 && !tree_could_throw_p (t)
2657 && is_gimple_val (TREE_OPERAND (t, 0))
2658 && is_gimple_val (TREE_OPERAND (t, 1))));
2661 /* Return true if T is something whose address can be taken. */
2664 is_gimple_addressable (tree t)
2666 return (is_gimple_id (t) || handled_component_p (t)
2667 || TREE_CODE (t) == MEM_REF);
2670 /* Return true if T is a valid gimple constant. */
2673 is_gimple_constant (const_tree t)
2675 switch (TREE_CODE (t))
2685 /* Vector constant constructors are gimple invariant. */
2687 if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2688 return TREE_CONSTANT (t);
2697 /* Return true if T is a gimple address. */
2700 is_gimple_address (const_tree t)
2704 if (TREE_CODE (t) != ADDR_EXPR)
2707 op = TREE_OPERAND (t, 0);
2708 while (handled_component_p (op))
2710 if ((TREE_CODE (op) == ARRAY_REF
2711 || TREE_CODE (op) == ARRAY_RANGE_REF)
2712 && !is_gimple_val (TREE_OPERAND (op, 1)))
2715 op = TREE_OPERAND (op, 0);
2718 if (CONSTANT_CLASS_P (op) || TREE_CODE (op) == MEM_REF)
2721 switch (TREE_CODE (op))
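/* For illustration (hypothetical example): given

     struct S { int a[10]; } s;

   the expression &s.a[i_1] is accepted by is_gimple_address as long as
   the index i_1 is a GIMPLE value and the stripped base 's' is a
   declaration accepted by the final check on OP.  An expression such as
   &s.a[i + 1] with a non-GIMPLE index is rejected by the ARRAY_REF test
   above.  */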
2736 /* Strip out all handled components that produce invariant offsets. */
2740 strip_invariant_refs (const_tree op)
2742 while (handled_component_p (op))
2744 switch (TREE_CODE (op))
2747 case ARRAY_RANGE_REF:
2748 if (!is_gimple_constant (TREE_OPERAND (op, 1))
2749 || TREE_OPERAND (op, 2) != NULL_TREE
2750 || TREE_OPERAND (op, 3) != NULL_TREE)
2755 if (TREE_OPERAND (op, 2) != NULL_TREE)
2761 op = TREE_OPERAND (op, 0);
2767 /* Return true if T is a gimple invariant address. */
2770 is_gimple_invariant_address (const_tree t)
2774 if (TREE_CODE (t) != ADDR_EXPR)
2777 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2781 if (TREE_CODE (op) == MEM_REF)
2783 const_tree op0 = TREE_OPERAND (op, 0);
2784 return (TREE_CODE (op0) == ADDR_EXPR
2785 && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
2786 || decl_address_invariant_p (TREE_OPERAND (op0, 0))));
2789 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
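/* For illustration (hypothetical decls): '&global_var' and
   '&global_array[3]' are invariant addresses; '&local_var' for an
   automatic variable of the current function is invariant here but is
   rejected by is_gimple_ip_invariant_address below; and '&array[i_1]'
   with a variable index is not invariant at all.  */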
2792 /* Return true if T is a gimple invariant address at IPA level
2793 (so addresses of variables on stack are not allowed). */
2796 is_gimple_ip_invariant_address (const_tree t)
2800 if (TREE_CODE (t) != ADDR_EXPR)
2803 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2805 return op && (CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op));
2808 /* Return true if T is a GIMPLE minimal invariant. It's a restricted
2809 form of function invariant. */
2812 is_gimple_min_invariant (const_tree t)
2814 if (TREE_CODE (t) == ADDR_EXPR)
2815 return is_gimple_invariant_address (t);
2817 return is_gimple_constant (t);
2820 /* Return true if T is a GIMPLE interprocedural invariant. It's a restricted
2821 form of gimple minimal invariant. */
2824 is_gimple_ip_invariant (const_tree t)
2826 if (TREE_CODE (t) == ADDR_EXPR)
2827 return is_gimple_ip_invariant_address (t);
2829 return is_gimple_constant (t);
2832 /* Return true if T looks like a valid GIMPLE statement. */
2835 is_gimple_stmt (tree t)
2837 const enum tree_code code = TREE_CODE (t);
2842 /* The only valid NOP_EXPR is the empty statement. */
2843 return IS_EMPTY_STMT (t);
2847 /* These are only valid if they're void. */
2848 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
2854 case CASE_LABEL_EXPR:
2855 case TRY_CATCH_EXPR:
2856 case TRY_FINALLY_EXPR:
2857 case EH_FILTER_EXPR:
2860 case STATEMENT_LIST:
2870 /* These are always void. */
2876 /* These are valid regardless of their type. */
2884 /* Return true if T is a variable. */
2887 is_gimple_variable (tree t)
2889 return (TREE_CODE (t) == VAR_DECL
2890 || TREE_CODE (t) == PARM_DECL
2891 || TREE_CODE (t) == RESULT_DECL
2892 || TREE_CODE (t) == SSA_NAME);
2895 /* Return true if T is a GIMPLE identifier (something with an address). */
2898 is_gimple_id (tree t)
2900 return (is_gimple_variable (t)
2901 || TREE_CODE (t) == FUNCTION_DECL
2902 || TREE_CODE (t) == LABEL_DECL
2903 || TREE_CODE (t) == CONST_DECL
2904 /* Allow string constants, since they are addressable. */
2905 || TREE_CODE (t) == STRING_CST);
2908 /* Return true if TYPE is a suitable type for a scalar register variable. */
2911 is_gimple_reg_type (tree type)
2913 return !AGGREGATE_TYPE_P (type);
2916 /* Return true if T is a non-aggregate register variable. */
2919 is_gimple_reg (tree t)
2921 if (TREE_CODE (t) == SSA_NAME)
2922 t = SSA_NAME_VAR (t);
2924 if (!is_gimple_variable (t))
2927 if (!is_gimple_reg_type (TREE_TYPE (t)))
2930 /* A volatile decl is not acceptable because we can't reuse it as
2931 needed. We need to copy it into a temp first. */
2932 if (TREE_THIS_VOLATILE (t))
2935 /* We define "registers" as things that can be renamed as needed,
2936 which with our infrastructure does not apply to memory. */
2937 if (needs_to_live_in_memory (t))
2940 /* Hard register variables are an interesting case. For those that
2941 are call-clobbered, we don't know where all the calls are, since
2942 we don't (want to) take into account which operations will turn
2943 into libcalls at the rtl level. For those that are call-saved,
2944 we don't currently model the fact that calls may in fact change
2945 global hard registers, nor do we examine ASM_CLOBBERS at the tree
2946 level, and so miss variable changes they might imply. All around,
2947 it seems safest to not do too much optimization with these at the
2948 tree level at all. We'll have to rely on the rtl optimizers to
2949 clean this up, as there we've got all the appropriate bits exposed. */
2950 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2953 /* Complex and vector values must have been put into SSA-like form.
2954 That is, no assignments to the individual components. */
2955 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
2956 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2957 return DECL_GIMPLE_REG_P (t);
2963 /* Return true if T is a GIMPLE variable whose address is not needed. */
2966 is_gimple_non_addressable (tree t)
2968 if (TREE_CODE (t) == SSA_NAME)
2969 t = SSA_NAME_VAR (t);
2971 return (is_gimple_variable (t) && ! needs_to_live_in_memory (t));
2974 /* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant. */
2977 is_gimple_val (tree t)
2979 /* Make loads from volatiles and memory vars explicit. */
2980 if (is_gimple_variable (t)
2981 && is_gimple_reg_type (TREE_TYPE (t))
2982 && !is_gimple_reg (t))
2985 return (is_gimple_variable (t) || is_gimple_min_invariant (t));
2988 /* Similarly, but accept hard registers as inputs to asm statements. */
2991 is_gimple_asm_val (tree t)
2993 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2996 return is_gimple_val (t);
2999 /* Return true if T is a GIMPLE minimal lvalue. */
3002 is_gimple_min_lval (tree t)
3004 if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
3006 return (is_gimple_id (t) || TREE_CODE (t) == MEM_REF);
3009 /* Return true if T is a valid function operand of a CALL_EXPR. */
3012 is_gimple_call_addr (tree t)
3014 return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
3017 /* Return true if T is a valid address operand of a MEM_REF. */
3020 is_gimple_mem_ref_addr (tree t)
3022 return (is_gimple_reg (t)
3023 || TREE_CODE (t) == INTEGER_CST
3024 || (TREE_CODE (t) == ADDR_EXPR
3025 && (CONSTANT_CLASS_P (TREE_OPERAND (t, 0))
3026 || decl_address_invariant_p (TREE_OPERAND (t, 0)))));
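/* For illustration (hypothetical operands): an SSA pointer 'p_1', an
   INTEGER_CST of pointer type such as the folded form of
   '(void *) 4096', and '&global_var' are all valid MEM_REF address
   operands under the predicate above, while a general ADDR_EXPR like
   '&s.a[i_1]' is not.  */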
3029 /* If T makes a function call, return the corresponding CALL_EXPR operand.
3030 Otherwise, return NULL_TREE. */
3033 get_call_expr_in (tree t)
3035 if (TREE_CODE (t) == MODIFY_EXPR)
3036 t = TREE_OPERAND (t, 1);
3037 if (TREE_CODE (t) == WITH_SIZE_EXPR)
3038 t = TREE_OPERAND (t, 0);
3039 if (TREE_CODE (t) == CALL_EXPR)
3045 /* Given a memory reference expression T, return its base address.
3046 The base address of a memory reference expression is the main
3047 object being referenced. For instance, the base address for
3048 'array[i].fld[j]' is 'array'. You can think of this as stripping
3049 away the offset part from a memory address.
3051 This function calls handled_component_p to strip away all the inner
3052 parts of the memory reference until it reaches the base object. */
3055 get_base_address (tree t)
3057 while (handled_component_p (t))
3058 t = TREE_OPERAND (t, 0);
3060 if ((TREE_CODE (t) == MEM_REF
3061 || TREE_CODE (t) == TARGET_MEM_REF)
3062 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
3063 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
3065 if (TREE_CODE (t) == SSA_NAME
3067 || TREE_CODE (t) == STRING_CST
3068 || TREE_CODE (t) == CONSTRUCTOR
3069 || INDIRECT_REF_P (t)
3070 || TREE_CODE (t) == MEM_REF
3071 || TREE_CODE (t) == TARGET_MEM_REF)
3078 recalculate_side_effects (tree t)
3080 enum tree_code code = TREE_CODE (t);
3081 int len = TREE_OPERAND_LENGTH (t);
3084 switch (TREE_CODE_CLASS (code))
3086 case tcc_expression:
3092 case PREDECREMENT_EXPR:
3093 case PREINCREMENT_EXPR:
3094 case POSTDECREMENT_EXPR:
3095 case POSTINCREMENT_EXPR:
3096 /* All of these have side-effects, no matter what their
3105 case tcc_comparison: /* a comparison expression */
3106 case tcc_unary: /* a unary arithmetic expression */
3107 case tcc_binary: /* a binary arithmetic expression */
3108 case tcc_reference: /* a reference */
3109 case tcc_vl_exp: /* a function call */
3110 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
3111 for (i = 0; i < len; ++i)
3113 tree op = TREE_OPERAND (t, i);
3114 if (op && TREE_SIDE_EFFECTS (op))
3115 TREE_SIDE_EFFECTS (t) = 1;
3120 /* No side-effects. */
3128 /* Canonicalize a tree T for use in a COND_EXPR as conditional. Returns
3129 a canonicalized tree that is valid for a COND_EXPR, or NULL_TREE if
3130 we failed to create one. */
3133 canonicalize_cond_expr_cond (tree t)
3135 /* Strip conversions around boolean operations. */
3136 if (CONVERT_EXPR_P (t)
3137 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 0))))
3138 t = TREE_OPERAND (t, 0);
3140 /* For (bool)x use x != 0. */
3141 if (CONVERT_EXPR_P (t)
3142 && TREE_CODE (TREE_TYPE (t)) == BOOLEAN_TYPE)
3144 tree top0 = TREE_OPERAND (t, 0);
3145 t = build2 (NE_EXPR, TREE_TYPE (t),
3146 top0, build_int_cst (TREE_TYPE (top0), 0));
3148 /* For !x use x == 0. */
3149 else if (TREE_CODE (t) == TRUTH_NOT_EXPR)
3151 tree top0 = TREE_OPERAND (t, 0);
3152 t = build2 (EQ_EXPR, TREE_TYPE (t),
3153 top0, build_int_cst (TREE_TYPE (top0), 0));
3155 /* For cmp ? 1 : 0 use cmp. */
3156 else if (TREE_CODE (t) == COND_EXPR
3157 && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
3158 && integer_onep (TREE_OPERAND (t, 1))
3159 && integer_zerop (TREE_OPERAND (t, 2)))
3161 tree top0 = TREE_OPERAND (t, 0);
3162 t = build2 (TREE_CODE (top0), TREE_TYPE (t),
3163 TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
3166 if (is_gimple_condexpr (t))
3172 /* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
3173 the positions marked by the set ARGS_TO_SKIP. */
3176 gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
3179 int nargs = gimple_call_num_args (stmt);
3180 VEC(tree, heap) *vargs = VEC_alloc (tree, heap, nargs);
3183 for (i = 0; i < nargs; i++)
3184 if (!bitmap_bit_p (args_to_skip, i))
3185 VEC_quick_push (tree, vargs, gimple_call_arg (stmt, i));
3187 if (gimple_call_internal_p (stmt))
3188 new_stmt = gimple_build_call_internal_vec (gimple_call_internal_fn (stmt),
3191 new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
3192 VEC_free (tree, heap, vargs);
3193 if (gimple_call_lhs (stmt))
3194 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3196 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3197 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3199 gimple_set_block (new_stmt, gimple_block (stmt));
3200 if (gimple_has_location (stmt))
3201 gimple_set_location (new_stmt, gimple_location (stmt));
3202 gimple_call_copy_flags (new_stmt, stmt);
3203 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3205 gimple_set_modified (new_stmt, true);
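/* Illustrative use of gimple_call_copy_skip_args (a sketch; STMT is a
   hypothetical GIMPLE_CALL provided by the caller).  To drop the second
   argument (argument positions are zero-based):

     bitmap skip = BITMAP_ALLOC (NULL);
     gimple new_call;
     bitmap_set_bit (skip, 1);
     new_call = gimple_call_copy_skip_args (stmt, skip);
     BITMAP_FREE (skip);

   The copy keeps the LHS, call flags, static chain, location and
   virtual operands of the original statement, as set up above.  */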
3211 enum gtc_mode { GTC_MERGE = 0, GTC_DIAG = 1 };
3213 static hashval_t gimple_type_hash_1 (const void *, enum gtc_mode);
3215 /* Structure used to maintain a cache of some type pairs compared by
3216 gimple_types_compatible_p when comparing aggregate types. There are
3217 three possible values for SAME_P:
3219 -2: The pair (T1, T2) has just been inserted in the table.
3220 0: T1 and T2 are different types.
3221 1: T1 and T2 are the same type.
3223 The two elements in the SAME_P array are indexed by the comparison mode (GTC_MERGE or GTC_DIAG). */
3230 signed char same_p[2];
3232 typedef struct type_pair_d *type_pair_t;
3234 DEF_VEC_P(type_pair_t);
3235 DEF_VEC_ALLOC_P(type_pair_t,heap);
3237 /* Return a hash value for the type pair pointed-to by P. */
3240 type_pair_hash (const void *p)
3242 const struct type_pair_d *pair = (const struct type_pair_d *) p;
3243 hashval_t val1 = pair->uid1;
3244 hashval_t val2 = pair->uid2;
3245 return iterative_hash_hashval_t (val1, val2);
3248 /* Compare two type pairs pointed-to by P1 and P2. */
3251 type_pair_eq (const void *p1, const void *p2)
3253 const struct type_pair_d *pair1 = (const struct type_pair_d *) p1;
3254 const struct type_pair_d *pair2 = (const struct type_pair_d *) p2;
3255 return (pair1->uid1 == pair2->uid1 && pair1->uid2 == pair2->uid2);
3258 /* Lookup the pair of types T1 and T2 in *VISITED_P. Insert a new
3259 entry if none existed. */
3262 lookup_type_pair (tree t1, tree t2, htab_t *visited_p, struct obstack *ob_p)
3264 struct type_pair_d pair;
3268 if (*visited_p == NULL)
3270 *visited_p = htab_create (251, type_pair_hash, type_pair_eq, NULL);
3271 gcc_obstack_init (ob_p);
3274 if (TYPE_UID (t1) < TYPE_UID (t2))
3276 pair.uid1 = TYPE_UID (t1);
3277 pair.uid2 = TYPE_UID (t2);
3281 pair.uid1 = TYPE_UID (t2);
3282 pair.uid2 = TYPE_UID (t1);
3284 slot = htab_find_slot (*visited_p, &pair, INSERT);
3287 p = *((type_pair_t *) slot);
3290 p = XOBNEW (ob_p, struct type_pair_d);
3291 p->uid1 = pair.uid1;
3292 p->uid2 = pair.uid2;
3301 /* Per pointer state for the SCC finding. The on_sccstack flag
3302 is not strictly required, it is true when there is no hash value
3303 recorded for the type and false otherwise. But querying that
3308 unsigned int dfsnum;
3317 static unsigned int next_dfs_num;
3318 static unsigned int gtc_next_dfs_num;
3321 /* GIMPLE type merging cache. A direct-mapped cache based on TYPE_UID. */
3323 typedef struct GTY(()) gimple_type_leader_entry_s {
3326 } gimple_type_leader_entry;
3328 #define GIMPLE_TYPE_LEADER_SIZE 16381
3329 static GTY((deletable, length("GIMPLE_TYPE_LEADER_SIZE")))
3330 gimple_type_leader_entry *gimple_type_leader;
3332 /* Lookup an existing leader for T and return it or NULL_TREE, if
3333 there is none in the cache. */
3336 gimple_lookup_type_leader (tree t)
3338 gimple_type_leader_entry *leader;
3340 if (!gimple_type_leader)
3343 leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
3344 if (leader->type != t)
3347 return leader->leader;
3350 /* Return true if T1 and T2 have the same name. If FOR_COMPLETION_P is
3351 true, return false if either type has no name; otherwise two
3352 unnamed types are considered to have the same name. */
3355 compare_type_names_p (tree t1, tree t2, bool for_completion_p)
3357 tree name1 = TYPE_NAME (t1);
3358 tree name2 = TYPE_NAME (t2);
3360 /* Consider anonymous types all unique for completion. */
3361 if (for_completion_p
3362 && (!name1 || !name2))
3365 if (name1 && TREE_CODE (name1) == TYPE_DECL)
3367 name1 = DECL_NAME (name1);
3368 if (for_completion_p
3372 gcc_assert (!name1 || TREE_CODE (name1) == IDENTIFIER_NODE);
3374 if (name2 && TREE_CODE (name2) == TYPE_DECL)
3376 name2 = DECL_NAME (name2);
3377 if (for_completion_p
3381 gcc_assert (!name2 || TREE_CODE (name2) == IDENTIFIER_NODE);
3383 /* Identifiers can be compared with pointer equality rather
3384 than a string comparison. */
3391 /* Return true if the field decls F1 and F2 are at the same offset.
3393 This is intended to be used on GIMPLE types only. */
3396 gimple_compare_field_offset (tree f1, tree f2)
3398 if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
3400 tree offset1 = DECL_FIELD_OFFSET (f1);
3401 tree offset2 = DECL_FIELD_OFFSET (f2);
3402 return ((offset1 == offset2
3403 /* Once gimplification is done, self-referential offsets are
3404 instantiated as operand #2 of the COMPONENT_REF built for
3405 each access and reset. Therefore, they are not relevant
3406 anymore and fields are interchangeable provided that they
3407 represent the same access. */
3408 || (TREE_CODE (offset1) == PLACEHOLDER_EXPR
3409 && TREE_CODE (offset2) == PLACEHOLDER_EXPR
3410 && (DECL_SIZE (f1) == DECL_SIZE (f2)
3411 || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR
3412 && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR)
3413 || operand_equal_p (DECL_SIZE (f1), DECL_SIZE (f2), 0))
3414 && DECL_ALIGN (f1) == DECL_ALIGN (f2))
3415 || operand_equal_p (offset1, offset2, 0))
3416 && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
3417 DECL_FIELD_BIT_OFFSET (f2)));
3420 /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
3421 should be, so handle differing ones specially by decomposing
3422 the offset into a byte and bit offset manually. */
3423 if (host_integerp (DECL_FIELD_OFFSET (f1), 0)
3424 && host_integerp (DECL_FIELD_OFFSET (f2), 0))
3426 unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
3427 unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
3428 bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
3429 byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
3430 + bit_offset1 / BITS_PER_UNIT);
3431 bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
3432 byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
3433 + bit_offset2 / BITS_PER_UNIT);
3434 if (byte_offset1 != byte_offset2)
3436 return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
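/* Worked example of the decomposition above (illustrative values):
   with BITS_PER_UNIT == 8, a field recorded as DECL_FIELD_OFFSET == 1
   and DECL_FIELD_BIT_OFFSET == 12 yields byte offset 1 + 12/8 == 2 and
   residual bit offset 12 % 8 == 4, so it compares equal to a field
   recorded as DECL_FIELD_OFFSET == 2 with DECL_FIELD_BIT_OFFSET == 4.  */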
3442 /* If the type T1 and the type T2 are a complete and an incomplete
3443 variant of the same type return true. */
3446 gimple_compatible_complete_and_incomplete_subtype_p (tree t1, tree t2)
3448 /* If one pointer points to an incomplete type variant of
3449 the other pointed-to type they are the same. */
3450 if (TREE_CODE (t1) == TREE_CODE (t2)
3451 && RECORD_OR_UNION_TYPE_P (t1)
3452 && (!COMPLETE_TYPE_P (t1)
3453 || !COMPLETE_TYPE_P (t2))
3454 && TYPE_QUALS (t1) == TYPE_QUALS (t2)
3455 && compare_type_names_p (TYPE_MAIN_VARIANT (t1),
3456 TYPE_MAIN_VARIANT (t2), true))
3462 gimple_types_compatible_p_1 (tree, tree, enum gtc_mode, type_pair_t,
3463 VEC(type_pair_t, heap) **,
3464 struct pointer_map_t *, struct obstack *);
3466 /* DFS visit the edge from the caller's type pair with state *STATE to
3467 the pair T1, T2 while operating in mode MODE.
3468 Update the merging status if it is not part of the SCC containing the
3469 caller's pair and return it.
3470 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
3473 gtc_visit (tree t1, tree t2, enum gtc_mode mode,
3475 VEC(type_pair_t, heap) **sccstack,
3476 struct pointer_map_t *sccstate,
3477 struct obstack *sccstate_obstack)
3479 struct sccs *cstate = NULL;
3483 /* Check first for the obvious case of pointer identity. */
3487 /* Check that we have two types to compare. */
3488 if (t1 == NULL_TREE || t2 == NULL_TREE)
3491 /* If the types have been previously registered and found equal
3493 if (mode == GTC_MERGE)
3495 tree leader1 = gimple_lookup_type_leader (t1);
3496 tree leader2 = gimple_lookup_type_leader (t2);
3499 || (leader1 && leader1 == leader2))
3502 else if (mode == GTC_DIAG)
3504 if (TYPE_CANONICAL (t1)
3505 && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2))
3509 /* Can't be the same type if the types don't have the same code. */
3510 if (TREE_CODE (t1) != TREE_CODE (t2))
3513 /* Can't be the same type if they have different CV qualifiers. */
3514 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
3517 /* Void types are always the same. */
3518 if (TREE_CODE (t1) == VOID_TYPE)
3521 /* Do some simple checks before doing three hashtable queries. */
3522 if (INTEGRAL_TYPE_P (t1)
3523 || SCALAR_FLOAT_TYPE_P (t1)
3524 || FIXED_POINT_TYPE_P (t1)
3525 || TREE_CODE (t1) == VECTOR_TYPE
3526 || TREE_CODE (t1) == COMPLEX_TYPE
3527 || TREE_CODE (t1) == OFFSET_TYPE)
3529 /* Can't be the same type if they have different alignment,
3530 sign, precision or mode. */
3531 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3532 || TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
3533 || TYPE_MODE (t1) != TYPE_MODE (t2)
3534 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
3537 if (TREE_CODE (t1) == INTEGER_TYPE
3538 && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
3539 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
3542 /* That's all we need to check for float and fixed-point types. */
3543 if (SCALAR_FLOAT_TYPE_P (t1)
3544 || FIXED_POINT_TYPE_P (t1))
3547 /* For integral types fall thru to more complex checks. */
3550 else if (AGGREGATE_TYPE_P (t1) || POINTER_TYPE_P (t1))
3552 /* Can't be the same type if they have different alignment or mode. */
3553 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3554 || TYPE_MODE (t1) != TYPE_MODE (t2))
3558 /* If the hash values of t1 and t2 are different the types can't
3559 possibly be the same. This helps keep the type-pair hashtable
3560 small, only tracking comparisons for hash collisions. */
3561 if (gimple_type_hash_1 (t1, mode) != gimple_type_hash_1 (t2, mode))
3564 /* Allocate a new cache entry for this comparison. */
3565 p = lookup_type_pair (t1, t2, >c_visited, >c_ob);
3566 if (p->same_p[mode] == 0 || p->same_p[mode] == 1)
3568 /* We have already decided whether T1 and T2 are the
3569 same, return the cached result. */
3570 return p->same_p[mode] == 1;
3573 if ((slot = pointer_map_contains (sccstate, p)) != NULL)
3574 cstate = (struct sccs *)*slot;
3575 /* Not yet visited. DFS recurse. */
3578 gimple_types_compatible_p_1 (t1, t2, mode, p,
3579 sccstack, sccstate, sccstate_obstack);
3580 cstate = (struct sccs *)* pointer_map_contains (sccstate, p);
3581 state->low = MIN (state->low, cstate->low);
3583 /* If the type is still on the SCC stack adjust the parents low. */
3584 if (cstate->dfsnum < state->dfsnum
3585 && cstate->on_sccstack)
3586 state->low = MIN (cstate->dfsnum, state->low);
3588 /* Return the current lattice value. We start with an equality
3589 assumption so types part of a SCC will be optimistically
3590 treated equal unless proven otherwise. */
3591 return cstate->u.same_p;
3594 /* Worker for gimple_types_compatible_p.
3595 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
3598 gimple_types_compatible_p_1 (tree t1, tree t2, enum gtc_mode mode,
3600 VEC(type_pair_t, heap) **sccstack,
3601 struct pointer_map_t *sccstate,
3602 struct obstack *sccstate_obstack)
3606 gcc_assert (p->same_p[mode] == -2);
3608 state = XOBNEW (sccstate_obstack, struct sccs);
3609 *pointer_map_insert (sccstate, p) = state;
3611 VEC_safe_push (type_pair_t, heap, *sccstack, p);
3612 state->dfsnum = gtc_next_dfs_num++;
3613 state->low = state->dfsnum;
3614 state->on_sccstack = true;
3615 /* Start with an equality assumption. As we DFS recurse into child
3616 SCCs this assumption may get revisited. */
3617 state->u.same_p = 1;
3619 /* If their attributes are not the same they can't be the same type. */
3620 if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
3621 goto different_types;
3623 /* Do type-specific comparisons. */
3624 switch (TREE_CODE (t1))
3628 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2), mode,
3629 state, sccstack, sccstate, sccstate_obstack))
3630 goto different_types;
3634 /* Array types are the same if the element types are the same and
3635 the number of elements is the same.
3636 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2), mode,
3637 state, sccstack, sccstate, sccstate_obstack)
3638 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
3639 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
3640 goto different_types;
3643 tree i1 = TYPE_DOMAIN (t1);
3644 tree i2 = TYPE_DOMAIN (t2);
3646 /* For an incomplete external array, the type domain can be
3647 NULL_TREE. Check this condition also. */
3648 if (i1 == NULL_TREE && i2 == NULL_TREE)
3650 else if (i1 == NULL_TREE || i2 == NULL_TREE)
3651 goto different_types;
3652 /* If for a complete array type the possibly gimplified sizes
3653 are different the types are different. */
3654 else if (((TYPE_SIZE (i1) != NULL) ^ (TYPE_SIZE (i2) != NULL))
3657 && !operand_equal_p (TYPE_SIZE (i1), TYPE_SIZE (i2), 0)))
3658 goto different_types;
3661 tree min1 = TYPE_MIN_VALUE (i1);
3662 tree min2 = TYPE_MIN_VALUE (i2);
3663 tree max1 = TYPE_MAX_VALUE (i1);
3664 tree max2 = TYPE_MAX_VALUE (i2);
3666 /* The minimum/maximum values have to be the same. */
3669 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
3670 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
3671 || operand_equal_p (min1, min2, 0))))
3674 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
3675 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
3676 || operand_equal_p (max1, max2, 0)))))
3679 goto different_types;
3684 /* Method types should belong to the same class. */
3685 if (!gtc_visit (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2),
3686 mode, state, sccstack, sccstate, sccstate_obstack))
3687 goto different_types;
3692 /* Function types are the same if the return type and argument types are the same. */
3694 if ((mode != GTC_DIAG
3695 || !gimple_compatible_complete_and_incomplete_subtype_p
3696 (TREE_TYPE (t1), TREE_TYPE (t2)))
3697 && !gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2), mode,
3698 state, sccstack, sccstate, sccstate_obstack))
3699 goto different_types;
3701 if (!comp_type_attributes (t1, t2))
3702 goto different_types;
3704 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
3708 tree parms1, parms2;
3710 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
3712 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
3714 if ((mode == GTC_MERGE
3715 || !gimple_compatible_complete_and_incomplete_subtype_p
3716 (TREE_VALUE (parms1), TREE_VALUE (parms2)))
3717 && !gtc_visit (TREE_VALUE (parms1), TREE_VALUE (parms2), mode,
3718 state, sccstack, sccstate, sccstate_obstack))
3719 goto different_types;
3722 if (parms1 || parms2)
3723 goto different_types;
3730 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2), mode,
3731 state, sccstack, sccstate, sccstate_obstack)
3732 || !gtc_visit (TYPE_OFFSET_BASETYPE (t1),
3733 TYPE_OFFSET_BASETYPE (t2), mode,
3734 state, sccstack, sccstate, sccstate_obstack))
3735 goto different_types;
3741 case REFERENCE_TYPE:
3743 /* If the two pointers have different ref-all attributes,
3744 they can't be the same type. */
3745 if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
3746 goto different_types;
3748 /* If one pointer points to an incomplete type variant of
3749 the other pointed-to type they are the same. */
3750 if (mode == GTC_DIAG
3751 && gimple_compatible_complete_and_incomplete_subtype_p
3752 (TREE_TYPE (t1), TREE_TYPE (t2)))
3755 /* Otherwise, pointer and reference types are the same if the
3756 pointed-to types are the same. */
3757 if (gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2), mode,
3758 state, sccstack, sccstate, sccstate_obstack))
3761 goto different_types;
3765 /* There is only one decltype(nullptr). */
3771 tree min1 = TYPE_MIN_VALUE (t1);
3772 tree max1 = TYPE_MAX_VALUE (t1);
3773 tree min2 = TYPE_MIN_VALUE (t2);
3774 tree max2 = TYPE_MAX_VALUE (t2);
3775 bool min_equal_p = false;
3776 bool max_equal_p = false;
3778 /* If either type has a minimum value, the other type must have the same. */
3780 if (min1 == NULL_TREE && min2 == NULL_TREE)
3782 else if (min1 && min2 && operand_equal_p (min1, min2, 0))
3785 /* Likewise, if either type has a maximum value, the other
3786 type must have the same. */
3787 if (max1 == NULL_TREE && max2 == NULL_TREE)
3789 else if (max1 && max2 && operand_equal_p (max1, max2, 0))
3792 if (!min_equal_p || !max_equal_p)
3793 goto different_types;
3800 /* FIXME lto, we cannot check bounds on enumeral types because
3801 different front ends will produce different values.
3802 In C, enumeral types are integers, while in C++ each element
3803 will have its own symbolic value. We should decide how enums
3804 are to be represented in GIMPLE and have each front end lower to that. */
3808 /* For enumeral types, all the values must be the same. */
3809 if (TYPE_VALUES (t1) == TYPE_VALUES (t2))
3812 for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
3814 v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
3816 tree c1 = TREE_VALUE (v1);
3817 tree c2 = TREE_VALUE (v2);
3819 if (TREE_CODE (c1) == CONST_DECL)
3820 c1 = DECL_INITIAL (c1);
3822 if (TREE_CODE (c2) == CONST_DECL)
3823 c2 = DECL_INITIAL (c2);
3825 if (tree_int_cst_equal (c1, c2) != 1)
3826 goto different_types;
3828 if (mode == GTC_MERGE && TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
3829 goto different_types;
3832 /* If one enumeration has more values than the other, they
3833 are not the same. */
3835 goto different_types;
3842 case QUAL_UNION_TYPE:
3846 /* The struct tags shall compare equal. */
3847 if (mode == GTC_MERGE
3848 && !compare_type_names_p (TYPE_MAIN_VARIANT (t1),
3849 TYPE_MAIN_VARIANT (t2), false))
3850 goto different_types;
3852 /* For aggregate types, all the fields must be the same. */
3853 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
3855 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
3857 /* The fields must have the same name, offset and type. */
3858 if ((mode == GTC_MERGE
3859 && DECL_NAME (f1) != DECL_NAME (f2))
3860 || DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
3861 || !gimple_compare_field_offset (f1, f2)
3862 || !gtc_visit (TREE_TYPE (f1), TREE_TYPE (f2), mode,
3863 state, sccstack, sccstate, sccstate_obstack))
3864 goto different_types;
3867 /* If one aggregate has more fields than the other, they
3868 are not the same. */
3870 goto different_types;
3879 /* Common exit path for types that are not compatible. */
3881 state->u.same_p = 0;
3884 /* Common exit path for types that are compatible. */
3886 gcc_assert (state->u.same_p == 1);
3889 if (state->low == state->dfsnum)
3893 /* Pop off the SCC and set its cache values to the final
3894 comparison result. */
3897 struct sccs *cstate;
3898 x = VEC_pop (type_pair_t, *sccstack);
3899 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
3900 cstate->on_sccstack = false;
3901 x->same_p[mode] = state->u.same_p;
3906 return state->u.same_p;
3909 /* Return true iff T1 and T2 are structurally identical. When
3910 MODE is GTC_MERGE, an incomplete type and a complete type
3911 are considered different; otherwise they are considered compatible. */
3914 gimple_types_compatible_p (tree t1, tree t2, enum gtc_mode mode)
3916 VEC(type_pair_t, heap) *sccstack = NULL;
3917 struct pointer_map_t *sccstate;
3918 struct obstack sccstate_obstack;
3919 type_pair_t p = NULL;
3922 /* Before starting to set up the SCC machinery handle simple cases. */
3924 /* Check first for the obvious case of pointer identity. */
3928 /* Check that we have two types to compare. */
3929 if (t1 == NULL_TREE || t2 == NULL_TREE)
3932 /* If the types have been previously registered and found equal
3934 if (mode == GTC_MERGE)
3936 tree leader1 = gimple_lookup_type_leader (t1);
3937 tree leader2 = gimple_lookup_type_leader (t2);
3940 || (leader1 && leader1 == leader2))
3943 else if (mode == GTC_DIAG)
3945 if (TYPE_CANONICAL (t1)
3946 && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2))
3950 /* Can't be the same type if the types don't have the same code. */
3951 if (TREE_CODE (t1) != TREE_CODE (t2))
3954 /* Can't be the same type if they have different CV qualifiers. */
3955 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
3958 /* Void types are always the same. */
3959 if (TREE_CODE (t1) == VOID_TYPE)
3962 /* Do some simple checks before doing three hashtable queries. */
3963 if (INTEGRAL_TYPE_P (t1)
3964 || SCALAR_FLOAT_TYPE_P (t1)
3965 || FIXED_POINT_TYPE_P (t1)
3966 || TREE_CODE (t1) == VECTOR_TYPE
3967 || TREE_CODE (t1) == COMPLEX_TYPE
3968 || TREE_CODE (t1) == OFFSET_TYPE)
3970 /* Can't be the same type if they have different alignment,
3971 sign, precision or mode. */
3972 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3973 || TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
3974 || TYPE_MODE (t1) != TYPE_MODE (t2)
3975 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
3978 if (TREE_CODE (t1) == INTEGER_TYPE
3979 && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
3980 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
3983 /* That's all we need to check for float and fixed-point types. */
3984 if (SCALAR_FLOAT_TYPE_P (t1)
3985 || FIXED_POINT_TYPE_P (t1))
3988 /* For integral types fall thru to more complex checks. */
3991 else if (AGGREGATE_TYPE_P (t1) || POINTER_TYPE_P (t1))
3993 /* Can't be the same type if they have different alignment or mode. */
3994 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3995 || TYPE_MODE (t1) != TYPE_MODE (t2))
3999 /* If the hash values of t1 and t2 are different the types can't
4000 possibly be the same. This helps keep the type-pair hashtable
4001 small, only tracking comparisons for hash collisions. */
4002 if (gimple_type_hash_1 (t1, mode) != gimple_type_hash_1 (t2, mode))
4005 /* If we've visited this type pair before (in the case of aggregates
4006 with self-referential types), and we made a decision, return it. */
4007 p = lookup_type_pair (t1, t2, >c_visited, >c_ob);
4008 if (p->same_p[mode] == 0 || p->same_p[mode] == 1)
4010 /* We have already decided whether T1 and T2 are the
4011 same, return the cached result. */
4012 return p->same_p[mode] == 1;
4015 /* Now set up the SCC machinery for the comparison. */
4016 gtc_next_dfs_num = 1;
4017 sccstate = pointer_map_create ();
4018 gcc_obstack_init (&sccstate_obstack);
4019 res = gimple_types_compatible_p_1 (t1, t2, mode, p,
4020 &sccstack, sccstate, &sccstate_obstack);
4021 VEC_free (type_pair_t, heap, sccstack);
4022 pointer_map_destroy (sccstate);
4023 obstack_free (&sccstate_obstack, NULL);
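/* For illustration (hypothetical types): two RECORD_TYPE nodes read
   from different translation units that both describe
   'struct point { int x; int y; }' are distinct trees, yet they compare
   equal here in GTC_MERGE mode, which is what later allows
   gimple_register_type to map both onto a single leader.  */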
4030 iterative_hash_gimple_type (tree, hashval_t, VEC(tree, heap) **,
4031 struct pointer_map_t *, struct obstack *,
4034 /* DFS visit the edge from the caller's type with state *STATE to T.
4035 Update the caller's type hash V with the hash for T if it is not part
4036 of the SCC containing the caller's type and return it.
4037 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
4040 visit (tree t, struct sccs *state, hashval_t v,
4041 VEC (tree, heap) **sccstack,
4042 struct pointer_map_t *sccstate,
4043 struct obstack *sccstate_obstack, enum gtc_mode mode)
4045 struct sccs *cstate = NULL;
4046 struct tree_int_map m;
4049 /* If there is a hash value recorded for this type then it can't
4050 possibly be part of our parent SCC. Simply mix in its hash. */
4052 if ((slot = htab_find_slot (mode == GTC_MERGE
4053 ? type_hash_cache : canonical_type_hash_cache,
4056 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, v);
4058 if ((slot = pointer_map_contains (sccstate, t)) != NULL)
4059 cstate = (struct sccs *)*slot;
4063 /* Not yet visited. DFS recurse. */
4064 tem = iterative_hash_gimple_type (t, v,
4065 sccstack, sccstate, sccstate_obstack,
4068 cstate = (struct sccs *)* pointer_map_contains (sccstate, t);
4069 state->low = MIN (state->low, cstate->low);
4070 /* If the type is no longer on the SCC stack and thus is not part
4071 of the parent's SCC, mix in its hash value. Otherwise we will
4072 ignore the type for hashing purposes and return the unaltered hash value. */
4074 if (!cstate->on_sccstack)
4077 if (cstate->dfsnum < state->dfsnum
4078 && cstate->on_sccstack)
4079 state->low = MIN (cstate->dfsnum, state->low);
4081 /* We are part of our parent's SCC, skip this type during hashing
4082 and return the unaltered hash value. */
4086 /* Hash NAME with the previous hash value V and return it. */
4089 iterative_hash_name (tree name, hashval_t v)
4093 if (TREE_CODE (name) == TYPE_DECL)
4094 name = DECL_NAME (name);
4097 gcc_assert (TREE_CODE (name) == IDENTIFIER_NODE);
4098 return iterative_hash_object (IDENTIFIER_HASH_VALUE (name), v);
4101 /* Return a hash value for gimple type TYPE combined with VAL.
4102 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.
4104 To hash a type we end up hashing in types that are reachable from it.
4105 Through pointers we can end up with cycles, which would break the
4106 required property that we compute the same hash value
4107 for structurally equivalent types. To avoid this we have to
4108 hash all types in a cycle (the SCC) in a commutative way. The
4109 easiest way is to not mix in the hashes of the SCC members at
4110 all. To make this work we have to delay setting the hash
4111 values of the SCC until it is complete. */
4114 iterative_hash_gimple_type (tree type, hashval_t val,
4115 VEC(tree, heap) **sccstack,
4116 struct pointer_map_t *sccstate,
4117 struct obstack *sccstate_obstack,
4124 /* Not visited during this DFS walk. */
4125 gcc_checking_assert (!pointer_map_contains (sccstate, type));
4126 state = XOBNEW (sccstate_obstack, struct sccs);
4127 *pointer_map_insert (sccstate, type) = state;
4129 VEC_safe_push (tree, heap, *sccstack, type);
4130 state->dfsnum = next_dfs_num++;
4131 state->low = state->dfsnum;
4132 state->on_sccstack = true;
4134 /* Combine a few common features of types so that types are grouped into
4135 smaller sets; when searching for existing matching types to merge,
4136 only existing types having the same features as the new type will be checked. */
4138 v = iterative_hash_hashval_t (TREE_CODE (type), 0);
4139 v = iterative_hash_hashval_t (TYPE_QUALS (type), v);
4140 v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);
4142 /* Do not hash the type's size as this will cause differences in
4143 hash values for the complete vs. the incomplete type variant. */
4145 /* Incorporate common features of numerical types. */
4146 if (INTEGRAL_TYPE_P (type)
4147 || SCALAR_FLOAT_TYPE_P (type)
4148 || FIXED_POINT_TYPE_P (type))
4150 v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
4151 v = iterative_hash_hashval_t (TYPE_MODE (type), v);
4152 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
4155 /* For pointer and reference types, fold in information about the type
4156 pointed to but do not recurse into possibly incomplete types to
4157 avoid hash differences for complete vs. incomplete types. */
4158 if (POINTER_TYPE_P (type))
4160 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (type)))
4162 v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
4163 v = iterative_hash_name
4164 (TYPE_NAME (TYPE_MAIN_VARIANT (TREE_TYPE (type))), v);
4167 v = visit (TREE_TYPE (type), state, v,
4168 sccstack, sccstate, sccstate_obstack, mode);
4171 /* For integer types hash the type's min/max values and the string flag. */
4172 if (TREE_CODE (type) == INTEGER_TYPE)
4174 /* OMP lowering can introduce error_mark_node in place of
4175 random local decls in types. */
4176 if (TYPE_MIN_VALUE (type) != error_mark_node)
4177 v = iterative_hash_expr (TYPE_MIN_VALUE (type), v);
4178 if (TYPE_MAX_VALUE (type) != error_mark_node)
4179 v = iterative_hash_expr (TYPE_MAX_VALUE (type), v);
4180 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
4183 /* For array types hash their domain and the string flag. */
4184 if (TREE_CODE (type) == ARRAY_TYPE
4185 && TYPE_DOMAIN (type))
4187 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
4188 v = visit (TYPE_DOMAIN (type), state, v,
4189 sccstack, sccstate, sccstate_obstack, mode);
4192 /* Recurse for aggregates with a single element type. */
4193 if (TREE_CODE (type) == ARRAY_TYPE
4194 || TREE_CODE (type) == COMPLEX_TYPE
4195 || TREE_CODE (type) == VECTOR_TYPE)
4196 v = visit (TREE_TYPE (type), state, v,
4197 sccstack, sccstate, sccstate_obstack, mode);
4199 /* Incorporate function return and argument types. */
4200 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
4205 /* For method types also incorporate their parent class. */
4206 if (TREE_CODE (type) == METHOD_TYPE)
4207 v = visit (TYPE_METHOD_BASETYPE (type), state, v,
4208 sccstack, sccstate, sccstate_obstack, mode);
4210 /* For result types allow mismatch in completeness. */
4211 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (type)))
4213 v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
4214 v = iterative_hash_name
4215 (TYPE_NAME (TYPE_MAIN_VARIANT (TREE_TYPE (type))), v);
4218 v = visit (TREE_TYPE (type), state, v,
4219 sccstack, sccstate, sccstate_obstack, mode);
4221 for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
4223 /* For argument types allow mismatch in completeness. */
4224 if (RECORD_OR_UNION_TYPE_P (TREE_VALUE (p)))
4226 v = iterative_hash_hashval_t (TREE_CODE (TREE_VALUE (p)), v);
4227 v = iterative_hash_name
4228 (TYPE_NAME (TYPE_MAIN_VARIANT (TREE_VALUE (p))), v);
4231 v = visit (TREE_VALUE (p), state, v,
4232 sccstack, sccstate, sccstate_obstack, mode);
4236 v = iterative_hash_hashval_t (na, v);
4239 if (TREE_CODE (type) == RECORD_TYPE
4240 || TREE_CODE (type) == UNION_TYPE
4241 || TREE_CODE (type) == QUAL_UNION_TYPE)
4246 if (mode == GTC_MERGE)
4247 v = iterative_hash_name (TYPE_NAME (TYPE_MAIN_VARIANT (type)), v);
4249 for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
4251 if (mode == GTC_MERGE)
4252 v = iterative_hash_name (DECL_NAME (f), v);
4253 v = visit (TREE_TYPE (f), state, v,
4254 sccstack, sccstate, sccstate_obstack, mode);
4258 v = iterative_hash_hashval_t (nf, v);
4261 /* Record hash for us. */
4264 /* See if we found an SCC. */
4265 if (state->low == state->dfsnum)
4269 /* Pop off the SCC and set its hash values. */
4272 struct sccs *cstate;
4273 struct tree_int_map *m = ggc_alloc_cleared_tree_int_map ();
4274 x = VEC_pop (tree, *sccstack);
4275 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
4276 cstate->on_sccstack = false;
4278 m->to = cstate->u.hash;
4279 slot = htab_find_slot (mode == GTC_MERGE
4280 ? type_hash_cache : canonical_type_hash_cache,
4282 gcc_assert (!*slot);
4288 return iterative_hash_hashval_t (v, val);
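/* For illustration (hypothetical type): when hashing the RECORD_TYPE of

     struct list { struct list *next; int val; };

   the 'next' field leads to its POINTER_TYPE, but, as described above,
   the pointer hash only folds in the pointed-to record's code and name
   instead of recursing into it, so complete and incomplete variants of
   'struct list' give the same pointer hash.  When a genuine cycle of
   visited types remains, the SCC machinery above delays recording the
   hash values until the whole SCC is popped off the stack.  */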
4292 /* Returns a hash value for P (assumed to be a type). The hash value
4293 is computed using some distinguishing features of the type. Note
4294 that we cannot use pointer hashing here as we may be dealing with
4295 two distinct instances of the same type.
4297 This function should produce the same hash value for two compatible
4298 types according to gimple_types_compatible_p. */
4301 gimple_type_hash_1 (const void *p, enum gtc_mode mode)
4303 const_tree t = (const_tree) p;
4304 VEC(tree, heap) *sccstack = NULL;
4305 struct pointer_map_t *sccstate;
4306 struct obstack sccstate_obstack;
4309 struct tree_int_map m;
4311 if (mode == GTC_MERGE
4312 && type_hash_cache == NULL)
4313 type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
4314 tree_int_map_eq, NULL);
4315 else if (mode == GTC_DIAG
4316 && canonical_type_hash_cache == NULL)
4317 canonical_type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
4318 tree_int_map_eq, NULL);
4320 m.base.from = CONST_CAST_TREE (t);
4321 if ((slot = htab_find_slot (mode == GTC_MERGE
4322 ? type_hash_cache : canonical_type_hash_cache,
4325 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, 0);
4327 /* Perform a DFS walk and pre-hash all reachable types. */
4329 sccstate = pointer_map_create ();
4330 gcc_obstack_init (&sccstate_obstack);
4331 val = iterative_hash_gimple_type (CONST_CAST_TREE (t), 0,
4332 &sccstack, sccstate, &sccstate_obstack,
4334 VEC_free (tree, heap, sccstack);
4335 pointer_map_destroy (sccstate);
4336 obstack_free (&sccstate_obstack, NULL);
4342 gimple_type_hash (const void *p)
4344 return gimple_type_hash_1 (p, GTC_MERGE);
4348 gimple_canonical_type_hash (const void *p)
4350 return gimple_type_hash_1 (p, GTC_DIAG);
4354 /* Returns nonzero if P1 and P2 are equal. */
4357 gimple_type_eq (const void *p1, const void *p2)
4359 const_tree t1 = (const_tree) p1;
4360 const_tree t2 = (const_tree) p2;
4361 return gimple_types_compatible_p (CONST_CAST_TREE (t1),
4362 CONST_CAST_TREE (t2), GTC_MERGE);
4366 /* Register type T in the global type table gimple_types.
4367 If another type T', compatible with T, already existed in
4368 gimple_types then return T', otherwise return T. This is used by
4369 LTO to merge identical types read from different TUs. */
4372 gimple_register_type (tree t)
4375 gimple_type_leader_entry *leader;
4376 tree mv_leader = NULL_TREE;
4378 gcc_assert (TYPE_P (t));
4380 if (!gimple_type_leader)
4381 gimple_type_leader = ggc_alloc_cleared_vec_gimple_type_leader_entry_s
4382 (GIMPLE_TYPE_LEADER_SIZE);
4383 /* If we registered this type before return the cached result. */
4384 leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
4385 if (leader->type == t)
4386 return leader->leader;
4388 /* Always register the main variant first. This is important so we
4389 pick up the non-typedef variants as canonical, otherwise we'll end
4390 up taking typedef ids for structure tags during comparison. */
4391 if (TYPE_MAIN_VARIANT (t) != t)
4392 mv_leader = gimple_register_type (TYPE_MAIN_VARIANT (t));
4394 if (gimple_types == NULL)
4395 gimple_types = htab_create_ggc (16381, gimple_type_hash, gimple_type_eq, 0);
4397 slot = htab_find_slot (gimple_types, t, INSERT);
4399 && *(tree *)slot != t)
4401 tree new_type = (tree) *((tree *) slot);
4403 /* Do not merge types with different addressability. */
4404 gcc_assert (TREE_ADDRESSABLE (t) == TREE_ADDRESSABLE (new_type));
4406 /* If t is not its main variant then make t unreachable from its
4407 main variant list. Otherwise we'd queue up a lot of duplicates there.
4409 if (t != TYPE_MAIN_VARIANT (t))
4411 tree tem = TYPE_MAIN_VARIANT (t);
4412 while (tem && TYPE_NEXT_VARIANT (tem) != t)
4413 tem = TYPE_NEXT_VARIANT (tem);
4415 TYPE_NEXT_VARIANT (tem) = TYPE_NEXT_VARIANT (t);
4416 TYPE_NEXT_VARIANT (t) = NULL_TREE;
4419 /* If we are a pointer then remove us from the pointer-to or
4421 reference-to chain. Otherwise we'd queue up a lot of duplicates there.
4422 if (TREE_CODE (t) == POINTER_TYPE)
4424 if (TYPE_POINTER_TO (TREE_TYPE (t)) == t)
4425 TYPE_POINTER_TO (TREE_TYPE (t)) = TYPE_NEXT_PTR_TO (t);
4428 tree tem = TYPE_POINTER_TO (TREE_TYPE (t));
4429 while (tem && TYPE_NEXT_PTR_TO (tem) != t)
4430 tem = TYPE_NEXT_PTR_TO (tem);
4432 TYPE_NEXT_PTR_TO (tem) = TYPE_NEXT_PTR_TO (t);
4434 TYPE_NEXT_PTR_TO (t) = NULL_TREE;
4436 else if (TREE_CODE (t) == REFERENCE_TYPE)
4438 if (TYPE_REFERENCE_TO (TREE_TYPE (t)) == t)
4439 TYPE_REFERENCE_TO (TREE_TYPE (t)) = TYPE_NEXT_REF_TO (t);
4442 tree tem = TYPE_REFERENCE_TO (TREE_TYPE (t));
4443 while (tem && TYPE_NEXT_REF_TO (tem) != t)
4444 tem = TYPE_NEXT_REF_TO (tem);
4446 TYPE_NEXT_REF_TO (tem) = TYPE_NEXT_REF_TO (t);
4448 TYPE_NEXT_REF_TO (t) = NULL_TREE;
4452 leader->leader = new_type;
4459 /* We're the type leader. Make our TYPE_MAIN_VARIANT valid. */
4460 if (TYPE_MAIN_VARIANT (t) != t
4461 && TYPE_MAIN_VARIANT (t) != mv_leader)
4463 /* Remove us from our main variant list as we are not the variant
4464 leader and the variant leader will change. */
4465 tree tem = TYPE_MAIN_VARIANT (t);
4466 while (tem && TYPE_NEXT_VARIANT (tem) != t)
4467 tem = TYPE_NEXT_VARIANT (tem);
4469 TYPE_NEXT_VARIANT (tem) = TYPE_NEXT_VARIANT (t);
4470 TYPE_NEXT_VARIANT (t) = NULL_TREE;
4471 /* Adjust our main variant. Linking us into its variant list
4472 will happen at fixup time. */
4473 TYPE_MAIN_VARIANT (t) = mv_leader;
4482 /* Returns nonzero if P1 and P2 are equal. */
4485 gimple_canonical_type_eq (const void *p1, const void *p2)
4487 const_tree t1 = (const_tree) p1;
4488 const_tree t2 = (const_tree) p2;
4489 return gimple_types_compatible_p (CONST_CAST_TREE (t1),
4490 CONST_CAST_TREE (t2), GTC_DIAG);
4493 /* Register the canonical type of T in the global canonical type table
4494 gimple_canonical_types, using GTC_DIAG compatibility to find an existing
4495 canonical type. Set TYPE_CANONICAL of T to the result and return it.
4496 This is used by LTO to merge identical types read from different TUs. */
4499 gimple_register_canonical_type (tree t)
4504 gcc_assert (TYPE_P (t));
4506 if (TYPE_CANONICAL (t))
4507 return TYPE_CANONICAL (t);
4509 /* Always register the type itself first so that if it turns out
4510 to be the canonical type it will be the one we merge to as well. */
4511 t = gimple_register_type (t);
4513 /* Always register the main variant first. This is important so we
4514 pick up the non-typedef variants as canonical, otherwise we'll end
4515 up taking typedef ids for structure tags during comparison. */
4516 if (TYPE_MAIN_VARIANT (t) != t)
4517 gimple_register_canonical_type (TYPE_MAIN_VARIANT (t));
4519 if (gimple_canonical_types == NULL)
4520 gimple_canonical_types = htab_create_ggc (16381, gimple_canonical_type_hash,
4521 gimple_canonical_type_eq, 0);
4523 slot = htab_find_slot (gimple_canonical_types, t, INSERT);
4525 && *(tree *)slot != t)
4527 tree new_type = (tree) *((tree *) slot);
4529 TYPE_CANONICAL (t) = new_type;
4534 TYPE_CANONICAL (t) = t;
4538 /* Also cache the canonical type in the non-leaders. */
4539 TYPE_CANONICAL (orig_t) = t;
4545 /* Show statistics on references to the global type table gimple_types. */
4548 print_gimple_types_stats (void)
4551 fprintf (stderr, "GIMPLE type table: size %ld, %ld elements, "
4552 "%ld searches, %ld collisions (ratio: %f)\n",
4553 (long) htab_size (gimple_types),
4554 (long) htab_elements (gimple_types),
4555 (long) gimple_types->searches,
4556 (long) gimple_types->collisions,
4557 htab_collisions (gimple_types));
4559 fprintf (stderr, "GIMPLE type table is empty\n");
4560 if (type_hash_cache)
4561 fprintf (stderr, "GIMPLE type hash table: size %ld, %ld elements, "
4562 "%ld searches, %ld collisions (ratio: %f)\n",
4563 (long) htab_size (type_hash_cache),
4564 (long) htab_elements (type_hash_cache),
4565 (long) type_hash_cache->searches,
4566 (long) type_hash_cache->collisions,
4567 htab_collisions (type_hash_cache));
4569 fprintf (stderr, "GIMPLE type hash table is empty\n");
4570 if (gimple_canonical_types)
4571 fprintf (stderr, "GIMPLE canonical type table: size %ld, %ld elements, "
4572 "%ld searches, %ld collisions (ratio: %f)\n",
4573 (long) htab_size (gimple_canonical_types),
4574 (long) htab_elements (gimple_canonical_types),
4575 (long) gimple_canonical_types->searches,
4576 (long) gimple_canonical_types->collisions,
4577 htab_collisions (gimple_canonical_types));
4579 fprintf (stderr, "GIMPLE canonical type table is empty\n");
4580 if (canonical_type_hash_cache)
4581 fprintf (stderr, "GIMPLE canonical type hash table: size %ld, %ld elements, "
4582 "%ld searches, %ld collisions (ratio: %f)\n",
4583 (long) htab_size (canonical_type_hash_cache),
4584 (long) htab_elements (canonical_type_hash_cache),
4585 (long) canonical_type_hash_cache->searches,
4586 (long) canonical_type_hash_cache->collisions,
4587 htab_collisions (canonical_type_hash_cache));
4589 fprintf (stderr, "GIMPLE canonical type hash table is empty\n");
4591 fprintf (stderr, "GIMPLE type comparison table: size %ld, %ld "
4592 "elements, %ld searches, %ld collisions (ratio: %f)\n",
4593 (long) htab_size (gtc_visited),
4594 (long) htab_elements (gtc_visited),
4595 (long) gtc_visited->searches,
4596 (long) gtc_visited->collisions,
4597 htab_collisions (gtc_visited));
4599 fprintf (stderr, "GIMPLE type comparison table is empty\n");
/* Free the gimple type hashtables used for LTO type merging.  */

void
free_gimple_type_tables (void)
{
  /* Last chance to print stats for the tables.  */
  if (flag_lto_report)
    print_gimple_types_stats ();

  if (gimple_types)
    {
      htab_delete (gimple_types);
      gimple_types = NULL;
    }
  if (gimple_canonical_types)
    {
      htab_delete (gimple_canonical_types);
      gimple_canonical_types = NULL;
    }
  if (type_hash_cache)
    {
      htab_delete (type_hash_cache);
      type_hash_cache = NULL;
    }
  if (canonical_type_hash_cache)
    {
      htab_delete (canonical_type_hash_cache);
      canonical_type_hash_cache = NULL;
    }
  if (gtc_visited)
    {
      htab_delete (gtc_visited);
      obstack_free (&gtc_ob, NULL);
      gtc_visited = NULL;
    }
  gimple_type_leader = NULL;
}
/* Return a type the same as TYPE except unsigned or
   signed according to UNSIGNEDP.  */

tree
gimple_signed_or_unsigned_type (bool unsignedp, tree type)
{
  tree type1;

  type1 = TYPE_MAIN_VARIANT (type);
  if (type1 == signed_char_type_node
      || type1 == char_type_node
      || type1 == unsigned_char_type_node)
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (type1 == integer_type_node || type1 == unsigned_type_node)
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (type1 == long_long_integer_type_node
      || type1 == long_long_unsigned_type_node)
    return unsignedp
           ? long_long_unsigned_type_node
           : long_long_integer_type_node;
  if (int128_integer_type_node
      && (type1 == int128_integer_type_node
	  || type1 == int128_unsigned_type_node))
    return unsignedp
           ? int128_unsigned_type_node
           : int128_integer_type_node;
#if HOST_BITS_PER_WIDE_INT >= 64
  if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

#define GIMPLE_FIXED_TYPES(NAME)	    \
  if (type1 == short_ ## NAME ## _type_node \
      || type1 == unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_short_ ## NAME ## _type_node \
		     : short_ ## NAME ## _type_node; \
  if (type1 == NAME ## _type_node \
      || type1 == unsigned_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_ ## NAME ## _type_node \
		     : NAME ## _type_node; \
  if (type1 == long_ ## NAME ## _type_node \
      || type1 == unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_ ## NAME ## _type_node \
		     : long_ ## NAME ## _type_node; \
  if (type1 == long_long_ ## NAME ## _type_node \
      || type1 == unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
		     : long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES(NAME) \
  if (type1 == NAME ## _type_node \
      || type1 == u ## NAME ## _type_node) \
    return unsignedp ? u ## NAME ## _type_node \
		     : NAME ## _type_node;

#define GIMPLE_FIXED_TYPES_SAT(NAME) \
  if (type1 == sat_ ## short_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
		     : sat_ ## short_ ## NAME ## _type_node; \
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
		     : sat_ ## long_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
		     : sat_ ## long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES_SAT(NAME) \
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## u ## NAME ## _type_node) \
    return unsignedp ? sat_ ## u ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node;

  GIMPLE_FIXED_TYPES (fract);
  GIMPLE_FIXED_TYPES_SAT (fract);
  GIMPLE_FIXED_TYPES (accum);
  GIMPLE_FIXED_TYPES_SAT (accum);

  GIMPLE_FIXED_MODE_TYPES (qq);
  GIMPLE_FIXED_MODE_TYPES (hq);
  GIMPLE_FIXED_MODE_TYPES (sq);
  GIMPLE_FIXED_MODE_TYPES (dq);
  GIMPLE_FIXED_MODE_TYPES (tq);
  GIMPLE_FIXED_MODE_TYPES_SAT (qq);
  GIMPLE_FIXED_MODE_TYPES_SAT (hq);
  GIMPLE_FIXED_MODE_TYPES_SAT (sq);
  GIMPLE_FIXED_MODE_TYPES_SAT (dq);
  GIMPLE_FIXED_MODE_TYPES_SAT (tq);
  GIMPLE_FIXED_MODE_TYPES (ha);
  GIMPLE_FIXED_MODE_TYPES (sa);
  GIMPLE_FIXED_MODE_TYPES (da);
  GIMPLE_FIXED_MODE_TYPES (ta);
  GIMPLE_FIXED_MODE_TYPES_SAT (ha);
  GIMPLE_FIXED_MODE_TYPES_SAT (sa);
  GIMPLE_FIXED_MODE_TYPES_SAT (da);
  GIMPLE_FIXED_MODE_TYPES_SAT (ta);

  /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
     the precision; they have precision set to match their range, but
     may use a wider mode to match an ABI.  If we change modes, we may
     wind up with bad conversions.  For INTEGER_TYPEs in C, must check
     the precision as well, so as to yield correct results for
     bit-field types.  C++ does not have these separate bit-field
     types, and producing a signed or unsigned variant of an
     ENUMERAL_TYPE may cause other problems as well.  */
  if (!INTEGRAL_TYPE_P (type)
      || TYPE_UNSIGNED (type) == unsignedp)
    return type;

#define TYPE_OK(node)						\
  (TYPE_MODE (type) == TYPE_MODE (node)				\
   && TYPE_PRECISION (type) == TYPE_PRECISION (node))
  if (TYPE_OK (signed_char_type_node))
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (TYPE_OK (integer_type_node))
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (TYPE_OK (short_integer_type_node))
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (TYPE_OK (long_integer_type_node))
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (TYPE_OK (long_long_integer_type_node))
    return (unsignedp
	    ? long_long_unsigned_type_node
	    : long_long_integer_type_node);
  if (int128_integer_type_node && TYPE_OK (int128_integer_type_node))
    return (unsignedp
	    ? int128_unsigned_type_node
	    : int128_integer_type_node);

#if HOST_BITS_PER_WIDE_INT >= 64
  if (TYPE_OK (intTI_type_node))
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (TYPE_OK (intDI_type_node))
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (TYPE_OK (intSI_type_node))
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (TYPE_OK (intHI_type_node))
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (TYPE_OK (intQI_type_node))
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

#undef GIMPLE_FIXED_TYPES
#undef GIMPLE_FIXED_MODE_TYPES
#undef GIMPLE_FIXED_TYPES_SAT
#undef GIMPLE_FIXED_MODE_TYPES_SAT
#undef TYPE_OK

  return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
}
/* Return an unsigned type the same as TYPE in other respects.  */

tree
gimple_unsigned_type (tree type)
{
  return gimple_signed_or_unsigned_type (true, type);
}


/* Return a signed type the same as TYPE in other respects.  */

tree
gimple_signed_type (tree type)
{
  return gimple_signed_or_unsigned_type (false, type);
}
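/* A minimal sketch of the expected behavior (assuming the standard C type
   nodes have been initialized):

     gimple_unsigned_type (integer_type_node) == unsigned_type_node
     gimple_signed_type (unsigned_char_type_node) == signed_char_type_node

   For an integral type whose mode and precision match none of the standard
   nodes, a nonstandard integer type of the same precision is built
   instead.  */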
/* Return the type-based alias set for T, which may be an expression
   or a type.  Return -1 if we don't do anything special.  */

alias_set_type
gimple_get_alias_set (tree t)
{
  tree u;

  /* Permit type-punning when accessing a union, provided the access
     is directly through the union.  For example, this code does not
     permit taking the address of a union member and then storing
     through it.  Even the type-punning allowed here is a GCC
     extension, albeit a common and useful one; the C standard says
     that such accesses have implementation-defined behavior.  */
  for (u = t;
       TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
       u = TREE_OPERAND (u, 0))
    if (TREE_CODE (u) == COMPONENT_REF
	&& TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
      return 0;

  /* That's all the expressions we handle specially.  */
  if (!TYPE_P (t))
    return -1;

  /* For convenience, follow the C standard when dealing with
     character types.  Any object may be accessed via an lvalue that
     has character type.  */
  if (t == char_type_node
      || t == signed_char_type_node
      || t == unsigned_char_type_node)
    return 0;

  /* Allow aliasing between signed and unsigned variants of the same
     type.  We treat the signed variant as canonical.  */
  if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
    {
      tree t1 = gimple_signed_type (t);

      /* t1 == t can happen for boolean nodes which are always unsigned.  */
      if (t1 != t)
	return get_alias_set (t1);
    }

  return -1;
}
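/* Illustrative example of the union rule above (plain C, not compiler
   code): given

     union u { int i; float f; } x;

   an access such as 'x.f' goes directly through the union object and so is
   placed in alias set 0, whereas storing through 'float *p = &x.f;' is not
   covered, because that access is no longer directly through the union.  */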
/* Data structure used to count the number of dereferences to PTR
   inside an expression.  */
struct count_ptr_d
{
  tree ptr;
  unsigned num_stores;
  unsigned num_loads;
};

/* Helper for count_uses_and_derefs.  Called by walk_tree to look for
   MEM_REF nodes whose base is the pointer passed in DATA.  */

static tree
count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info;

  /* Do not walk inside ADDR_EXPR nodes.  In the expression &ptr->fld,
     pointer 'ptr' is *not* dereferenced, it is simply used to compute
     the address of 'fld' as 'ptr + offsetof(fld)'.  */
  if (TREE_CODE (*tp) == ADDR_EXPR)
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (*tp) == MEM_REF && TREE_OPERAND (*tp, 0) == count_p->ptr)
    {
      if (wi_p->is_lhs)
	count_p->num_stores++;
      else
	count_p->num_loads++;
    }

  return NULL_TREE;
}

/* Count the number of direct and indirect uses for pointer PTR in
   statement STMT.  The number of direct uses is stored in
   *NUM_USES_P.  Indirect references are counted separately depending
   on whether they are store or load operations.  The counts are
   stored in *NUM_STORES_P and *NUM_LOADS_P.  */

void
count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p,
		       unsigned *num_loads_p, unsigned *num_stores_p)
{
  ssa_op_iter i;
  tree use;

  *num_uses_p = 0;
  *num_loads_p = 0;
  *num_stores_p = 0;

  /* Find out the total number of uses of PTR in STMT.  */
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
    if (use == ptr)
      (*num_uses_p)++;

  /* Now count the number of indirect references to PTR.  This is
     truly awful, but we don't have much choice.  There are no parent
     pointers inside INDIRECT_REFs, so an expression like
     '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
     find all the indirect and direct uses of x_1 inside.  The only
     shortcut we can take is the fact that GIMPLE only allows
     INDIRECT_REFs inside the expressions below.  */
  if (is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_RETURN
      || gimple_code (stmt) == GIMPLE_ASM
      || is_gimple_call (stmt))
    {
      struct walk_stmt_info wi;
      struct count_ptr_d count;

      count.ptr = ptr;
      count.num_stores = 0;
      count.num_loads = 0;

      memset (&wi, 0, sizeof (wi));
      wi.info = &count;
      walk_gimple_op (stmt, count_ptr_derefs, &wi);

      *num_stores_p = count.num_stores;
      *num_loads_p = count.num_loads;
    }

  gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
}
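/* Usage sketch: for a statement like '*x_1 = foo (x_1, *x_1)',

     unsigned uses, loads, stores;
     count_uses_and_derefs (x_1, stmt, &uses, &loads, &stores);

   would yield uses == 3, loads == 1 and stores == 1: x_1 appears three
   times as an SSA use, is loaded through once (the argument '*x_1') and
   stored through once (the LHS).  Here x_1 and stmt stand for the
   corresponding SSA name and gimple statement; the numbers are for
   illustration only.  */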
/* From a tree operand OP return the base of a load or store operation
   or NULL_TREE if OP is not a load or a store.  */

static tree
get_base_loadstore (tree op)
{
  while (handled_component_p (op))
    op = TREE_OPERAND (op, 0);
  if (DECL_P (op)
      || INDIRECT_REF_P (op)
      || TREE_CODE (op) == MEM_REF
      || TREE_CODE (op) == TARGET_MEM_REF)
    return op;
  return NULL_TREE;
}
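/* For example, for an operand like 'a.b[i].c' the handled-component chain
   is stripped down to the underlying declaration 'a', which is returned;
   for '*p' the MEM_REF itself is returned; for a bare SSA name or constant
   the result is NULL_TREE.  (Illustrative description of the function
   above.)  */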
/* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
   VISIT_ADDR if non-NULL on loads, stores and address-taken operands
   passing the STMT, the base of the operand and DATA to it.  The base
   will be either a decl, an indirect reference (including TARGET_MEM_REF)
   or the argument of an address expression.
   Returns the results of these callbacks or'ed.  */

bool
walk_stmt_load_store_addr_ops (gimple stmt, void *data,
			       bool (*visit_load)(gimple, tree, void *),
			       bool (*visit_store)(gimple, tree, void *),
			       bool (*visit_addr)(gimple, tree, void *))
{
  bool ret = false;
  unsigned i;
  if (gimple_assign_single_p (stmt))
    {
      tree lhs, rhs;
      if (visit_store)
	{
	  lhs = get_base_loadstore (gimple_assign_lhs (stmt));
	  if (lhs)
	    ret |= visit_store (stmt, lhs, data);
	}
      rhs = gimple_assign_rhs1 (stmt);
      while (handled_component_p (rhs))
	rhs = TREE_OPERAND (rhs, 0);
      if (visit_addr)
	{
	  if (TREE_CODE (rhs) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
	  else if (TREE_CODE (rhs) == TARGET_MEM_REF
		   && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data);
	  else if (TREE_CODE (rhs) == OBJ_TYPE_REF
		   && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
						   0), data);
	  lhs = gimple_assign_lhs (stmt);
	  if (TREE_CODE (lhs) == TARGET_MEM_REF
	      && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), data);
	}
      if (visit_load)
	{
	  rhs = get_base_loadstore (rhs);
	  if (rhs)
	    ret |= visit_load (stmt, rhs, data);
	}
    }
  else if (visit_addr
	   && (is_gimple_assign (stmt)
	       || gimple_code (stmt) == GIMPLE_COND))
    {
      for (i = 0; i < gimple_num_ops (stmt); ++i)
	if (gimple_op (stmt, i)
	    && TREE_CODE (gimple_op (stmt, i)) == ADDR_EXPR)
	  ret |= visit_addr (stmt, TREE_OPERAND (gimple_op (stmt, i), 0), data);
    }
  else if (is_gimple_call (stmt))
    {
      if (visit_store)
	{
	  tree lhs = gimple_call_lhs (stmt);
	  if (lhs)
	    {
	      lhs = get_base_loadstore (lhs);
	      if (lhs)
		ret |= visit_store (stmt, lhs, data);
	    }
	}
      if (visit_load || visit_addr)
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree rhs = gimple_call_arg (stmt, i);
	    if (visit_addr
		&& TREE_CODE (rhs) == ADDR_EXPR)
	      ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
	    else if (visit_load)
	      {
		rhs = get_base_loadstore (rhs);
		if (rhs)
		  ret |= visit_load (stmt, rhs, data);
	      }
	  }
      if (visit_addr
	  && gimple_call_chain (stmt)
	  && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
	ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
			   data);
      if (visit_addr
	  && gimple_call_return_slot_opt_p (stmt)
	  && gimple_call_lhs (stmt) != NULL_TREE
	  && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
	ret |= visit_addr (stmt, gimple_call_lhs (stmt), data);
    }
  else if (gimple_code (stmt) == GIMPLE_ASM)
    {
      unsigned noutputs;
      const char *constraint;
      const char **oconstraints;
      bool allows_mem, allows_reg, is_inout;
      noutputs = gimple_asm_noutputs (stmt);
      oconstraints = XALLOCAVEC (const char *, noutputs);
      if (visit_store || visit_addr)
	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (stmt, i);
	    tree op = get_base_loadstore (TREE_VALUE (link));
	    if (op && visit_store)
	      ret |= visit_store (stmt, op, data);
	    if (visit_addr)
	      {
		constraint = TREE_STRING_POINTER
		    (TREE_VALUE (TREE_PURPOSE (link)));
		oconstraints[i] = constraint;
		parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
					 &allows_reg, &is_inout);
		if (op && !allows_reg && allows_mem)
		  ret |= visit_addr (stmt, op, data);
	      }
	  }
      if (visit_load || visit_addr)
	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    if (visit_addr
		&& TREE_CODE (op) == ADDR_EXPR)
	      ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
	    else if (visit_load || visit_addr)
	      {
		op = get_base_loadstore (op);
		if (op)
		  {
		    if (visit_load)
		      ret |= visit_load (stmt, op, data);
		    if (visit_addr)
		      {
			constraint = TREE_STRING_POINTER
			    (TREE_VALUE (TREE_PURPOSE (link)));
			parse_input_constraint (&constraint, 0, 0, noutputs,
						0, oconstraints,
						&allows_mem, &allows_reg);
			if (!allows_reg && allows_mem)
			  ret |= visit_addr (stmt, op, data);
		      }
		  }
	      }
	  }
    }
  else if (gimple_code (stmt) == GIMPLE_RETURN)
    {
      tree op = gimple_return_retval (stmt);
      if (op)
	{
	  if (visit_addr
	      && TREE_CODE (op) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
	  else if (visit_load)
	    {
	      op = get_base_loadstore (op);
	      if (op)
		ret |= visit_load (stmt, op, data);
	    }
	}
    }
  else if (visit_addr
	   && gimple_code (stmt) == GIMPLE_PHI)
    {
      for (i = 0; i < gimple_phi_num_args (stmt); ++i)
	{
	  tree op = PHI_ARG_DEF (stmt, i);
	  if (TREE_CODE (op) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
	}
    }

  return ret;
}
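/* Usage sketch (hypothetical callbacks, not part of this file): counting
   the memory accesses of a statement could look like

     static bool
     count_load (gimple s ATTRIBUTE_UNUSED, tree base ATTRIBUTE_UNUSED,
		 void *data)
     {
       ((unsigned *) data)[0]++;
       return false;
     }

     static bool
     count_store (gimple s ATTRIBUTE_UNUSED, tree base ATTRIBUTE_UNUSED,
		  void *data)
     {
       ((unsigned *) data)[1]++;
       return false;
     }

     unsigned counts[2] = { 0, 0 };
     walk_stmt_load_store_addr_ops (stmt, counts, count_load, count_store,
				    NULL);

   The walk does not stop early; its return value is simply the OR of all
   callback results, so returning true from any callback makes the whole
   walk report true.  */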
/* Like walk_stmt_load_store_addr_ops but with NULL visit_addr.  IPA-CP
   should make a faster clone for this case.  */

bool
walk_stmt_load_store_ops (gimple stmt, void *data,
			  bool (*visit_load)(gimple, tree, void *),
			  bool (*visit_store)(gimple, tree, void *))
{
  return walk_stmt_load_store_addr_ops (stmt, data,
					visit_load, visit_store, NULL);
}
/* Helper for gimple_ior_addresses_taken.  */

static bool
gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
			      tree addr, void *data)
{
  bitmap addresses_taken = (bitmap) data;
  addr = get_base_address (addr);
  if (addr
      && DECL_P (addr))
    {
      bitmap_set_bit (addresses_taken, DECL_UID (addr));
      return true;
    }
  return false;
}

/* Set the bit for the uid of all decls that have their address taken
   in STMT in the ADDRESSES_TAKEN bitmap.  Returns true if there
   were any in this stmt.  */

bool
gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
{
  return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
					gimple_ior_addresses_taken_1);
}
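/* For example, given a statement like 'p_1 = &a', calling

     bitmap addrs = BITMAP_ALLOC (NULL);
     bool any = gimple_ior_addresses_taken (addrs, stmt);

   sets the bit for DECL_UID (a) in ADDRS and returns true.  (Sketch only;
   'a' and 'stmt' stand for the corresponding decl and statement.)  */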
/* Return a printable name for symbol DECL.  */

const char *
gimple_decl_printable_name (tree decl, int verbosity)
{
  if (!DECL_NAME (decl))
    return NULL;

  if (DECL_ASSEMBLER_NAME_SET_P (decl))
    {
      const char *str, *mangled_str;
      int dmgl_opts = DMGL_NO_OPTS;

      if (verbosity >= 2)
	{
	  dmgl_opts = DMGL_VERBOSE
		      | DMGL_ANSI
		      | DMGL_GNU_V3
		      | DMGL_RET_POSTFIX;
	  if (TREE_CODE (decl) == FUNCTION_DECL)
	    dmgl_opts |= DMGL_PARAMS;
	}

      mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
      str = cplus_demangle_v3 (mangled_str, dmgl_opts);
      return (str) ? str : mangled_str;
    }

  return IDENTIFIER_POINTER (DECL_NAME (decl));
}
/* Return true when STMT is a call to the built-in function CODE.  */

bool
gimple_call_builtin_p (gimple stmt, enum built_in_function code)
{
  tree fndecl;
  return (is_gimple_call (stmt)
	  && (fndecl = gimple_call_fndecl (stmt)) != NULL
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (fndecl) == code);
}
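/* Typical use (sketch): recognizing a memcpy call in a pass:

     if (gimple_call_builtin_p (stmt, BUILT_IN_MEMCPY))
       {
	 tree dest = gimple_call_arg (stmt, 0);
	 tree src = gimple_call_arg (stmt, 1);
	 tree len = gimple_call_arg (stmt, 2);
       }

   Only builtins of class BUILT_IN_NORMAL are matched; machine-specific
   builtins are not.  */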
/* Return true if STMT clobbers memory.  STMT is required to be a
   GIMPLE_ASM statement.  */

bool
gimple_asm_clobbers_memory_p (const_gimple stmt)
{
  unsigned i;

  for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
    {
      tree op = gimple_asm_clobber_op (stmt, i);
      if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
	return true;
    }

  return false;
}
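/* For example, a statement built from

     __asm__ __volatile__ ("" : : : "memory");

   carries a "memory" clobber and makes this predicate return true, while an
   asm that only clobbers registers does not.  */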
#include "gt-gimple.h"