/* Gimple IR support functions.

   Copyright 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "target.h"
#include "tree.h"
#include "ggc.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "gimple.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "value-prof.h"
#include "flags.h"
#include "alias.h"
#include "demangle.h"
#include "langhooks.h"

/* Global type table.  FIXME lto, it should be possible to re-use some
   of the type hashing routines in tree.c (type_hash_canon, type_hash_lookup,
   etc), but those assume that types were built with the various
   build_*_type routines which is not the case with the streamer.  */
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
  htab_t gimple_types;
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
  htab_t gimple_canonical_types;
static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
  htab_t type_hash_cache;
static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
  htab_t canonical_type_hash_cache;

/* Global type comparison cache.  This is keyed by TYPE_UID for space
   efficiency and thus cannot use and does not need GC.  */
static htab_t gtc_visited;
static struct obstack gtc_ob;
/* All the tuples have their operand vector (if present) at the very bottom
   of the structure.  Therefore, the offset required to find the
   operands vector is the size of the structure minus the size of the
   1-element tree array at the end (see gimple_ops).  */
#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
	(HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
EXPORTED_CONST size_t gimple_ops_offset_[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof (struct STRUCT),
static const size_t gsstruct_code_size[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

#define DEFGSCODE(SYM, NAME, GSSCODE)	NAME,
const char *const gimple_code_name[] = {
#include "gimple.def"
};
#undef DEFGSCODE

#define DEFGSCODE(SYM, NAME, GSSCODE)	GSSCODE,
EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
#include "gimple.def"
};
#undef DEFGSCODE

#ifdef GATHER_STATISTICS
/* Gimple stats.  */

int gimple_alloc_counts[(int) gimple_alloc_kind_all];
int gimple_alloc_sizes[(int) gimple_alloc_kind_all];

/* Keep in sync with gimple.h:enum gimple_alloc_kind.  */
static const char * const gimple_alloc_kind_names[] = {
    "assignments",
    "phi nodes",
    "conditionals",
    "sequences",
    "everything else"
};

#endif /* GATHER_STATISTICS */
/* A cache of gimple_seq objects.  Sequences are created and destroyed
   fairly often during gimplification.  */
static GTY ((deletable)) struct gimple_seq_d *gimple_seq_cache;

/* Private API manipulation functions shared only with some
   other files.  */
extern void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
extern void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);

/* Gimple tuple constructors.
   Note: Any constructor taking a ``gimple_seq'' as a parameter can
   be passed a NULL to start with an empty sequence.  */

/* Set the code for statement G to CODE.  */

static inline void
gimple_set_code (gimple g, enum gimple_code code)
{
  g->gsbase.code = code;
}

/* Return the number of bytes needed to hold a GIMPLE statement with
   code CODE.  */

static inline size_t
gimple_size (enum gimple_code code)
{
  return gsstruct_code_size[gss_for_code (code)];
}

/* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
   operands.  */

gimple
gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
{
  size_t size;
  gimple stmt;

  size = gimple_size (code);
  if (num_ops > 0)
    size += sizeof (tree) * (num_ops - 1);

#ifdef GATHER_STATISTICS
  {
    enum gimple_alloc_kind kind = gimple_alloc_kind (code);
    gimple_alloc_counts[(int) kind]++;
    gimple_alloc_sizes[(int) kind] += size;
  }
#endif

  stmt = ggc_alloc_cleared_gimple_statement_d_stat (size PASS_MEM_STAT);
  gimple_set_code (stmt, code);
  gimple_set_num_ops (stmt, num_ops);

  /* Do not call gimple_set_modified here as it has other side
     effects and this tuple is still not completely built.  */
  stmt->gsbase.modified = 1;

  return stmt;
}
/* Set SUBCODE to be the code of the expression computed by statement G.  */

static inline void
gimple_set_subcode (gimple g, unsigned subcode)
{
  /* We only have 16 bits for the RHS code.  Assert that we are not
     overflowing it.  */
  gcc_assert (subcode < (1 << 16));
  g->gsbase.subcode = subcode;
}


/* Build a tuple with operands.  CODE is the statement to build (which
   must be one of the GIMPLE_WITH_OPS tuples).  SUBCODE is the sub-code
   for the new tuple.  NUM_OPS is the number of operands to allocate.  */

#define gimple_build_with_ops(c, s, n) \
  gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)

static gimple
gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
                            unsigned num_ops MEM_STAT_DECL)
{
  gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
  gimple_set_subcode (s, subcode);

  return s;
}


/* Build a GIMPLE_RETURN statement returning RETVAL.  */

gimple
gimple_build_return (tree retval)
{
  gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 1);
  if (retval)
    gimple_return_set_retval (s, retval);
  return s;
}
/* Reset alias information on call S.  */

void
gimple_call_reset_alias_info (gimple s)
{
  if (gimple_call_flags (s) & ECF_CONST)
    memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
  else
    pt_solution_reset (gimple_call_use_set (s));
  if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
    memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
  else
    pt_solution_reset (gimple_call_clobber_set (s));
}

/* Helper for gimple_build_call, gimple_build_call_vec and
   gimple_build_call_from_tree.  Build the basic components of a
   GIMPLE_CALL statement to function FN with NARGS arguments.  */

static inline gimple
gimple_build_call_1 (tree fn, unsigned nargs)
{
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  if (TREE_CODE (fn) == FUNCTION_DECL)
    fn = build_fold_addr_expr (fn);
  gimple_set_op (s, 1, fn);
  gimple_call_set_fntype (s, TREE_TYPE (TREE_TYPE (fn)));
  gimple_call_reset_alias_info (s);
  return s;
}
/* Build a GIMPLE_CALL statement to function FN with the arguments
   specified in vector ARGS.  */

gimple
gimple_build_call_vec (tree fn, VEC(tree, heap) *args)
{
  unsigned i;
  unsigned nargs = VEC_length (tree, args);
  gimple call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, VEC_index (tree, args, i));

  return call;
}


/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  The ... are the arguments.  */

gimple
gimple_build_call (tree fn, unsigned nargs, ...)
{
  va_list ap;
  gimple call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}
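/* Illustrative usage sketch (hypothetical call site, not part of this
   file): building `tmp = f (a, b)', where FNDECL is f's FUNCTION_DECL
   and TMP, A and B are already valid gimple operands:

     gimple call = gimple_build_call (fndecl, 2, a, b);
     gimple_call_set_lhs (call, tmp);
     gimple_seq_add_stmt (&seq, call);  */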
/* Build a GIMPLE_CALL statement from CALL_EXPR T.  Note that T is
   assumed to be in GIMPLE form already.  Minimal checking is done of
   this fact.  */

gimple
gimple_build_call_from_tree (tree t)
{
  unsigned i, nargs;
  gimple call;
  tree fndecl = get_callee_fndecl (t);

  gcc_assert (TREE_CODE (t) == CALL_EXPR);

  nargs = call_expr_nargs (t);
  call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));

  gimple_set_block (call, TREE_BLOCK (t));

  /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL.  */
  gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
  gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
  gimple_call_set_cannot_inline (call, CALL_CANNOT_INLINE_P (t));
  gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
    gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t));
  else
    gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
  gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
  gimple_call_set_nothrow (call, TREE_NOTHROW (t));
  gimple_set_no_warning (call, TREE_NO_WARNING (t));

  return call;
}


/* Extract the operands and code for expression EXPR into *SUBCODE_P,
   *OP1_P, *OP2_P and *OP3_P respectively.  */

void
extract_ops_from_tree_1 (tree expr, enum tree_code *subcode_p, tree *op1_p,
			 tree *op2_p, tree *op3_p)
{
  enum gimple_rhs_class grhs_class;

  *subcode_p = TREE_CODE (expr);
  grhs_class = get_gimple_rhs_class (*subcode_p);

  if (grhs_class == GIMPLE_TERNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = TREE_OPERAND (expr, 2);
    }
  else if (grhs_class == GIMPLE_BINARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_UNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      *op1_p = expr;
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else
    gcc_unreachable ();
}
/* Build a GIMPLE_ASSIGN statement.

   LHS of the assignment.
   RHS of the assignment which can be unary or binary.  */

gimple
gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
{
  enum tree_code subcode;
  tree op1, op2, op3;

  extract_ops_from_tree_1 (rhs, &subcode, &op1, &op2, &op3);
  return gimple_build_assign_with_ops_stat (subcode, lhs, op1, op2, op3
                                            PASS_MEM_STAT);
}


/* Build a GIMPLE_ASSIGN statement with sub-code SUBCODE and operands
   OP1 and OP2.  If OP2 is NULL then SUBCODE must be of class
   GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS.  */

gimple
gimple_build_assign_with_ops_stat (enum tree_code subcode, tree lhs, tree op1,
                                   tree op2, tree op3 MEM_STAT_DECL)
{
  unsigned num_ops;
  gimple p;

  /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
     code).  */
  num_ops = get_gimple_rhs_num_ops (subcode) + 1;

  p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
                                  PASS_MEM_STAT);
  gimple_assign_set_lhs (p, lhs);
  gimple_assign_set_rhs1 (p, op1);
  if (op2)
    {
      gcc_assert (num_ops > 2);
      gimple_assign_set_rhs2 (p, op2);
    }

  if (op3)
    {
      gcc_assert (num_ops > 3);
      gimple_assign_set_rhs3 (p, op3);
    }

  return p;
}


/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.

   DST/SRC are the destination and source respectively.  You can pass
   ungimplified trees in DST or SRC, in which case they will be
   converted to a gimple operand if necessary.

   This function returns the newly created GIMPLE_ASSIGN tuple.  */

gimple
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  gimplify_and_add (t, seq_p);
  ggc_free (t);

  return gimple_seq_last_stmt (*seq_p);
}
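/* Illustrative usage sketch (hypothetical call site): appending
   `x = y + 1' to SEQ.  SRC may be an arbitrary (ungimplified)
   expression tree; gimplify_and_add flattens it as needed:

     tree src = build2 (PLUS_EXPR, TREE_TYPE (y), y,
			build_int_cst (TREE_TYPE (y), 1));
     gimple stmt = gimplify_assign (x, src, &seq);  */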
/* Build a GIMPLE_COND statement.

   PRED is the condition used to compare LHS and the RHS.
   T_LABEL is the label to jump to if the condition is true.
   F_LABEL is the label to jump to otherwise.  */

gimple
gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
		   tree t_label, tree f_label)
{
  gimple p;

  gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
  p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
  gimple_cond_set_lhs (p, lhs);
  gimple_cond_set_rhs (p, rhs);
  gimple_cond_set_true_label (p, t_label);
  gimple_cond_set_false_label (p, f_label);
  return p;
}
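/* Illustrative usage sketch (hypothetical operands and labels):
   building `if (a < b) goto t_label; else goto f_label;':

     gimple cond = gimple_build_cond (LT_EXPR, a, b, t_label, f_label);
     gimple_seq_add_stmt (&seq, cond);  */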
/* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND.  */

void
gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
                               tree *lhs_p, tree *rhs_p)
{
  gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
	      || TREE_CODE (cond) == TRUTH_NOT_EXPR
	      || is_gimple_min_invariant (cond)
	      || SSA_VAR_P (cond));

  extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);

  /* Canonicalize conditionals of the form 'if (!VAL)'.  */
  if (*code_p == TRUTH_NOT_EXPR)
    {
      *code_p = EQ_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
  /* Canonicalize conditionals of the form 'if (VAL)'.  */
  else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
    {
      *code_p = NE_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
}
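/* For example, a condition of `!x' comes out of this function as
   `x == 0' (EQ_EXPR) and a bare `x' as `x != 0' (NE_EXPR), so a
   GIMPLE_COND always carries an explicit comparison.  */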
/* Build a GIMPLE_COND statement from the conditional expression tree
   COND.  T_LABEL and F_LABEL are as in gimple_build_cond.  */

gimple
gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  return gimple_build_cond (code, lhs, rhs, t_label, f_label);
}

/* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
   boolean expression tree COND.  */

void
gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  gimple_cond_set_condition (stmt, code, lhs, rhs);
}

/* Build a GIMPLE_LABEL statement for LABEL.  */

gimple
gimple_build_label (tree label)
{
  gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
  gimple_label_set_label (p, label);
  return p;
}

/* Build a GIMPLE_GOTO statement to label DEST.  */

gimple
gimple_build_goto (tree dest)
{
  gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
  gimple_goto_set_dest (p, dest);
  return p;
}


/* Build a GIMPLE_NOP statement.  */

gimple
gimple_build_nop (void)
{
  return gimple_alloc (GIMPLE_NOP, 0);
}


/* Build a GIMPLE_BIND statement.
   VARS are the variables in BODY.
   BLOCK is the containing block.  */

gimple
gimple_build_bind (tree vars, gimple_seq body, tree block)
{
  gimple p = gimple_alloc (GIMPLE_BIND, 0);
  gimple_bind_set_vars (p, vars);
  if (body)
    gimple_bind_set_body (p, body);
  if (block)
    gimple_bind_set_block (p, block);
  return p;
}
/* Helper function to set the simple fields of an asm stmt.

   STRING is a pointer to a string that is the asm block's assembly code.
   NINPUTS is the number of register inputs.
   NOUTPUTS is the number of register outputs.
   NCLOBBERS is the number of clobbered registers.
   NLABELS is the number of destination labels.  */

static inline gimple
gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
                    unsigned nclobbers, unsigned nlabels)
{
  gimple p;
  int size = strlen (string);

  /* ASMs with labels cannot have outputs.  This should have been
     enforced by the front end.  */
  gcc_assert (nlabels == 0 || noutputs == 0);

  p = gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
			     ninputs + noutputs + nclobbers + nlabels);

  p->gimple_asm.ni = ninputs;
  p->gimple_asm.no = noutputs;
  p->gimple_asm.nc = nclobbers;
  p->gimple_asm.nl = nlabels;
  p->gimple_asm.string = ggc_alloc_string (string, size);

#ifdef GATHER_STATISTICS
  gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;
#endif

  return p;
}

/* Build a GIMPLE_ASM statement.

   STRING is the assembly code.
   INPUTS is a vector of the input register parameters.
   OUTPUTS is a vector of the output register parameters.
   CLOBBERS is a vector of the clobbered register parameters.
   LABELS is a vector of destination labels.  */

gimple
gimple_build_asm_vec (const char *string, VEC(tree,gc)* inputs,
                      VEC(tree,gc)* outputs, VEC(tree,gc)* clobbers,
		      VEC(tree,gc)* labels)
{
  gimple p;
  unsigned i;

  p = gimple_build_asm_1 (string,
                          VEC_length (tree, inputs),
                          VEC_length (tree, outputs),
                          VEC_length (tree, clobbers),
			  VEC_length (tree, labels));

  for (i = 0; i < VEC_length (tree, inputs); i++)
    gimple_asm_set_input_op (p, i, VEC_index (tree, inputs, i));

  for (i = 0; i < VEC_length (tree, outputs); i++)
    gimple_asm_set_output_op (p, i, VEC_index (tree, outputs, i));

  for (i = 0; i < VEC_length (tree, clobbers); i++)
    gimple_asm_set_clobber_op (p, i, VEC_index (tree, clobbers, i));

  for (i = 0; i < VEC_length (tree, labels); i++)
    gimple_asm_set_label_op (p, i, VEC_index (tree, labels, i));

  return p;
}
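/* Illustrative usage sketch (hypothetical operand vectors): something
   like `__asm__ __volatile__ ("mfence" : : : "memory")' would be
   built roughly as:

     gimple s = gimple_build_asm_vec ("mfence", NULL, NULL, clobbers,
				      NULL);
     gimple_asm_set_volatile (s, true);

   where CLOBBERS is a VEC(tree,gc) holding the "memory" clobber;
   NULL vectors have length zero.  */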
/* Build a GIMPLE_CATCH statement.

  TYPES are the catch types.
  HANDLER is the exception handler.  */

gimple
gimple_build_catch (tree types, gimple_seq handler)
{
  gimple p = gimple_alloc (GIMPLE_CATCH, 0);
  gimple_catch_set_types (p, types);
  if (handler)
    gimple_catch_set_handler (p, handler);

  return p;
}

/* Build a GIMPLE_EH_FILTER statement.

   TYPES are the filter's types.
   FAILURE is the filter's failure action.  */

gimple
gimple_build_eh_filter (tree types, gimple_seq failure)
{
  gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
  gimple_eh_filter_set_types (p, types);
  if (failure)
    gimple_eh_filter_set_failure (p, failure);

  return p;
}

/* Build a GIMPLE_EH_MUST_NOT_THROW statement.  */

gimple
gimple_build_eh_must_not_throw (tree decl)
{
  gimple p = gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0);

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
  gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
  gimple_eh_must_not_throw_set_fndecl (p, decl);

  return p;
}

/* Build a GIMPLE_TRY statement.

   EVAL is the expression to evaluate.
   CLEANUP is the cleanup expression.
   KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
   whether this is a try/catch or a try/finally respectively.  */

gimple
gimple_build_try (gimple_seq eval, gimple_seq cleanup,
    		  enum gimple_try_flags kind)
{
  gimple p;

  gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
  p = gimple_alloc (GIMPLE_TRY, 0);
  gimple_set_subcode (p, kind);
  if (eval)
    gimple_try_set_eval (p, eval);
  if (cleanup)
    gimple_try_set_cleanup (p, cleanup);

  return p;
}

/* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.

   CLEANUP is the cleanup expression.  */

gimple
gimple_build_wce (gimple_seq cleanup)
{
  gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
  if (cleanup)
    gimple_wce_set_cleanup (p, cleanup);

  return p;
}
/* Build a GIMPLE_RESX statement.  */

gimple
gimple_build_resx (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}


/* The helper for constructing a gimple switch statement.
   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the default.
   DEFAULT_LABEL is the default label for the switch statement.  */

gimple
gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
{
  /* nlabels + 1 default label + 1 index.  */
  gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
				    1 + (default_label != NULL) + nlabels);
  gimple_switch_set_index (p, index);
  if (default_label)
    gimple_switch_set_default_label (p, default_label);
  return p;
}


/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the DEFAULT_LABEL.
   ... are the labels excluding the default.  */

gimple
gimple_build_switch (unsigned nlabels, tree index, tree default_label, ...)
{
  va_list al;
  unsigned i, offset;
  gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);

  /* Store the rest of the labels.  */
  va_start (al, default_label);
  offset = (default_label != NULL);
  for (i = 0; i < nlabels; i++)
    gimple_switch_set_label (p, i + offset, va_arg (al, tree));
  va_end (al);

  return p;
}


/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   DEFAULT_LABEL is the default label.
   ARGS is a vector of labels excluding the default.  */

gimple
gimple_build_switch_vec (tree index, tree default_label, VEC(tree, heap) *args)
{
  unsigned i, offset, nlabels = VEC_length (tree, args);
  gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);

  /* Copy the labels from the vector to the switch statement.  */
  offset = (default_label != NULL);
  for (i = 0; i < nlabels; i++)
    gimple_switch_set_label (p, i + offset, VEC_index (tree, args, i));

  return p;
}
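/* Illustrative usage sketch (hypothetical labels): lowering
   `switch (i) { case 0: ... default: ... }':

     gimple s = gimple_build_switch_vec (i, default_case, other_cases);

   where OTHER_CASES is a VEC(tree,heap) of CASE_LABEL_EXPRs sorted by
   case value and DEFAULT_CASE may be NULL if not known yet.  */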
/* Build a GIMPLE_EH_DISPATCH statement.  */

gimple
gimple_build_eh_dispatch (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}

/* Build a new GIMPLE_DEBUG_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gimple
gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
{
  gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
					 (unsigned)GIMPLE_DEBUG_BIND, 2
					 PASS_MEM_STAT);

  gimple_debug_bind_set_var (p, var);
  gimple_debug_bind_set_value (p, value);
  if (stmt)
    {
      gimple_set_block (p, gimple_block (stmt));
      gimple_set_location (p, gimple_location (stmt));
    }

  return p;
}


/* Build a GIMPLE_OMP_CRITICAL statement.

   BODY is the sequence of statements for which only one thread can execute.
   NAME is an optional identifier for this critical block.  */

gimple
gimple_build_omp_critical (gimple_seq body, tree name)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
  gimple_omp_critical_set_name (p, name);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}
/* Build a GIMPLE_OMP_FOR statement.

   BODY is the sequence of statements inside the for loop.
   CLAUSES are any of the OMP loop construct's clauses: private, firstprivate,
   lastprivate, reductions, ordered, schedule, and nowait.
   COLLAPSE is the collapse count.
   PRE_BODY is the sequence of statements that are loop invariant.  */

gimple
gimple_build_omp_for (gimple_seq body, tree clauses, size_t collapse,
		      gimple_seq pre_body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_for_set_clauses (p, clauses);
  p->gimple_omp_for.collapse = collapse;
  p->gimple_omp_for.iter
      = ggc_alloc_cleared_vec_gimple_omp_for_iter (collapse);
  if (pre_body)
    gimple_omp_for_set_pre_body (p, pre_body);

  return p;
}


/* Build a GIMPLE_OMP_PARALLEL statement.

   BODY is the sequence of statements which are executed in parallel.
   CLAUSES are the OMP parallel construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG are the shared data argument(s).  */

gimple
gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
			   tree data_arg)
{
  gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_parallel_set_clauses (p, clauses);
  gimple_omp_parallel_set_child_fn (p, child_fn);
  gimple_omp_parallel_set_data_arg (p, data_arg);

  return p;
}


/* Build a GIMPLE_OMP_TASK statement.

   BODY is the sequence of statements which are executed by the explicit task.
   CLAUSES are the OMP task construct's clauses.
   CHILD_FN is the function created for the task threads to execute.
   DATA_ARG are the shared data argument(s).
   COPY_FN is the optional function for firstprivate initialization.
   ARG_SIZE and ARG_ALIGN are size and alignment of the data block.  */

gimple
gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
		       tree data_arg, tree copy_fn, tree arg_size,
		       tree arg_align)
{
  gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_task_set_clauses (p, clauses);
  gimple_omp_task_set_child_fn (p, child_fn);
  gimple_omp_task_set_data_arg (p, data_arg);
  gimple_omp_task_set_copy_fn (p, copy_fn);
  gimple_omp_task_set_arg_size (p, arg_size);
  gimple_omp_task_set_arg_align (p, arg_align);

  return p;
}
/* Build a GIMPLE_OMP_SECTION statement for a sections statement.

   BODY is the sequence of statements in the section.  */

gimple
gimple_build_omp_section (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_MASTER statement.

   BODY is the sequence of statements to be executed by just the master.  */

gimple
gimple_build_omp_master (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_CONTINUE statement.

   CONTROL_DEF is the definition of the control variable.
   CONTROL_USE is the use of the control variable.  */

gimple
gimple_build_omp_continue (tree control_def, tree control_use)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
  gimple_omp_continue_set_control_def (p, control_def);
  gimple_omp_continue_set_control_use (p, control_use);
  return p;
}

/* Build a GIMPLE_OMP_ORDERED statement.

   BODY is the sequence of statements inside a loop that will be executed
   in sequence.  */

gimple
gimple_build_omp_ordered (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_RETURN statement.
   WAIT_P is true if this is a non-waiting return.  */

gimple
gimple_build_omp_return (bool wait_p)
{
  gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
  if (wait_p)
    gimple_omp_return_set_nowait (p);

  return p;
}


/* Build a GIMPLE_OMP_SECTIONS statement.

   BODY is a sequence of section statements.
   CLAUSES are any of the OMP sections construct's clauses: private,
   firstprivate, lastprivate, reduction, and nowait.  */

gimple
gimple_build_omp_sections (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_sections_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_SECTIONS_SWITCH.  */

gimple
gimple_build_omp_sections_switch (void)
{
  return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
}
/* Build a GIMPLE_OMP_SINGLE statement.

   BODY is the sequence of statements that will be executed once.
   CLAUSES are any of the OMP single construct's clauses: private,
   firstprivate, copyprivate, and nowait.  */

gimple
gimple_build_omp_single (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_single_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_ATOMIC_LOAD statement.  */

gimple
gimple_build_omp_atomic_load (tree lhs, tree rhs)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
  gimple_omp_atomic_load_set_lhs (p, lhs);
  gimple_omp_atomic_load_set_rhs (p, rhs);
  return p;
}

/* Build a GIMPLE_OMP_ATOMIC_STORE statement.

   VAL is the value we are storing.  */

gimple
gimple_build_omp_atomic_store (tree val)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
  gimple_omp_atomic_store_set_val (p, val);
  return p;
}

/* Build a GIMPLE_PREDICT statement.  PREDICT is one of the predictors from
   predict.def, OUTCOME is NOT_TAKEN or TAKEN.  */

gimple
gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
{
  gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
  /* Ensure all the predictors fit into the lower bits of the subcode.  */
  gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
  gimple_predict_set_predictor (p, predictor);
  gimple_predict_set_outcome (p, outcome);
  return p;
}
#if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.  */

void
gimple_check_failed (const_gimple gs, const char *file, int line,
		     const char *function, enum gimple_code code,
		     enum tree_code subcode)
{
  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
      		  gimple_code_name[code],
		  tree_code_name[subcode],
		  gimple_code_name[gimple_code (gs)],
		  gs->gsbase.subcode > 0
		    ? tree_code_name[gs->gsbase.subcode]
		    : "",
		  function, trim_filename (file), line);
}
#endif /* ENABLE_GIMPLE_CHECKING */


/* Allocate a new GIMPLE sequence in GC memory and return it.  If
   there are free sequences in GIMPLE_SEQ_CACHE return one of those
   instead.  */

gimple_seq
gimple_seq_alloc (void)
{
  gimple_seq seq = gimple_seq_cache;
  if (seq)
    {
      gimple_seq_cache = gimple_seq_cache->next_free;
      gcc_assert (gimple_seq_cache != seq);
      memset (seq, 0, sizeof (*seq));
    }
  else
    {
      seq = ggc_alloc_cleared_gimple_seq_d ();
#ifdef GATHER_STATISTICS
      gimple_alloc_counts[(int) gimple_alloc_kind_seq]++;
      gimple_alloc_sizes[(int) gimple_alloc_kind_seq] += sizeof (*seq);
#endif
    }

  return seq;
}

/* Return SEQ to the free pool of GIMPLE sequences.  */

void
gimple_seq_free (gimple_seq seq)
{
  if (seq == NULL)
    return;

  gcc_assert (gimple_seq_first (seq) == NULL);
  gcc_assert (gimple_seq_last (seq) == NULL);

  /* If this triggers, it's a sign that the same list is being freed
     twice.  */
  gcc_assert (seq != gimple_seq_cache || gimple_seq_cache == NULL);

  /* Add SEQ to the pool of free sequences.  */
  seq->next_free = gimple_seq_cache;
  gimple_seq_cache = seq;
}
/* Link gimple statement GS to the end of the sequence *SEQ_P.  If
   *SEQ_P is NULL, a new sequence is allocated.  */

void
gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
{
  gimple_stmt_iterator si;

  if (gs == NULL)
    return;

  if (*seq_p == NULL)
    *seq_p = gimple_seq_alloc ();

  si = gsi_last (*seq_p);
  gsi_insert_after (&si, gs, GSI_NEW_STMT);
}
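/* Illustrative usage sketch: the common idiom is to start from a NULL
   sequence and let this function allocate it on first use
   (hypothetical statements; any gimple statement works):

     gimple_seq seq = NULL;
     gimple_seq_add_stmt (&seq, gimple_build_label (label));
     gimple_seq_add_stmt (&seq, gimple_build_goto (dest));  */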
/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  */

void
gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  if (*dst_p == NULL)
    *dst_p = gimple_seq_alloc ();

  si = gsi_last (*dst_p);
  gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
}


/* Helper function of empty_body_p.  Return true if STMT is an empty
   statement.  */

static bool
empty_stmt_p (gimple stmt)
{
  if (gimple_code (stmt) == GIMPLE_NOP)
    return true;
  if (gimple_code (stmt) == GIMPLE_BIND)
    return empty_body_p (gimple_bind_body (stmt));
  return false;
}


/* Return true if BODY contains nothing but empty statements.  */

bool
empty_body_p (gimple_seq body)
{
  gimple_stmt_iterator i;

  if (gimple_seq_empty_p (body))
    return true;
  for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
    if (!empty_stmt_p (gsi_stmt (i))
	&& !is_gimple_debug (gsi_stmt (i)))
      return false;

  return true;
}


/* Perform a deep copy of sequence SRC and return the result.  */

gimple_seq
gimple_seq_copy (gimple_seq src)
{
  gimple_stmt_iterator gsi;
  gimple_seq new_seq = gimple_seq_alloc ();
  gimple stmt;

  for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gimple_copy (gsi_stmt (gsi));
      gimple_seq_add_stmt (&new_seq, stmt);
    }

  return new_seq;
}


/* Walk all the statements in the sequence SEQ calling walk_gimple_stmt
   on each one.  WI is as in walk_gimple_stmt.

   If walk_gimple_stmt returns non-NULL, the walk is stopped, the
   value is stored in WI->CALLBACK_RESULT and the statement that
   produced the value is returned.

   Otherwise, all the statements are walked and NULL is returned.  */

gimple
walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
		 walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
      if (ret)
	{
	  /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
	     to hold it.  */
	  gcc_assert (wi);
	  wi->callback_result = ret;
	  return gsi_stmt (gsi);
	}
    }

  if (wi)
    wi->callback_result = NULL_TREE;

  return NULL;
}
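/* Illustrative usage sketch (hypothetical callback): counting the
   call statements in SEQ using only the statement callback:

     static tree
     count_calls (gimple_stmt_iterator *gsi, bool *handled_ops,
		  struct walk_stmt_info *wi)
     {
       if (is_gimple_call (gsi_stmt (*gsi)))
	 ++*(unsigned *) wi->info;
       *handled_ops = true;   <-- do not bother walking the operands
       return NULL_TREE;
     }

     struct walk_stmt_info wi;
     unsigned count = 0;
     memset (&wi, 0, sizeof (wi));
     wi.info = &count;
     walk_gimple_seq (seq, count_calls, NULL, &wi);  */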
/* Helper function for walk_gimple_stmt.  Walk operands of a GIMPLE_ASM.  */

static tree
walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
		 struct walk_stmt_info *wi)
{
  tree ret, op;
  unsigned noutputs;
  const char **oconstraints;
  unsigned i, n;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  if (wi)
    wi->is_lhs = true;

  for (i = 0; i < noutputs; i++)
    {
      op = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg,
	                       &is_inout);
      if (wi)
	wi->val_only = (allows_reg || !allows_mem);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  n = gimple_asm_ninputs (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);
      if (wi)
	{
	  wi->val_only = (allows_reg || !allows_mem);
	  /* Although input "m" is not really an LHS, we need an lvalue.  */
	  wi->is_lhs = !wi->val_only;
	}
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  if (wi)
    {
      wi->is_lhs = false;
      wi->val_only = true;
    }

  n = gimple_asm_nlabels (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_label_op (stmt, i);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  return NULL_TREE;
}
/* Helper function of WALK_GIMPLE_STMT.  Walk every tree operand in
   STMT.  CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.

   CALLBACK_OP is called on each operand of STMT via walk_tree.
   Additional parameters to walk_tree must be stored in WI.  For each operand
   OP, walk_tree is called as:

	walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)

   If CALLBACK_OP returns non-NULL for an operand, the remaining
   operands are not scanned.

   The return value is that returned by the last call to walk_tree, or
   NULL_TREE if no CALLBACK_OP is specified.  */

tree
walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
		struct walk_stmt_info *wi)
{
  struct pointer_set_t *pset = (wi) ? wi->pset : NULL;
  unsigned i;
  tree ret = NULL_TREE;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* Walk the RHS operands.  If the LHS is of a non-renamable type or
         is a register variable, we may use a COMPONENT_REF on the RHS.  */
      if (wi)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  wi->val_only
	    = (is_gimple_reg_type (TREE_TYPE (lhs)) && !is_gimple_reg (lhs))
	      || !gimple_assign_single_p (stmt);
	}

      for (i = 1; i < gimple_num_ops (stmt); i++)
	{
	  ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
			   pset);
	  if (ret)
	    return ret;
	}

      /* Walk the LHS.  If the RHS is appropriate for a memory, we
	 may use a COMPONENT_REF on the LHS.  */
      if (wi)
	{
	  /* If the RHS has more than 1 operand, it is not appropriate
	     for the memory.  */
	  wi->val_only = !is_gimple_mem_rhs (gimple_assign_rhs1 (stmt))
			 || !gimple_assign_single_p (stmt);
	  wi->is_lhs = true;
	}

      ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
      if (ret)
	return ret;

      if (wi)
	{
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;

    case GIMPLE_CALL:
      if (wi)
	{
	  wi->is_lhs = false;
	  wi->val_only = true;
	}

      ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
      if (ret)
	return ret;

      for (i = 0; i < gimple_call_num_args (stmt); i++)
	{
	  if (wi)
	    wi->val_only = is_gimple_reg_type (gimple_call_arg (stmt, i));
	  ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
			   pset);
	  if (ret)
	    return ret;
	}

      if (gimple_call_lhs (stmt))
	{
	  if (wi)
	    {
	      wi->is_lhs = true;
	      wi->val_only = is_gimple_reg_type (gimple_call_lhs (stmt));
	    }

	  ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
	  if (ret)
	    return ret;
	}

      if (wi)
	{
	  wi->is_lhs = false;
	  wi->val_only = true;
	}
      break;

    case GIMPLE_CATCH:
      ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_ASM:
      ret = walk_gimple_asm (stmt, callback_op, wi);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_CONTINUE:
      ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
		       callback_op, wi, pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
		       callback_op, wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_CRITICAL:
      ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	{
	  ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	}
      break;

    case GIMPLE_OMP_PARALLEL:
      ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_TASK:
      ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_SECTIONS:
      ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_SINGLE:
      ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_ATOMIC_LOAD:
      ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_ATOMIC_STORE:
      ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

      /* Tuples that do not have operands.  */
    case GIMPLE_NOP:
    case GIMPLE_RESX:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_PREDICT:
      break;

    default:
      {
	enum gimple_statement_structure_enum gss;
	gss = gimple_statement_structure (stmt);
	if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
	  for (i = 0; i < gimple_num_ops (stmt); i++)
	    {
	      ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
	      if (ret)
		return ret;
	    }
      }
      break;
    }

  return NULL_TREE;
}
/* Walk the current statement in GSI (optionally using traversal state
   stored in WI).  If WI is NULL, no state is kept during traversal.
   The callback CALLBACK_STMT is called.  If CALLBACK_STMT indicates
   that it has handled all the operands of the statement, its return
   value is returned.  Otherwise, the return value from CALLBACK_STMT
   is discarded and its operands are scanned.

   If CALLBACK_STMT is NULL or it didn't handle the operands,
   CALLBACK_OP is called on each operand of the statement via
   walk_gimple_op.  If walk_gimple_op returns non-NULL for any
   operand, the remaining operands are not scanned.  In this case, the
   return value from CALLBACK_OP is returned.

   In any other case, NULL_TREE is returned.  */

tree
walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
		  walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple ret;
  tree tree_ret;
  gimple stmt = gsi_stmt (*gsi);

  if (wi)
    wi->gsi = *gsi;

  if (wi && wi->want_locations && gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  ret = NULL;

  /* Invoke the statement callback.  Return if the callback handled
     all of STMT operands by itself.  */
  if (callback_stmt)
    {
      bool handled_ops = false;
      tree_ret = callback_stmt (gsi, &handled_ops, wi);
      if (handled_ops)
	return tree_ret;

      /* If CALLBACK_STMT did not handle operands, it should not have
	 a value to return.  */
      gcc_assert (tree_ret == NULL);

      /* Re-read stmt in case the callback changed it.  */
      stmt = gsi_stmt (*gsi);
    }

  /* If CALLBACK_OP is defined, invoke it on every operand of STMT.  */
  if (callback_op)
    {
      tree_ret = walk_gimple_op (stmt, callback_op, wi);
      if (tree_ret)
	return tree_ret;
    }

  /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      ret = walk_gimple_seq (gimple_bind_body (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_CATCH:
      ret = walk_gimple_seq (gimple_catch_handler (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_gimple_seq (gimple_eh_filter_failure (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_TRY:
      ret = walk_gimple_seq (gimple_try_eval (stmt), callback_stmt, callback_op,
			     wi);
      if (ret)
	return wi->callback_result;

      ret = walk_gimple_seq (gimple_try_cleanup (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_gimple_seq (gimple_omp_for_pre_body (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;

      /* FALL THROUGH.  */
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      ret = walk_gimple_seq (gimple_omp_body (stmt), callback_stmt, callback_op,
			     wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_WITH_CLEANUP_EXPR:
      ret = walk_gimple_seq (gimple_wce_cleanup (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    default:
      gcc_assert (!gimple_has_substatements (stmt));
      break;
    }

  return NULL;
}
/* Set sequence SEQ to be the GIMPLE body for function FNDECL.  */

void
gimple_set_body (tree fndecl, gimple_seq seq)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  if (fn == NULL)
    {
      /* If FNDECL still does not have a function structure associated
	 with it, then it does not make sense for it to receive a
	 GIMPLE body.  */
      gcc_assert (seq == NULL);
    }
  else
    fn->gimple_body = seq;
}


/* Return the body of GIMPLE statements for function FNDECL.  After the
   CFG pass, the function body doesn't exist anymore because it has
   been split up into basic blocks.  In this case, it returns
   NULL.  */

gimple_seq
gimple_body (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return fn ? fn->gimple_body : NULL;
}

/* Return true when FNDECL has a GIMPLE body either in unlowered
   or CFG form.  */

bool
gimple_has_body_p (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return (gimple_body (fndecl) || (fn && fn->cfg));
}

/* Detect flags from a GIMPLE_CALL.  This is just like
   call_expr_flags, but for gimple tuples.  */

int
gimple_call_flags (const_gimple stmt)
{
  int flags;
  tree decl = gimple_call_fndecl (stmt);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else
    flags = flags_from_decl_or_type (gimple_call_fntype (stmt));

  if (stmt->gsbase.subcode & GF_CALL_NOTHROW)
    flags |= ECF_NOTHROW;

  return flags;
}
/* Detects argument flags for argument number ARG on call STMT.  */

int
gimple_call_arg_flags (const_gimple stmt, unsigned arg)
{
  tree type = gimple_call_fntype (stmt);
  tree attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));

  if (!attr)
    return 0;

  attr = TREE_VALUE (TREE_VALUE (attr));
  if (1 + arg >= (unsigned) TREE_STRING_LENGTH (attr))
    return 0;

  switch (TREE_STRING_POINTER (attr)[1 + arg])
    {
    case 'x':
    case 'X':
      return EAF_UNUSED;

    case 'R':
      return EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE;

    case 'r':
      return EAF_NOCLOBBER | EAF_NOESCAPE;

    case 'W':
      return EAF_DIRECT | EAF_NOESCAPE;

    case 'w':
      return EAF_NOESCAPE;

    case '.':
    default:
      return 0;
    }
}
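/* For example (encoding as described for the "fn spec" attribute):
   position 0 of the attribute string describes the return value and
   position 1 + ARG describes argument ARG, so a string of ".rW" says
   the return value is unconstrained, argument 0 is only read from and
   does not escape ('r'), and argument 1 is written through directly
   but does not escape ('W').  */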
/* Detects return flags for the call STMT.  */

int
gimple_call_return_flags (const_gimple stmt)
{
  tree type;
  tree attr = NULL_TREE;

  if (gimple_call_flags (stmt) & ECF_MALLOC)
    return ERF_NOALIAS;

  type = gimple_call_fntype (stmt);
  attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
  if (!attr)
    return 0;

  attr = TREE_VALUE (TREE_VALUE (attr));
  if (TREE_STRING_LENGTH (attr) < 1)
    return 0;

  switch (TREE_STRING_POINTER (attr)[0])
    {
    case '1':
    case '2':
    case '3':
    case '4':
      return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');

    case 'm':
      return ERF_NOALIAS;

    case '.':
    default:
      return 0;
    }
}


/* Return true if GS is a copy assignment.  */

bool
gimple_assign_copy_p (gimple gs)
{
  return (gimple_assign_single_p (gs)
          && is_gimple_val (gimple_op (gs, 1)));
}


/* Return true if GS is an SSA_NAME copy assignment.  */

bool
gimple_assign_ssa_name_copy_p (gimple gs)
{
  return (gimple_assign_single_p (gs)
	  && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
	  && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
}


/* Return true if GS is an assignment with a unary RHS, but the
   operator has no effect on the assigned value.  The logic is adapted
   from STRIP_NOPS.  This predicate is intended to be used in tuplifying
   instances in which STRIP_NOPS was previously applied to the RHS of
   an assignment.

   NOTE: In the use cases that led to the creation of this function
   and of gimple_assign_single_p, it is typical to test for either
   condition and to proceed in the same manner.  In each case, the
   assigned value is represented by the single RHS operand of the
   assignment.  I suspect there may be cases where gimple_assign_copy_p,
   gimple_assign_single_p, or equivalent logic is used where a similar
   treatment of unary NOPs is appropriate.  */

bool
gimple_assign_unary_nop_p (gimple gs)
{
  return (is_gimple_assign (gs)
          && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
              || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
          && gimple_assign_rhs1 (gs) != error_mark_node
          && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
              == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
}
/* Set BB to be the basic block holding G.  */

void
gimple_set_bb (gimple stmt, basic_block bb)
{
  stmt->gsbase.bb = bb;

  /* If the statement is a label, add the label to block-to-labels map
     so that we can speed up edge creation for GIMPLE_GOTOs.  */
  if (cfun->cfg && gimple_code (stmt) == GIMPLE_LABEL)
    {
      tree t;
      int uid;

      t = gimple_label_label (stmt);
      uid = LABEL_DECL_UID (t);
      if (uid == -1)
	{
	  unsigned old_len = VEC_length (basic_block, label_to_block_map);
	  LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
	  if (old_len <= (unsigned) uid)
	    {
	      unsigned new_len = 3 * uid / 2 + 1;

	      VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
				     new_len);
	    }
	}

      VEC_replace (basic_block, label_to_block_map, uid, bb);
    }
}


/* Modify the RHS of the assignment pointed-to by GSI using the
   operands in the expression tree EXPR.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.

   This function is useful to convert an existing tree expression into
   the flat representation used for the RHS of a GIMPLE assignment.
   It will reallocate memory as needed to expand or shrink the number
   of operand slots needed to represent EXPR.

   NOTE: If you find yourself building a tree and then calling this
   function, you are most certainly doing it the slow way.  It is much
   better to build a new assignment or to use the function
   gimple_assign_set_rhs_with_ops, which does not require an
   expression tree to be built.  */

void
gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
{
  enum tree_code subcode;
  tree op1, op2, op3;

  extract_ops_from_tree_1 (expr, &subcode, &op1, &op2, &op3);
  gimple_assign_set_rhs_with_ops_1 (gsi, subcode, op1, op2, op3);
}
/* Set the RHS of assignment statement pointed-to by GSI to CODE with
   operands OP1, OP2 and OP3.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.  */

void
gimple_assign_set_rhs_with_ops_1 (gimple_stmt_iterator *gsi, enum tree_code code,
				  tree op1, tree op2, tree op3)
{
  unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
  gimple stmt = gsi_stmt (*gsi);

  /* If the new CODE needs more operands, allocate a new statement.  */
  if (gimple_num_ops (stmt) < new_rhs_ops + 1)
    {
      tree lhs = gimple_assign_lhs (stmt);
      gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
      memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
      gsi_replace (gsi, new_stmt, true);
      stmt = new_stmt;

      /* The LHS needs to be reset as this also changes the SSA name
	 on the LHS.  */
      gimple_assign_set_lhs (stmt, lhs);
    }

  gimple_set_num_ops (stmt, new_rhs_ops + 1);
  gimple_set_subcode (stmt, code);
  gimple_assign_set_rhs1 (stmt, op1);
  if (new_rhs_ops > 1)
    gimple_assign_set_rhs2 (stmt, op2);
  if (new_rhs_ops > 2)
    gimple_assign_set_rhs3 (stmt, op3);
}
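/* Illustrative usage sketch (hypothetical statement): turning the
   copy `x = y' at GSI into `x = y + z' in place; the tuple is
   reallocated behind the scenes to gain the extra operand slot:

     gimple_assign_set_rhs_with_ops (gsi, PLUS_EXPR, y, z);
     update_stmt (gsi_stmt (*gsi));  */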
/* Return the LHS of a statement that performs an assignment,
   either a GIMPLE_ASSIGN or a GIMPLE_CALL.  Returns NULL_TREE
   for a call to a function that returns no value, or for a
   statement other than an assignment or a call.  */

tree
gimple_get_lhs (const_gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);

  if (code == GIMPLE_ASSIGN)
    return gimple_assign_lhs (stmt);
  else if (code == GIMPLE_CALL)
    return gimple_call_lhs (stmt);
  else
    return NULL_TREE;
}


/* Set the LHS of a statement that performs an assignment,
   either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */

void
gimple_set_lhs (gimple stmt, tree lhs)
{
  enum gimple_code code = gimple_code (stmt);

  if (code == GIMPLE_ASSIGN)
    gimple_assign_set_lhs (stmt, lhs);
  else if (code == GIMPLE_CALL)
    gimple_call_set_lhs (stmt, lhs);
  else
    gcc_unreachable ();
}

/* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
   GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
   expression with a different value.

   This will update any annotations (say debug bind stmts) referring
   to the original LHS, so that they use the RHS instead.  This is
   done even if NLHS and LHS are the same, for it is understood that
   the RHS will be modified afterwards, and NLHS will not be assigned
   an equivalent value.

   Adjusting any non-annotation uses of the LHS, if needed, is a
   responsibility of the caller.

   The effect of this call should be pretty much the same as that of
   inserting a copy of STMT before STMT, and then removing the
   original stmt, at which time gsi_remove() would have updated the
   annotations, but using this function saves all the inserting,
   copying and removing.  */

void
gimple_replace_lhs (gimple stmt, tree nlhs)
{
  if (MAY_HAVE_DEBUG_STMTS)
    {
      tree lhs = gimple_get_lhs (stmt);

      gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);

      insert_debug_temp_for_var_def (NULL, lhs);
    }

  gimple_set_lhs (stmt, nlhs);
}
/* Return a deep copy of statement STMT.  All the operands from STMT
   are reallocated and copied using unshare_expr.  The DEF, USE, VDEF
   and VUSE operand arrays are set to empty in the new copy.  */

gimple
gimple_copy (gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);
  unsigned num_ops = gimple_num_ops (stmt);
  gimple copy = gimple_alloc (code, num_ops);
  unsigned i;

  /* Shallow copy all the fields from STMT.  */
  memcpy (copy, stmt, gimple_size (code));

  /* If STMT has sub-statements, deep-copy them as well.  */
  if (gimple_has_substatements (stmt))
    {
      gimple_seq new_seq;
      tree t;

      switch (gimple_code (stmt))
	{
	case GIMPLE_BIND:
	  new_seq = gimple_seq_copy (gimple_bind_body (stmt));
	  gimple_bind_set_body (copy, new_seq);
	  gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
	  gimple_bind_set_block (copy, gimple_bind_block (stmt));
	  break;

	case GIMPLE_CATCH:
	  new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
	  gimple_catch_set_handler (copy, new_seq);
	  t = unshare_expr (gimple_catch_types (stmt));
	  gimple_catch_set_types (copy, t);
	  break;

	case GIMPLE_EH_FILTER:
	  new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
	  gimple_eh_filter_set_failure (copy, new_seq);
	  t = unshare_expr (gimple_eh_filter_types (stmt));
	  gimple_eh_filter_set_types (copy, t);
	  break;

	case GIMPLE_TRY:
	  new_seq = gimple_seq_copy (gimple_try_eval (stmt));
	  gimple_try_set_eval (copy, new_seq);
	  new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
	  gimple_try_set_cleanup (copy, new_seq);
	  break;

	case GIMPLE_OMP_FOR:
	  new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
	  gimple_omp_for_set_pre_body (copy, new_seq);
	  t = unshare_expr (gimple_omp_for_clauses (stmt));
	  gimple_omp_for_set_clauses (copy, t);
	  copy->gimple_omp_for.iter
	    = ggc_alloc_vec_gimple_omp_for_iter
	      (gimple_omp_for_collapse (stmt));
	  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	    {
	      gimple_omp_for_set_cond (copy, i,
				       gimple_omp_for_cond (stmt, i));
	      gimple_omp_for_set_index (copy, i,
					gimple_omp_for_index (stmt, i));
	      t = unshare_expr (gimple_omp_for_initial (stmt, i));
	      gimple_omp_for_set_initial (copy, i, t);
	      t = unshare_expr (gimple_omp_for_final (stmt, i));
	      gimple_omp_for_set_final (copy, i, t);
	      t = unshare_expr (gimple_omp_for_incr (stmt, i));
	      gimple_omp_for_set_incr (copy, i, t);
	    }
	  goto copy_omp_body;

	case GIMPLE_OMP_PARALLEL:
	  t = unshare_expr (gimple_omp_parallel_clauses (stmt));
	  gimple_omp_parallel_set_clauses (copy, t);
	  t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
	  gimple_omp_parallel_set_child_fn (copy, t);
	  t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
	  gimple_omp_parallel_set_data_arg (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_TASK:
	  t = unshare_expr (gimple_omp_task_clauses (stmt));
	  gimple_omp_task_set_clauses (copy, t);
	  t = unshare_expr (gimple_omp_task_child_fn (stmt));
	  gimple_omp_task_set_child_fn (copy, t);
	  t = unshare_expr (gimple_omp_task_data_arg (stmt));
	  gimple_omp_task_set_data_arg (copy, t);
	  t = unshare_expr (gimple_omp_task_copy_fn (stmt));
	  gimple_omp_task_set_copy_fn (copy, t);
	  t = unshare_expr (gimple_omp_task_arg_size (stmt));
	  gimple_omp_task_set_arg_size (copy, t);
	  t = unshare_expr (gimple_omp_task_arg_align (stmt));
	  gimple_omp_task_set_arg_align (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_CRITICAL:
	  t = unshare_expr (gimple_omp_critical_name (stmt));
	  gimple_omp_critical_set_name (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_SECTIONS:
	  t = unshare_expr (gimple_omp_sections_clauses (stmt));
	  gimple_omp_sections_set_clauses (copy, t);
	  t = unshare_expr (gimple_omp_sections_control (stmt));
	  gimple_omp_sections_set_control (copy, t);
	  /* FALLTHRU  */

	case GIMPLE_OMP_SINGLE:
	case GIMPLE_OMP_SECTION:
	case GIMPLE_OMP_MASTER:
	case GIMPLE_OMP_ORDERED:
	copy_omp_body:
	  new_seq = gimple_seq_copy (gimple_omp_body (stmt));
	  gimple_omp_set_body (copy, new_seq);
	  break;

	case GIMPLE_WITH_CLEANUP_EXPR:
	  new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
	  gimple_wce_set_cleanup (copy, new_seq);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Make copy of operands.  */
  if (num_ops > 0)
    {
      for (i = 0; i < num_ops; i++)
	gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));

      /* Clear out SSA operand vectors on COPY.  */
      if (gimple_has_ops (stmt))
	{
	  gimple_set_def_ops (copy, NULL);
	  gimple_set_use_ops (copy, NULL);
	}

      if (gimple_has_mem_ops (stmt))
	{
	  gimple_set_vdef (copy, gimple_vdef (stmt));
	  gimple_set_vuse (copy, gimple_vuse (stmt));
	}

      /* SSA operands need to be updated.  */
      gimple_set_modified (copy, true);
    }

  return copy;
}
/* Set the MODIFIED flag to MODIFIEDP, iff the gimple statement G has
   a MODIFIED field.  */

void
gimple_set_modified (gimple s, bool modifiedp)
{
  if (gimple_has_ops (s))
    s->gsbase.modified = (unsigned) modifiedp;
}


/* Return true if statement S has side-effects.  We consider a
   statement to have side effects if:

   - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
   - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS.  */

bool
gimple_has_side_effects (const_gimple s)
{
  unsigned i;

  if (is_gimple_debug (s))
    return false;

  /* We don't have to scan the arguments to check for
     volatile arguments, though, at present, we still
     do a scan to check for TREE_SIDE_EFFECTS.  */
  if (gimple_has_volatile_ops (s))
    return true;

  if (is_gimple_call (s))
    {
      unsigned nargs = gimple_call_num_args (s);

      if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
	return true;
      else if (gimple_call_flags (s) & ECF_LOOPING_CONST_OR_PURE)
	/* An infinite loop is considered a side effect.  */
	return true;

      if (gimple_call_lhs (s)
	  && TREE_SIDE_EFFECTS (gimple_call_lhs (s)))
	{
	  gcc_assert (gimple_has_volatile_ops (s));
	  return true;
	}

      if (TREE_SIDE_EFFECTS (gimple_call_fn (s)))
	return true;

      for (i = 0; i < nargs; i++)
	if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i)))
	  {
	    gcc_assert (gimple_has_volatile_ops (s));
	    return true;
	  }

      return false;
    }
  else
    {
      for (i = 0; i < gimple_num_ops (s); i++)
	if (TREE_SIDE_EFFECTS (gimple_op (s, i)))
	  {
	    gcc_assert (gimple_has_volatile_ops (s));
	    return true;
	  }
    }

  return false;
}

/* Return true if the RHS of statement S has side effects.
   We may use it to determine if it is admissible to replace
   an assignment or call with a copy of a previously-computed
   value.  In such cases, side-effects due to the LHS are
   preserved.  */

bool
gimple_rhs_has_side_effects (const_gimple s)
{
  unsigned i;

  if (is_gimple_call (s))
    {
      unsigned nargs = gimple_call_num_args (s);

      if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
	return true;

      /* We cannot use gimple_has_volatile_ops here,
	 because we must ignore a volatile LHS.  */
      if (TREE_SIDE_EFFECTS (gimple_call_fn (s))
	  || TREE_THIS_VOLATILE (gimple_call_fn (s)))
	{
	  gcc_assert (gimple_has_volatile_ops (s));
	  return true;
	}

      for (i = 0; i < nargs; i++)
	if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i))
	    || TREE_THIS_VOLATILE (gimple_call_arg (s, i)))
	  return true;

      return false;
    }
  else if (is_gimple_assign (s))
    {
      /* Skip the first operand, the LHS.  */
      for (i = 1; i < gimple_num_ops (s); i++)
	if (TREE_SIDE_EFFECTS (gimple_op (s, i))
	    || TREE_THIS_VOLATILE (gimple_op (s, i)))
	  {
	    gcc_assert (gimple_has_volatile_ops (s));
	    return true;
	  }
    }
  else if (is_gimple_debug (s))
    return false;
  else
    {
      /* For statements without an LHS, examine all arguments.  */
      for (i = 0; i < gimple_num_ops (s); i++)
	if (TREE_SIDE_EFFECTS (gimple_op (s, i))
	    || TREE_THIS_VOLATILE (gimple_op (s, i)))
	  {
	    gcc_assert (gimple_has_volatile_ops (s));
	    return true;
	  }
    }

  return false;
}
2382 /* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
2383 Return true if S can trap. When INCLUDE_MEM is true, check whether
2384 the memory operations could trap. When INCLUDE_STORES is true and
2385 S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked. */
2388 gimple_could_trap_p_1 (gimple s, bool include_mem, bool include_stores)
2390 tree t, div = NULL_TREE;
2395 unsigned i, start = (is_gimple_assign (s) && !include_stores) ? 1 : 0;
2397 for (i = start; i < gimple_num_ops (s); i++)
2398 if (tree_could_trap_p (gimple_op (s, i)))
2402 switch (gimple_code (s))
2405 return gimple_asm_volatile_p (s);
2408 t = gimple_call_fndecl (s);
2409 /* Assume that calls to weak functions may trap. */
2410 if (!t || !DECL_P (t) || DECL_WEAK (t))
2415 t = gimple_expr_type (s);
2416 op = gimple_assign_rhs_code (s);
2417 if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
2418 div = gimple_assign_rhs2 (s);
2419 return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
2420 (INTEGRAL_TYPE_P (t)
2421 && TYPE_OVERFLOW_TRAPS (t)),
2431 /* Return true if statement S can trap. */
2434 gimple_could_trap_p (gimple s)
2436 return gimple_could_trap_p_1 (s, true, true);
2439 /* Return true if RHS of a GIMPLE_ASSIGN S can trap. */
2442 gimple_assign_rhs_could_trap_p (gimple s)
2444 gcc_assert (is_gimple_assign (s));
2445 return gimple_could_trap_p_1 (s, true, false);
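/* Illustrative sketch (not part of GCC): a speculation guard built on
   the predicates above.  A statement is only safe to evaluate
   unconditionally if it neither traps nor has side effects.  */

static bool
safe_to_speculate_p (gimple stmt)
{
  return (!gimple_has_side_effects (stmt)
	  && !gimple_could_trap_p (stmt));
}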
2449 /* Print debugging information for gimple stmts generated. */
2452 dump_gimple_statistics (void)
2454 #ifdef GATHER_STATISTICS
2455 int i, total_tuples = 0, total_bytes = 0;
2457 fprintf (stderr, "\nGIMPLE statements\n");
2458 fprintf (stderr, "Kind Stmts Bytes\n");
2459 fprintf (stderr, "---------------------------------------\n");
2460 for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
2462 fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
2463 gimple_alloc_counts[i], gimple_alloc_sizes[i]);
2464 total_tuples += gimple_alloc_counts[i];
2465 total_bytes += gimple_alloc_sizes[i];
2467 fprintf (stderr, "---------------------------------------\n");
2468 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
2469 fprintf (stderr, "---------------------------------------\n");
2471 fprintf (stderr, "No gimple statistics\n");
2476 /* Return the number of operands needed on the RHS of a GIMPLE
2477 assignment for an expression with tree code CODE. */
2480 get_gimple_rhs_num_ops (enum tree_code code)
2482 enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
2484 if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
2486 else if (rhs_class == GIMPLE_BINARY_RHS)
2488 else if (rhs_class == GIMPLE_TERNARY_RHS)
2494 #define DEFTREECODE(SYM, STRING, TYPE, NARGS) \
2496 ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS \
2497 : ((TYPE) == tcc_binary \
2498 || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS \
2499 : ((TYPE) == tcc_constant \
2500 || (TYPE) == tcc_declaration \
2501 || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS \
2502 : ((SYM) == TRUTH_AND_EXPR \
2503 || (SYM) == TRUTH_OR_EXPR \
2504 || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS \
2505 : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS \
2506 : ((SYM) == WIDEN_MULT_PLUS_EXPR \
2507 || (SYM) == WIDEN_MULT_MINUS_EXPR \
2508 || (SYM) == DOT_PROD_EXPR \
2509 || (SYM) == REALIGN_LOAD_EXPR \
2510 || (SYM) == FMA_EXPR) ? GIMPLE_TERNARY_RHS \
2511 : ((SYM) == COND_EXPR \
2512 || (SYM) == CONSTRUCTOR \
2513 || (SYM) == OBJ_TYPE_REF \
2514 || (SYM) == ASSERT_EXPR \
2515 || (SYM) == ADDR_EXPR \
2516 || (SYM) == WITH_SIZE_EXPR \
2517 || (SYM) == SSA_NAME \
2518 || (SYM) == VEC_COND_EXPR) ? GIMPLE_SINGLE_RHS \
2519 : GIMPLE_INVALID_RHS),
2520 #define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,
2522 const unsigned char gimple_rhs_class_table[] = {
2523 #include "all-tree.def"
2527 #undef END_OF_BASE_TREE_CODES
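/* Worked examples of the classification above (illustrative):
     PLUS_EXPR is tcc_binary, so it maps to GIMPLE_BINARY_RHS and
     get_gimple_rhs_num_ops returns 2;
     NEGATE_EXPR is tcc_unary -> GIMPLE_UNARY_RHS, 1 operand;
     FMA_EXPR is listed explicitly -> GIMPLE_TERNARY_RHS, 3 operands;
     SSA_NAME is listed explicitly -> GIMPLE_SINGLE_RHS, 1 operand.  */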
2529 /* For the definitive definition of GIMPLE, see doc/tree-ssa.texi. */
2531 /* Validation of GIMPLE expressions. */
2533 /* Returns true iff T is a valid RHS for an assignment to a renamed
2534 user -- or front-end generated artificial -- variable. */
2537 is_gimple_reg_rhs (tree t)
2539 return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
2542 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
2543 LHS, or for a call argument. */
2546 is_gimple_mem_rhs (tree t)
2548 /* If we're dealing with a renamable type, either source or dest must be
2549 a renamed variable. */
2550 if (is_gimple_reg_type (TREE_TYPE (t)))
2551 return is_gimple_val (t);
2553 return is_gimple_val (t) || is_gimple_lvalue (t);
2556 /* Return true if T is a valid LHS for a GIMPLE assignment expression. */
2559 is_gimple_lvalue (tree t)
2561 return (is_gimple_addressable (t)
2562 || TREE_CODE (t) == WITH_SIZE_EXPR
2563 /* These are complex lvalues, but don't have addresses, so they go here. */
2565 || TREE_CODE (t) == BIT_FIELD_REF);
2568 /* Return true if T is a GIMPLE condition. */
2571 is_gimple_condexpr (tree t)
2573 return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
2574 && !tree_could_throw_p (t)
2575 && is_gimple_val (TREE_OPERAND (t, 0))
2576 && is_gimple_val (TREE_OPERAND (t, 1))));
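/* Illustrative examples: 'a_1' and 'a_1 < b_2' (with gimple-value
   operands and no possible throw) are valid GIMPLE conditions;
   'f () < b_2' is not, since the call is not a gimple value.  */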
2579 /* Return true if T is something whose address can be taken. */
2582 is_gimple_addressable (tree t)
2584 return (is_gimple_id (t) || handled_component_p (t)
2585 || TREE_CODE (t) == MEM_REF);
2588 /* Return true if T is a valid gimple constant. */
2591 is_gimple_constant (const_tree t)
2593 switch (TREE_CODE (t))
2603 /* Vector constant constructors are gimple invariant. */
2605 if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2606 return TREE_CONSTANT (t);
2615 /* Return true if T is a gimple address. */
2618 is_gimple_address (const_tree t)
2622 if (TREE_CODE (t) != ADDR_EXPR)
2625 op = TREE_OPERAND (t, 0);
2626 while (handled_component_p (op))
2628 if ((TREE_CODE (op) == ARRAY_REF
2629 || TREE_CODE (op) == ARRAY_RANGE_REF)
2630 && !is_gimple_val (TREE_OPERAND (op, 1)))
2633 op = TREE_OPERAND (op, 0);
2636 if (CONSTANT_CLASS_P (op) || TREE_CODE (op) == MEM_REF)
2639 switch (TREE_CODE (op))
2654 /* Strip out all handled components that produce invariant offsets. */
2658 strip_invariant_refs (const_tree op)
2660 while (handled_component_p (op))
2662 switch (TREE_CODE (op))
2665 case ARRAY_RANGE_REF:
2666 if (!is_gimple_constant (TREE_OPERAND (op, 1))
2667 || TREE_OPERAND (op, 2) != NULL_TREE
2668 || TREE_OPERAND (op, 3) != NULL_TREE)
2673 if (TREE_OPERAND (op, 2) != NULL_TREE)
2679 op = TREE_OPERAND (op, 0);
2685 /* Return true if T is a gimple invariant address. */
2688 is_gimple_invariant_address (const_tree t)
2692 if (TREE_CODE (t) != ADDR_EXPR)
2695 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2699 if (TREE_CODE (op) == MEM_REF)
2701 const_tree op0 = TREE_OPERAND (op, 0);
2702 return (TREE_CODE (op0) == ADDR_EXPR
2703 && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
2704 || decl_address_invariant_p (TREE_OPERAND (op0, 0))));
2707 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
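/* Illustrative examples: '&global_var' and '&str_cst[3]' (a constant
   index into a STRING_CST) are invariant addresses, as is the address
   of a local variable at the intraprocedural level, since its frame
   slot does not move within the function.  The IPA variant below
   additionally rejects such stack addresses.  */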
2710 /* Return true if T is a gimple invariant address at IPA level
2711 (so addresses of variables on stack are not allowed). */
2714 is_gimple_ip_invariant_address (const_tree t)
2718 if (TREE_CODE (t) != ADDR_EXPR)
2721 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2723 return op && (CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op));
2726 /* Return true if T is a GIMPLE minimal invariant. It's a restricted
2727 form of function invariant. */
2730 is_gimple_min_invariant (const_tree t)
2732 if (TREE_CODE (t) == ADDR_EXPR)
2733 return is_gimple_invariant_address (t);
2735 return is_gimple_constant (t);
2738 /* Return true if T is a GIMPLE interprocedural invariant. It's a restricted
2739 form of gimple minimal invariant. */
2742 is_gimple_ip_invariant (const_tree t)
2744 if (TREE_CODE (t) == ADDR_EXPR)
2745 return is_gimple_ip_invariant_address (t);
2747 return is_gimple_constant (t);
2750 /* Return true if T looks like a valid GIMPLE statement. */
2753 is_gimple_stmt (tree t)
2755 const enum tree_code code = TREE_CODE (t);
2760 /* The only valid NOP_EXPR is the empty statement. */
2761 return IS_EMPTY_STMT (t);
2765 /* These are only valid if they're void. */
2766 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
2772 case CASE_LABEL_EXPR:
2773 case TRY_CATCH_EXPR:
2774 case TRY_FINALLY_EXPR:
2775 case EH_FILTER_EXPR:
2778 case STATEMENT_LIST:
2788 /* These are always void. */
2794 /* These are valid regardless of their type. */
2802 /* Return true if T is a variable. */
2805 is_gimple_variable (tree t)
2807 return (TREE_CODE (t) == VAR_DECL
2808 || TREE_CODE (t) == PARM_DECL
2809 || TREE_CODE (t) == RESULT_DECL
2810 || TREE_CODE (t) == SSA_NAME);
2813 /* Return true if T is a GIMPLE identifier (something with an address). */
2816 is_gimple_id (tree t)
2818 return (is_gimple_variable (t)
2819 || TREE_CODE (t) == FUNCTION_DECL
2820 || TREE_CODE (t) == LABEL_DECL
2821 || TREE_CODE (t) == CONST_DECL
2822 /* Allow string constants, since they are addressable. */
2823 || TREE_CODE (t) == STRING_CST);
2826 /* Return true if TYPE is a suitable type for a scalar register variable. */
2829 is_gimple_reg_type (tree type)
2831 return !AGGREGATE_TYPE_P (type);
2834 /* Return true if T is a non-aggregate register variable. */
2837 is_gimple_reg (tree t)
2839 if (TREE_CODE (t) == SSA_NAME)
2840 t = SSA_NAME_VAR (t);
2842 if (!is_gimple_variable (t))
2845 if (!is_gimple_reg_type (TREE_TYPE (t)))
2848 /* A volatile decl is not acceptable because we can't reuse it as
2849 needed. We need to copy it into a temp first. */
2850 if (TREE_THIS_VOLATILE (t))
2853 /* We define "registers" as things that can be renamed as needed,
2854 which with our infrastructure does not apply to memory. */
2855 if (needs_to_live_in_memory (t))
2858 /* Hard register variables are an interesting case. For those that
2859 are call-clobbered, we don't know where all the calls are, since
2860 we don't (want to) take into account which operations will turn
2861 into libcalls at the rtl level. For those that are call-saved,
2862 we don't currently model the fact that calls may in fact change
2863 global hard registers, nor do we examine ASM_CLOBBERS at the tree
2864 level, and so miss variable changes they might imply. All around,
2865 it seems safest to not do too much optimization with these at the
2866 tree level at all. We'll have to rely on the rtl optimizers to
2867 clean this up, as there we've got all the appropriate bits exposed. */
2868 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2871 /* Complex and vector values must have been put into SSA-like form.
2872 That is, no assignments to the individual components. */
2873 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
2874 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2875 return DECL_GIMPLE_REG_P (t);
2881 /* Return true if T is a GIMPLE variable whose address is not needed. */
2884 is_gimple_non_addressable (tree t)
2886 if (TREE_CODE (t) == SSA_NAME)
2887 t = SSA_NAME_VAR (t);
2889 return (is_gimple_variable (t) && ! needs_to_live_in_memory (t));
2892 /* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant. */
2895 is_gimple_val (tree t)
2897 /* Make loads from volatiles and memory vars explicit. */
2898 if (is_gimple_variable (t)
2899 && is_gimple_reg_type (TREE_TYPE (t))
2900 && !is_gimple_reg (t))
2903 return (is_gimple_variable (t) || is_gimple_min_invariant (t));
2906 /* Similarly, but accept hard registers as inputs to asm statements. */
2909 is_gimple_asm_val (tree t)
2911 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2914 return is_gimple_val (t);
2917 /* Return true if T is a GIMPLE minimal lvalue. */
2920 is_gimple_min_lval (tree t)
2922 if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
2924 return (is_gimple_id (t) || TREE_CODE (t) == MEM_REF);
2927 /* Return true if T is a valid function operand of a CALL_EXPR. */
2930 is_gimple_call_addr (tree t)
2932 return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
2935 /* Return true if T is a valid address operand of a MEM_REF. */
2938 is_gimple_mem_ref_addr (tree t)
2940 return (is_gimple_reg (t)
2941 || TREE_CODE (t) == INTEGER_CST
2942 || (TREE_CODE (t) == ADDR_EXPR
2943 && (CONSTANT_CLASS_P (TREE_OPERAND (t, 0))
2944 || decl_address_invariant_p (TREE_OPERAND (t, 0)))));
2947 /* If T makes a function call, return the corresponding CALL_EXPR operand.
2948 Otherwise, return NULL_TREE. */
2951 get_call_expr_in (tree t)
2953 if (TREE_CODE (t) == MODIFY_EXPR)
2954 t = TREE_OPERAND (t, 1);
2955 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2956 t = TREE_OPERAND (t, 0);
2957 if (TREE_CODE (t) == CALL_EXPR)
2963 /* Given a memory reference expression T, return its base address.
2964 The base address of a memory reference expression is the main
2965 object being referenced. For instance, the base address for
2966 'array[i].fld[j]' is 'array'. You can think of this as stripping
2967 away the offset part from a memory address.
2969 This function calls handled_component_p to strip away all the inner
2970 parts of the memory reference until it reaches the base object. */
2973 get_base_address (tree t)
2975 while (handled_component_p (t))
2976 t = TREE_OPERAND (t, 0);
2978 if ((TREE_CODE (t) == MEM_REF
2979 || TREE_CODE (t) == TARGET_MEM_REF)
2980 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
2981 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
2983 if (TREE_CODE (t) == SSA_NAME
2985 || TREE_CODE (t) == STRING_CST
2986 || TREE_CODE (t) == CONSTRUCTOR
2987 || INDIRECT_REF_P (t)
2988 || TREE_CODE (t) == MEM_REF
2989 || TREE_CODE (t) == TARGET_MEM_REF)
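/* Illustrative examples: for 'a.b[i].c' the base address is 'a'; for
   '(*p).x', where the dereference is a MEM_REF off the pointer SSA
   name 'p', the component stripping stops at the MEM_REF itself and
   that MEM_REF is what is returned.  */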
2996 recalculate_side_effects (tree t)
2998 enum tree_code code = TREE_CODE (t);
2999 int len = TREE_OPERAND_LENGTH (t);
3002 switch (TREE_CODE_CLASS (code))
3004 case tcc_expression:
3010 case PREDECREMENT_EXPR:
3011 case PREINCREMENT_EXPR:
3012 case POSTDECREMENT_EXPR:
3013 case POSTINCREMENT_EXPR:
3014 /* All of these have side-effects, no matter what their operands are. */
3023 case tcc_comparison: /* a comparison expression */
3024 case tcc_unary: /* a unary arithmetic expression */
3025 case tcc_binary: /* a binary arithmetic expression */
3026 case tcc_reference: /* a reference */
3027 case tcc_vl_exp: /* a function call */
3028 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
3029 for (i = 0; i < len; ++i)
3031 tree op = TREE_OPERAND (t, i);
3032 if (op && TREE_SIDE_EFFECTS (op))
3033 TREE_SIDE_EFFECTS (t) = 1;
3038 /* No side-effects. */
3046 /* Canonicalize a tree T for use in a COND_EXPR as conditional. Returns
3047 a canonicalized tree that is valid for a COND_EXPR or NULL_TREE, if
3048 we failed to create one. */
3051 canonicalize_cond_expr_cond (tree t)
3053 /* Strip conversions around boolean operations. */
3054 if (CONVERT_EXPR_P (t)
3055 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 0))))
3056 t = TREE_OPERAND (t, 0);
3058 /* For (bool)x use x != 0. */
3059 if (CONVERT_EXPR_P (t)
3060 && TREE_CODE (TREE_TYPE (t)) == BOOLEAN_TYPE)
3062 tree top0 = TREE_OPERAND (t, 0);
3063 t = build2 (NE_EXPR, TREE_TYPE (t),
3064 top0, build_int_cst (TREE_TYPE (top0), 0));
3066 /* For !x use x == 0. */
3067 else if (TREE_CODE (t) == TRUTH_NOT_EXPR)
3069 tree top0 = TREE_OPERAND (t, 0);
3070 t = build2 (EQ_EXPR, TREE_TYPE (t),
3071 top0, build_int_cst (TREE_TYPE (top0), 0));
3073 /* For cmp ? 1 : 0 use cmp. */
3074 else if (TREE_CODE (t) == COND_EXPR
3075 && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
3076 && integer_onep (TREE_OPERAND (t, 1))
3077 && integer_zerop (TREE_OPERAND (t, 2)))
3079 tree top0 = TREE_OPERAND (t, 0);
3080 t = build2 (TREE_CODE (top0), TREE_TYPE (t),
3081 TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
3084 if (is_gimple_condexpr (t))
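/* Worked examples of the canonicalizations above (illustrative):
     (int) (a < b)   -> a < b       (conversion around a truth value)
     (bool) x        -> x != 0
     !x              -> x == 0
     (a < b) ? 1 : 0 -> a < b
   If the result still fails is_gimple_condexpr, NULL_TREE is
   returned, per the function comment above.  */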
3090 /* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
3091 the positions marked by the set ARGS_TO_SKIP. */
3094 gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
3097 tree fn = gimple_call_fn (stmt);
3098 int nargs = gimple_call_num_args (stmt);
3099 VEC(tree, heap) *vargs = VEC_alloc (tree, heap, nargs);
3102 for (i = 0; i < nargs; i++)
3103 if (!bitmap_bit_p (args_to_skip, i))
3104 VEC_quick_push (tree, vargs, gimple_call_arg (stmt, i));
3106 new_stmt = gimple_build_call_vec (fn, vargs);
3107 VEC_free (tree, heap, vargs);
3108 if (gimple_call_lhs (stmt))
3109 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3111 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3112 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3114 gimple_set_block (new_stmt, gimple_block (stmt));
3115 if (gimple_has_location (stmt))
3116 gimple_set_location (new_stmt, gimple_location (stmt));
3117 gimple_call_copy_flags (new_stmt, stmt);
3118 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3120 gimple_set_modified (new_stmt, true);
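/* Illustrative sketch (assumed usage, not part of GCC): dropping the
   second argument of a call using the standard bitmap API.  */

static gimple
copy_call_skipping_second_arg (gimple stmt)
{
  bitmap args_to_skip = BITMAP_ALLOC (NULL);
  gimple new_stmt;

  bitmap_set_bit (args_to_skip, 1);	/* Argument indices are 0-based.  */
  new_stmt = gimple_call_copy_skip_args (stmt, args_to_skip);
  BITMAP_FREE (args_to_skip);
  return new_stmt;
}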
3126 static hashval_t gimple_type_hash_1 (const void *, enum gtc_mode);
3128 /* Structure used to maintain a cache of some type pairs compared by
3129 gimple_types_compatible_p when comparing aggregate types. There are
3130 three possible values for SAME_P:
3132 -2: The pair (T1, T2) has just been inserted in the table.
3133 0: T1 and T2 are different types.
3134 1: T1 and T2 are the same type.
3136 The two elements in the SAME_P array are indexed by the comparison mode. */
3143 signed char same_p[2];
3145 typedef struct type_pair_d *type_pair_t;
3147 DEF_VEC_P(type_pair_t);
3148 DEF_VEC_ALLOC_P(type_pair_t,heap);
3150 /* Return a hash value for the type pair pointed-to by P. */
3153 type_pair_hash (const void *p)
3155 const struct type_pair_d *pair = (const struct type_pair_d *) p;
3156 hashval_t val1 = pair->uid1;
3157 hashval_t val2 = pair->uid2;
3158 return (iterative_hash_hashval_t (val2, val1)
3159 ^ iterative_hash_hashval_t (val1, val2));
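/* Note (illustrative): mixing the two UIDs in both orders and XORing
   the results makes the hash symmetric, so H(a,b) == H(b,a).  This
   matches type_pair_eq below, which treats the pair as unordered.  */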
3162 /* Compare two type pairs pointed-to by P1 and P2. */
3165 type_pair_eq (const void *p1, const void *p2)
3167 const struct type_pair_d *pair1 = (const struct type_pair_d *) p1;
3168 const struct type_pair_d *pair2 = (const struct type_pair_d *) p2;
3169 return ((pair1->uid1 == pair2->uid1 && pair1->uid2 == pair2->uid2)
3170 || (pair1->uid1 == pair2->uid2 && pair1->uid2 == pair2->uid1));
3173 /* Lookup the pair of types T1 and T2 in *VISITED_P. Insert a new
3174 entry if none existed. */
3177 lookup_type_pair (tree t1, tree t2, htab_t *visited_p, struct obstack *ob_p)
3179 struct type_pair_d pair;
3183 if (*visited_p == NULL)
3185 *visited_p = htab_create (251, type_pair_hash, type_pair_eq, NULL);
3186 gcc_obstack_init (ob_p);
3189 pair.uid1 = TYPE_UID (t1);
3190 pair.uid2 = TYPE_UID (t2);
3191 slot = htab_find_slot (*visited_p, &pair, INSERT);
3194 p = *((type_pair_t *) slot);
3197 p = XOBNEW (ob_p, struct type_pair_d);
3198 p->uid1 = TYPE_UID (t1);
3199 p->uid2 = TYPE_UID (t2);
3208 /* Per pointer state for the SCC finding. The on_sccstack flag
3209 is not strictly required, it is true when there is no hash value
3210 recorded for the type and false otherwise. But querying that flag is cheaper. */
3215 unsigned int dfsnum;
3224 static unsigned int next_dfs_num;
3225 static unsigned int gtc_next_dfs_num;
3228 /* GIMPLE type merging cache. A direct-mapped cache based on TYPE_UID. */
3230 typedef struct GTY(()) gimple_type_leader_entry_s {
3233 } gimple_type_leader_entry;
3235 #define GIMPLE_TYPE_LEADER_SIZE 16381
3236 static GTY((deletable, length("GIMPLE_TYPE_LEADER_SIZE")))
3237 gimple_type_leader_entry *gimple_type_leader;
3239 /* Look up an existing leader for T and return it, or NULL_TREE if
3240 there is none in the cache. */
3243 gimple_lookup_type_leader (tree t)
3245 gimple_type_leader_entry *leader;
3247 if (!gimple_type_leader)
3250 leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
3251 if (leader->type != t)
3254 return leader->leader;
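/* Note (illustrative): the leader cache is direct-mapped, so a type
   whose UID collides modulo GIMPLE_TYPE_LEADER_SIZE simply evicts the
   previous entry; that is why the lookup above must verify
   leader->type == t before trusting leader->leader.  */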
3257 /* Return true if T1 and T2 have the same name. If FOR_COMPLETION_P is
3258 true, return false when either type lacks a name; otherwise two
3259 unnamed types are considered to have the same name. */
3262 compare_type_names_p (tree t1, tree t2, bool for_completion_p)
3264 tree name1 = TYPE_NAME (t1);
3265 tree name2 = TYPE_NAME (t2);
3267 /* Consider anonymous types all unique for completion. */
3268 if (for_completion_p
3269 && (!name1 || !name2))
3272 if (name1 && TREE_CODE (name1) == TYPE_DECL)
3274 name1 = DECL_NAME (name1);
3275 if (for_completion_p
3279 gcc_assert (!name1 || TREE_CODE (name1) == IDENTIFIER_NODE);
3281 if (name2 && TREE_CODE (name2) == TYPE_DECL)
3283 name2 = DECL_NAME (name2);
3284 if (for_completion_p
3288 gcc_assert (!name2 || TREE_CODE (name2) == IDENTIFIER_NODE);
3290 /* Identifiers can be compared with pointer equality rather
3291 than a string comparison. */
3298 /* Return true if the field decls F1 and F2 are at the same offset.
3300 This is intended to be used on GIMPLE types only. */
3303 gimple_compare_field_offset (tree f1, tree f2)
3305 if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
3307 tree offset1 = DECL_FIELD_OFFSET (f1);
3308 tree offset2 = DECL_FIELD_OFFSET (f2);
3309 return ((offset1 == offset2
3310 /* Once gimplification is done, self-referential offsets are
3311 instantiated as operand #2 of the COMPONENT_REF built for
3312 each access and reset. Therefore, they are not relevant
3313 anymore and fields are interchangeable provided that they
3314 represent the same access. */
3315 || (TREE_CODE (offset1) == PLACEHOLDER_EXPR
3316 && TREE_CODE (offset2) == PLACEHOLDER_EXPR
3317 && (DECL_SIZE (f1) == DECL_SIZE (f2)
3318 || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR
3319 && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR)
3320 || operand_equal_p (DECL_SIZE (f1), DECL_SIZE (f2), 0))
3321 && DECL_ALIGN (f1) == DECL_ALIGN (f2))
3322 || operand_equal_p (offset1, offset2, 0))
3323 && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
3324 DECL_FIELD_BIT_OFFSET (f2)));
3327 /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
3328 should be, so handle differing ones specially by decomposing
3329 the offset into a byte and bit offset manually. */
3330 if (host_integerp (DECL_FIELD_OFFSET (f1), 0)
3331 && host_integerp (DECL_FIELD_OFFSET (f2), 0))
3333 unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
3334 unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
3335 bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
3336 byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
3337 + bit_offset1 / BITS_PER_UNIT);
3338 bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
3339 byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
3340 + bit_offset2 / BITS_PER_UNIT);
3341 if (byte_offset1 != byte_offset2)
3343 return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
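/* Worked example for the decomposition above (illustrative), with
   BITS_PER_UNIT == 8: DECL_FIELD_OFFSET == 4 and DECL_FIELD_BIT_OFFSET
   == 12 give byte_offset == 4 + 12/8 == 5 and a residual bit offset of
   12 % 8 == 4; the two fields match only if both quantities agree.  */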
3349 /* Return true if the types T1 and T2 are a complete and an incomplete
3350 variant of the same type. */
3353 gimple_compatible_complete_and_incomplete_subtype_p (tree t1, tree t2)
3355 /* A complete and an incomplete record or union variant of the
3356 same tagged type compare equal here. */
3357 if (TREE_CODE (t1) == TREE_CODE (t2)
3358 && RECORD_OR_UNION_TYPE_P (t1)
3359 && (!COMPLETE_TYPE_P (t1)
3360 || !COMPLETE_TYPE_P (t2))
3361 && TYPE_QUALS (t1) == TYPE_QUALS (t2)
3362 && compare_type_names_p (TYPE_MAIN_VARIANT (t1),
3363 TYPE_MAIN_VARIANT (t2), true))
3369 gimple_types_compatible_p_1 (tree, tree, enum gtc_mode, type_pair_t,
3370 VEC(type_pair_t, heap) **,
3371 struct pointer_map_t *, struct obstack *);
3373 /* DFS visit the edge from the caller's type pair with state *STATE to
3374 the pair T1, T2 while operating in mode MODE.
3375 Update the merging status if it is not part of the SCC containing the
3376 caller's pair and return it.
3377 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
3380 gtc_visit (tree t1, tree t2, enum gtc_mode mode,
3382 VEC(type_pair_t, heap) **sccstack,
3383 struct pointer_map_t *sccstate,
3384 struct obstack *sccstate_obstack)
3386 struct sccs *cstate = NULL;
3390 /* Check first for the obvious case of pointer identity. */
3394 /* Check that we have two types to compare. */
3395 if (t1 == NULL_TREE || t2 == NULL_TREE)
3398 /* If the types have been previously registered and found equal
3400 if (mode == GTC_MERGE)
3402 tree leader1 = gimple_lookup_type_leader (t1);
3403 tree leader2 = gimple_lookup_type_leader (t2);
3406 || (leader1 && leader1 == leader2))
3409 else if (mode == GTC_DIAG)
3411 if (TYPE_CANONICAL (t1)
3412 && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2))
3416 /* Can't be the same type if the types don't have the same code. */
3417 if (TREE_CODE (t1) != TREE_CODE (t2))
3420 /* Can't be the same type if they have different CV qualifiers. */
3421 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
3424 /* Void types are always the same. */
3425 if (TREE_CODE (t1) == VOID_TYPE)
3428 /* Do some simple checks before doing three hashtable queries. */
3429 if (INTEGRAL_TYPE_P (t1)
3430 || SCALAR_FLOAT_TYPE_P (t1)
3431 || FIXED_POINT_TYPE_P (t1)
3432 || TREE_CODE (t1) == VECTOR_TYPE
3433 || TREE_CODE (t1) == COMPLEX_TYPE
3434 || TREE_CODE (t1) == OFFSET_TYPE)
3436 /* Can't be the same type if they have different alignment,
3437 sign, precision or mode. */
3438 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3439 || TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
3440 || TYPE_MODE (t1) != TYPE_MODE (t2)
3441 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
3444 if (TREE_CODE (t1) == INTEGER_TYPE
3445 && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
3446 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
3449 /* That's all we need to check for float and fixed-point types. */
3450 if (SCALAR_FLOAT_TYPE_P (t1)
3451 || FIXED_POINT_TYPE_P (t1))
3454 /* For integral types fall thru to more complex checks. */
3457 else if (AGGREGATE_TYPE_P (t1) || POINTER_TYPE_P (t1))
3459 /* Can't be the same type if they have different alignment or mode. */
3460 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3461 || TYPE_MODE (t1) != TYPE_MODE (t2))
3465 /* If the hash values of t1 and t2 are different the types can't
3466 possibly be the same. This helps keep the type-pair hashtable
3467 small, tracking comparisons only for hash collisions. */
3468 if (gimple_type_hash_1 (t1, mode) != gimple_type_hash_1 (t2, mode))
3471 /* Allocate a new cache entry for this comparison. */
3472 p = lookup_type_pair (t1, t2, >c_visited, >c_ob);
3473 if (p->same_p[mode] == 0 || p->same_p[mode] == 1)
3475 /* We have already decided whether T1 and T2 are the
3476 same, return the cached result. */
3477 return p->same_p[mode] == 1;
3480 if ((slot = pointer_map_contains (sccstate, p)) != NULL)
3481 cstate = (struct sccs *)*slot;
3482 /* Not yet visited. DFS recurse. */
3485 gimple_types_compatible_p_1 (t1, t2, mode, p,
3486 sccstack, sccstate, sccstate_obstack);
3487 cstate = (struct sccs *)* pointer_map_contains (sccstate, p);
3488 state->low = MIN (state->low, cstate->low);
3490 /* If the type is still on the SCC stack adjust the parents low. */
3491 if (cstate->dfsnum < state->dfsnum
3492 && cstate->on_sccstack)
3493 state->low = MIN (cstate->dfsnum, state->low);
3495 /* Return the current lattice value. We start with an equality
3496 assumption so types that are part of an SCC will be optimistically
3497 treated equal unless proven otherwise. */
3498 return cstate->u.same_p;
3501 /* Worker for gimple_types_compatible.
3502 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
3505 gimple_types_compatible_p_1 (tree t1, tree t2, enum gtc_mode mode,
3507 VEC(type_pair_t, heap) **sccstack,
3508 struct pointer_map_t *sccstate,
3509 struct obstack *sccstate_obstack)
3513 gcc_assert (p->same_p[mode] == -2);
3515 state = XOBNEW (sccstate_obstack, struct sccs);
3516 *pointer_map_insert (sccstate, p) = state;
3518 VEC_safe_push (type_pair_t, heap, *sccstack, p);
3519 state->dfsnum = gtc_next_dfs_num++;
3520 state->low = state->dfsnum;
3521 state->on_sccstack = true;
3522 /* Start with an equality assumption. As we DFS recurse into child
3523 SCCs this assumption may get revisited. */
3524 state->u.same_p = 1;
3526 /* If their attributes are not the same they can't be the same type. */
3527 if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
3528 goto different_types;
3530 /* Do type-specific comparisons. */
3531 switch (TREE_CODE (t1))
3535 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2), mode,
3536 state, sccstack, sccstate, sccstate_obstack))
3537 goto different_types;
3541 /* Array types are the same if the element types are the same and
3542 the number of elements are the same. */
3543 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2), mode,
3544 state, sccstack, sccstate, sccstate_obstack)
3545 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
3546 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
3547 goto different_types;
3550 tree i1 = TYPE_DOMAIN (t1);
3551 tree i2 = TYPE_DOMAIN (t2);
3553 /* For an incomplete external array, the type domain can be
3554 NULL_TREE. Check this condition also. */
3555 if (i1 == NULL_TREE && i2 == NULL_TREE)
3557 else if (i1 == NULL_TREE || i2 == NULL_TREE)
3558 goto different_types;
3559 /* If, for a complete array type, the possibly gimplified sizes
3560 are different, the types are different. */
3561 else if (((TYPE_SIZE (i1) != NULL) ^ (TYPE_SIZE (i2) != NULL))
3564 && !operand_equal_p (TYPE_SIZE (i1), TYPE_SIZE (i2), 0)))
3565 goto different_types;
3568 tree min1 = TYPE_MIN_VALUE (i1);
3569 tree min2 = TYPE_MIN_VALUE (i2);
3570 tree max1 = TYPE_MAX_VALUE (i1);
3571 tree max2 = TYPE_MAX_VALUE (i2);
3573 /* The minimum/maximum values have to be the same. */
3576 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
3577 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
3578 || operand_equal_p (min1, min2, 0))))
3581 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
3582 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
3583 || operand_equal_p (max1, max2, 0)))))
3586 goto different_types;
3591 /* Method types should belong to the same class. */
3592 if (!gtc_visit (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2),
3593 mode, state, sccstack, sccstate, sccstate_obstack))
3594 goto different_types;
3599 /* Function types are the same if the return type and argument types are the same. */
3601 if ((mode != GTC_DIAG
3602 || !gimple_compatible_complete_and_incomplete_subtype_p
3603 (TREE_TYPE (t1), TREE_TYPE (t2)))
3604 && !gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2), mode,
3605 state, sccstack, sccstate, sccstate_obstack))
3606 goto different_types;
3608 if (!comp_type_attributes (t1, t2))
3609 goto different_types;
3611 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
3615 tree parms1, parms2;
3617 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
3619 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
3621 if ((mode == GTC_MERGE
3622 || !gimple_compatible_complete_and_incomplete_subtype_p
3623 (TREE_VALUE (parms1), TREE_VALUE (parms2)))
3624 && !gtc_visit (TREE_VALUE (parms1), TREE_VALUE (parms2), mode,
3625 state, sccstack, sccstate, sccstate_obstack))
3626 goto different_types;
3629 if (parms1 || parms2)
3630 goto different_types;
3637 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2), mode,
3638 state, sccstack, sccstate, sccstate_obstack)
3639 || !gtc_visit (TYPE_OFFSET_BASETYPE (t1),
3640 TYPE_OFFSET_BASETYPE (t2), mode,
3641 state, sccstack, sccstate, sccstate_obstack))
3642 goto different_types;
3648 case REFERENCE_TYPE:
3650 /* If the two pointers have different ref-all attributes,
3651 they can't be the same type. */
3652 if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
3653 goto different_types;
3655 /* If one pointer points to an incomplete type variant of
3656 the other pointed-to type they are the same. */
3657 if (mode == GTC_DIAG
3658 && gimple_compatible_complete_and_incomplete_subtype_p
3659 (TREE_TYPE (t1), TREE_TYPE (t2)))
3662 /* Otherwise, pointer and reference types are the same if the
3663 pointed-to types are the same. */
3664 if (gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2), mode,
3665 state, sccstack, sccstate, sccstate_obstack))
3668 goto different_types;
3672 /* There is only one decltype(nullptr). */
3678 tree min1 = TYPE_MIN_VALUE (t1);
3679 tree max1 = TYPE_MAX_VALUE (t1);
3680 tree min2 = TYPE_MIN_VALUE (t2);
3681 tree max2 = TYPE_MAX_VALUE (t2);
3682 bool min_equal_p = false;
3683 bool max_equal_p = false;
3685 /* If either type has a minimum value, the other type must have the same. */
3687 if (min1 == NULL_TREE && min2 == NULL_TREE)
3689 else if (min1 && min2 && operand_equal_p (min1, min2, 0))
3692 /* Likewise, if either type has a maximum value, the other
3693 type must have the same. */
3694 if (max1 == NULL_TREE && max2 == NULL_TREE)
3696 else if (max1 && max2 && operand_equal_p (max1, max2, 0))
3699 if (!min_equal_p || !max_equal_p)
3700 goto different_types;
3707 /* FIXME lto, we cannot check bounds on enumeral types because
3708 different front ends will produce different values.
3709 In C, enumeral types are integers, while in C++ each element
3710 will have its own symbolic value. We should decide how enums
3711 are to be represented in GIMPLE and have each front end lower to that. */
3715 /* For enumeral types, all the values must be the same. */
3716 if (TYPE_VALUES (t1) == TYPE_VALUES (t2))
3719 for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
3721 v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
3723 tree c1 = TREE_VALUE (v1);
3724 tree c2 = TREE_VALUE (v2);
3726 if (TREE_CODE (c1) == CONST_DECL)
3727 c1 = DECL_INITIAL (c1);
3729 if (TREE_CODE (c2) == CONST_DECL)
3730 c2 = DECL_INITIAL (c2);
3732 if (tree_int_cst_equal (c1, c2) != 1)
3733 goto different_types;
3736 /* If one enumeration has more values than the other, they
3737 are not the same. */
3739 goto different_types;
3746 case QUAL_UNION_TYPE:
3750 /* The struct tags shall compare equal. */
3751 if (mode == GTC_MERGE
3752 && !compare_type_names_p (TYPE_MAIN_VARIANT (t1),
3753 TYPE_MAIN_VARIANT (t2), false))
3754 goto different_types;
3756 /* For aggregate types, all the fields must be the same. */
3757 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
3759 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
3761 /* The fields must have the same name, offset and type. */
3762 if ((mode == GTC_MERGE
3763 && DECL_NAME (f1) != DECL_NAME (f2))
3764 || DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
3765 || !gimple_compare_field_offset (f1, f2)
3766 || !gtc_visit (TREE_TYPE (f1), TREE_TYPE (f2), mode,
3767 state, sccstack, sccstate, sccstate_obstack))
3768 goto different_types;
3771 /* If one aggregate has more fields than the other, they
3772 are not the same. */
3774 goto different_types;
3783 /* Common exit path for types that are not compatible. */
3785 state->u.same_p = 0;
3788 /* Common exit path for types that are compatible. */
3790 gcc_assert (state->u.same_p == 1);
3793 if (state->low == state->dfsnum)
3797 /* Pop off the SCC and set its cache values to the final
3798 comparison result. */
3801 struct sccs *cstate;
3802 x = VEC_pop (type_pair_t, *sccstack);
3803 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
3804 cstate->on_sccstack = false;
3805 x->same_p[mode] = state->u.same_p;
3810 return state->u.same_p;
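/* Illustrative example of the SCC treatment above: the mutually
   recursive pair

     struct A { struct B *b; };   struct B { struct A *a; };

   forms a two-member SCC.  Comparison starts optimistically with
   same_p == 1, DFS-recurses through the pointer members, and only
   when the SCC is popped are the final (possibly falsified) results
   written back into the type-pair cache.  */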
3813 /* Return true iff T1 and T2 are structurally identical. When
3814 MODE is GTC_MERGE, an incomplete type and a complete type
3815 are considered different; otherwise they are considered compatible. */
3818 gimple_types_compatible_p (tree t1, tree t2, enum gtc_mode mode)
3820 VEC(type_pair_t, heap) *sccstack = NULL;
3821 struct pointer_map_t *sccstate;
3822 struct obstack sccstate_obstack;
3823 type_pair_t p = NULL;
3826 /* Before starting to set up the SCC machinery handle simple cases. */
3828 /* Check first for the obvious case of pointer identity. */
3832 /* Check that we have two types to compare. */
3833 if (t1 == NULL_TREE || t2 == NULL_TREE)
3836 /* If the types have been previously registered and found equal
3838 if (mode == GTC_MERGE)
3840 tree leader1 = gimple_lookup_type_leader (t1);
3841 tree leader2 = gimple_lookup_type_leader (t2);
3844 || (leader1 && leader1 == leader2))
3847 else if (mode == GTC_DIAG)
3849 if (TYPE_CANONICAL (t1)
3850 && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2))
3854 /* Can't be the same type if the types don't have the same code. */
3855 if (TREE_CODE (t1) != TREE_CODE (t2))
3858 /* Can't be the same type if they have different CV qualifiers. */
3859 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
3862 /* Void types are always the same. */
3863 if (TREE_CODE (t1) == VOID_TYPE)
3866 /* Do some simple checks before doing three hashtable queries. */
3867 if (INTEGRAL_TYPE_P (t1)
3868 || SCALAR_FLOAT_TYPE_P (t1)
3869 || FIXED_POINT_TYPE_P (t1)
3870 || TREE_CODE (t1) == VECTOR_TYPE
3871 || TREE_CODE (t1) == COMPLEX_TYPE
3872 || TREE_CODE (t1) == OFFSET_TYPE)
3874 /* Can't be the same type if they have different alignment,
3875 sign, precision or mode. */
3876 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3877 || TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
3878 || TYPE_MODE (t1) != TYPE_MODE (t2)
3879 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
3882 if (TREE_CODE (t1) == INTEGER_TYPE
3883 && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
3884 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
3887 /* That's all we need to check for float and fixed-point types. */
3888 if (SCALAR_FLOAT_TYPE_P (t1)
3889 || FIXED_POINT_TYPE_P (t1))
3892 /* For integral types fall thru to more complex checks. */
3895 else if (AGGREGATE_TYPE_P (t1) || POINTER_TYPE_P (t1))
3897 /* Can't be the same type if they have different alignment or mode. */
3898 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3899 || TYPE_MODE (t1) != TYPE_MODE (t2))
3903 /* If the hash values of t1 and t2 are different the types can't
3904 possibly be the same. This helps keep the type-pair hashtable
3905 small, tracking comparisons only for hash collisions. */
3906 if (gimple_type_hash_1 (t1, mode) != gimple_type_hash_1 (t2, mode))
3909 /* If we've visited this type pair before (in the case of aggregates
3910 with self-referential types), and we made a decision, return it. */
3911 p = lookup_type_pair (t1, t2, >c_visited, >c_ob);
3912 if (p->same_p[mode] == 0 || p->same_p[mode] == 1)
3914 /* We have already decided whether T1 and T2 are the
3915 same, return the cached result. */
3916 return p->same_p[mode] == 1;
3919 /* Now set up the SCC machinery for the comparison. */
3920 gtc_next_dfs_num = 1;
3921 sccstate = pointer_map_create ();
3922 gcc_obstack_init (&sccstate_obstack);
3923 res = gimple_types_compatible_p_1 (t1, t2, mode, p,
3924 &sccstack, sccstate, &sccstate_obstack);
3925 VEC_free (type_pair_t, heap, sccstack);
3926 pointer_map_destroy (sccstate);
3927 obstack_free (&sccstate_obstack, NULL);
3934 iterative_hash_gimple_type (tree, hashval_t, VEC(tree, heap) **,
3935 struct pointer_map_t *, struct obstack *,
3938 /* DFS visit the edge from the caller's type with state *STATE to T.
3939 Update the caller's type hash V with the hash for T if it is not part
3940 of the SCC containing the caller's type and return it.
3941 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
3944 visit (tree t, struct sccs *state, hashval_t v,
3945 VEC (tree, heap) **sccstack,
3946 struct pointer_map_t *sccstate,
3947 struct obstack *sccstate_obstack, enum gtc_mode mode)
3949 struct sccs *cstate = NULL;
3950 struct tree_int_map m;
3953 /* If there is a hash value recorded for this type then it can't
3954 possibly be part of our parent SCC. Simply mix in its hash. */
3956 if ((slot = htab_find_slot (mode == GTC_MERGE
3957 ? type_hash_cache : canonical_type_hash_cache,
3960 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, v);
3962 if ((slot = pointer_map_contains (sccstate, t)) != NULL)
3963 cstate = (struct sccs *)*slot;
3967 /* Not yet visited. DFS recurse. */
3968 tem = iterative_hash_gimple_type (t, v,
3969 sccstack, sccstate, sccstate_obstack,
3972 cstate = (struct sccs *)* pointer_map_contains (sccstate, t);
3973 state->low = MIN (state->low, cstate->low);
3974 /* If the type is no longer on the SCC stack and thus is not part
3975 of the parent's SCC, mix in its hash value. Otherwise we will
3976 ignore the type for hashing purposes and return the unaltered hash value. */
3978 if (!cstate->on_sccstack)
3981 if (cstate->dfsnum < state->dfsnum
3982 && cstate->on_sccstack)
3983 state->low = MIN (cstate->dfsnum, state->low);
3985 /* We are part of our parent's SCC; skip this type during hashing
3986 and return the unaltered hash value. */
3990 /* Hash NAME with the previous hash value V and return it. */
3993 iterative_hash_name (tree name, hashval_t v)
3997 if (TREE_CODE (name) == TYPE_DECL)
3998 name = DECL_NAME (name);
4001 gcc_assert (TREE_CODE (name) == IDENTIFIER_NODE);
4002 return iterative_hash_object (IDENTIFIER_HASH_VALUE (name), v);
4005 /* Return a hash value for the gimple type TYPE combined with VAL.
4006 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.
4008 To hash a type we end up hashing in types that are reachable.
4009 Through pointers we can end up with cycles, which would break the
4010 required property that we compute the same hash value
4011 for structurally equivalent types. To avoid this we have to
4012 hash all types in a cycle (the SCC) in a commutative way. The
4013 easiest way is to not mix in the hashes of the SCC members at
4014 all. To make this work we have to delay setting the hash
4015 values of the SCC until it is complete. */
4018 iterative_hash_gimple_type (tree type, hashval_t val,
4019 VEC(tree, heap) **sccstack,
4020 struct pointer_map_t *sccstate,
4021 struct obstack *sccstate_obstack,
4028 /* Not visited during this DFS walk. */
4029 gcc_checking_assert (!pointer_map_contains (sccstate, type));
4030 state = XOBNEW (sccstate_obstack, struct sccs);
4031 *pointer_map_insert (sccstate, type) = state;
4033 VEC_safe_push (tree, heap, *sccstack, type);
4034 state->dfsnum = next_dfs_num++;
4035 state->low = state->dfsnum;
4036 state->on_sccstack = true;
4038 /* Combine a few common features of types so that types are grouped into
4039 smaller sets; when searching for existing matching types to merge,
4040 only existing types having the same features as the new type will be checked. */
4042 v = iterative_hash_hashval_t (TREE_CODE (type), 0);
4043 v = iterative_hash_hashval_t (TYPE_QUALS (type), v);
4044 v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);
4046 /* Do not hash the type's size as this will cause differences in
4047 hash values for the complete vs. the incomplete type variant. */
4049 /* Incorporate common features of numerical types. */
4050 if (INTEGRAL_TYPE_P (type)
4051 || SCALAR_FLOAT_TYPE_P (type)
4052 || FIXED_POINT_TYPE_P (type))
4054 v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
4055 v = iterative_hash_hashval_t (TYPE_MODE (type), v);
4056 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
4059 /* For pointer and reference types, fold in information about the type
4060 pointed to but do not recurse into possibly incomplete types to
4061 avoid hash differences for complete vs. incomplete types. */
4062 if (POINTER_TYPE_P (type))
4064 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (type)))
4066 v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
4067 v = iterative_hash_name
4068 (TYPE_NAME (TYPE_MAIN_VARIANT (TREE_TYPE (type))), v);
4071 v = visit (TREE_TYPE (type), state, v,
4072 sccstack, sccstate, sccstate_obstack, mode);
4075 /* For integer types hash the types min/max values and the string flag. */
4076 if (TREE_CODE (type) == INTEGER_TYPE)
4078 /* OMP lowering can introduce error_mark_node in place of
4079 random local decls in types. */
4080 if (TYPE_MIN_VALUE (type) != error_mark_node)
4081 v = iterative_hash_expr (TYPE_MIN_VALUE (type), v);
4082 if (TYPE_MAX_VALUE (type) != error_mark_node)
4083 v = iterative_hash_expr (TYPE_MAX_VALUE (type), v);
4084 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
4087 /* For array types hash their domain and the string flag. */
4088 if (TREE_CODE (type) == ARRAY_TYPE
4089 && TYPE_DOMAIN (type))
4091 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
4092 v = visit (TYPE_DOMAIN (type), state, v,
4093 sccstack, sccstate, sccstate_obstack, mode);
4096 /* Recurse for aggregates with a single element type. */
4097 if (TREE_CODE (type) == ARRAY_TYPE
4098 || TREE_CODE (type) == COMPLEX_TYPE
4099 || TREE_CODE (type) == VECTOR_TYPE)
4100 v = visit (TREE_TYPE (type), state, v,
4101 sccstack, sccstate, sccstate_obstack, mode);
4103 /* Incorporate function return and argument types. */
4104 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
4109 /* For method types also incorporate their parent class. */
4110 if (TREE_CODE (type) == METHOD_TYPE)
4111 v = visit (TYPE_METHOD_BASETYPE (type), state, v,
4112 sccstack, sccstate, sccstate_obstack, mode);
4114 /* For result types allow mismatch in completeness. */
4115 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (type)))
4117 v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
4118 v = iterative_hash_name
4119 (TYPE_NAME (TYPE_MAIN_VARIANT (TREE_TYPE (type))), v);
4122 v = visit (TREE_TYPE (type), state, v,
4123 sccstack, sccstate, sccstate_obstack, mode);
4125 for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
4127 /* For argument types allow mismatch in completeness. */
4128 if (RECORD_OR_UNION_TYPE_P (TREE_VALUE (p)))
4130 v = iterative_hash_hashval_t (TREE_CODE (TREE_VALUE (p)), v);
4131 v = iterative_hash_name
4132 (TYPE_NAME (TYPE_MAIN_VARIANT (TREE_VALUE (p))), v);
4135 v = visit (TREE_VALUE (p), state, v,
4136 sccstack, sccstate, sccstate_obstack, mode);
4140 v = iterative_hash_hashval_t (na, v);
4143 if (TREE_CODE (type) == RECORD_TYPE
4144 || TREE_CODE (type) == UNION_TYPE
4145 || TREE_CODE (type) == QUAL_UNION_TYPE)
4150 if (mode == GTC_MERGE)
4151 v = iterative_hash_name (TYPE_NAME (TYPE_MAIN_VARIANT (type)), v);
4153 for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
4155 if (mode == GTC_MERGE)
4156 v = iterative_hash_name (DECL_NAME (f), v);
4157 v = visit (TREE_TYPE (f), state, v,
4158 sccstack, sccstate, sccstate_obstack, mode);
4162 v = iterative_hash_hashval_t (nf, v);
4165 /* Record hash for us. */
4168 /* See if we found an SCC. */
4169 if (state->low == state->dfsnum)
4173 /* Pop off the SCC and set its hash values. */
4176 struct sccs *cstate;
4177 struct tree_int_map *m = ggc_alloc_cleared_tree_int_map ();
4178 x = VEC_pop (tree, *sccstack);
4179 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
4180 cstate->on_sccstack = false;
4182 m->to = cstate->u.hash;
4183 slot = htab_find_slot (mode == GTC_MERGE
4184 ? type_hash_cache : canonical_type_hash_cache,
4186 gcc_assert (!*slot);
4192 return iterative_hash_hashval_t (v, val);
4196 /* Returns a hash value for P (assumed to be a type). The hash value
4197 is computed using some distinguishing features of the type. Note
4198 that we cannot use pointer hashing here as we may be dealing with
4199 two distinct instances of the same type.
4201 This function should produce the same hash value for two compatible
4202 types according to gimple_types_compatible_p. */
4205 gimple_type_hash_1 (const void *p, enum gtc_mode mode)
4207 const_tree t = (const_tree) p;
4208 VEC(tree, heap) *sccstack = NULL;
4209 struct pointer_map_t *sccstate;
4210 struct obstack sccstate_obstack;
4213 struct tree_int_map m;
4215 if (mode == GTC_MERGE
4216 && type_hash_cache == NULL)
4217 type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
4218 tree_int_map_eq, NULL);
4219 else if (mode == GTC_DIAG
4220 && canonical_type_hash_cache == NULL)
4221 canonical_type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
4222 tree_int_map_eq, NULL);
4224 m.base.from = CONST_CAST_TREE (t);
4225 if ((slot = htab_find_slot (mode == GTC_MERGE
4226 ? type_hash_cache : canonical_type_hash_cache,
4229 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, 0);
4231 /* Perform a DFS walk and pre-hash all reachable types. */
4233 sccstate = pointer_map_create ();
4234 gcc_obstack_init (&sccstate_obstack);
4235 val = iterative_hash_gimple_type (CONST_CAST_TREE (t), 0,
4236 &sccstack, sccstate, &sccstate_obstack,
4238 VEC_free (tree, heap, sccstack);
4239 pointer_map_destroy (sccstate);
4240 obstack_free (&sccstate_obstack, NULL);
4246 gimple_type_hash (const void *p)
4248 return gimple_type_hash_1 (p, GTC_MERGE);
4252 gimple_canonical_type_hash (const void *p)
4254 return gimple_type_hash_1 (p, GTC_DIAG);
4258 /* Returns nonzero if P1 and P2 are equal. */
4261 gimple_type_eq (const void *p1, const void *p2)
4263 const_tree t1 = (const_tree) p1;
4264 const_tree t2 = (const_tree) p2;
4265 return gimple_types_compatible_p (CONST_CAST_TREE (t1),
4266 CONST_CAST_TREE (t2), GTC_MERGE);
4270 /* Register type T in the global type table gimple_types.
4271 If another type T', compatible with T, already existed in
4272 gimple_types then return T', otherwise return T. This is used by
4273 LTO to merge identical types read from different TUs. */
4276 gimple_register_type (tree t)
4279 gimple_type_leader_entry *leader;
4280 tree mv_leader = NULL_TREE;
4282 gcc_assert (TYPE_P (t));
4284 if (!gimple_type_leader)
4285 gimple_type_leader = ggc_alloc_cleared_vec_gimple_type_leader_entry_s
4286 (GIMPLE_TYPE_LEADER_SIZE);
4287 /* If we registered this type before return the cached result. */
4288 leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
4289 if (leader->type == t)
4290 return leader->leader;
4292 /* Always register the main variant first. This is important so we
4293 pick up the non-typedef variants as canonical, otherwise we'll end
4294 up taking typedef ids for structure tags during comparison. */
4295 if (TYPE_MAIN_VARIANT (t) != t)
4296 mv_leader = gimple_register_type (TYPE_MAIN_VARIANT (t));
4298 if (gimple_types == NULL)
4299 gimple_types = htab_create_ggc (16381, gimple_type_hash, gimple_type_eq, 0);
4301 slot = htab_find_slot (gimple_types, t, INSERT);
4303 && *(tree *)slot != t)
4305 tree new_type = (tree) *((tree *) slot);
4307 /* Do not merge types with different addressability. */
4308 gcc_assert (TREE_ADDRESSABLE (t) == TREE_ADDRESSABLE (new_type));
4310 /* If t is not its main variant then make t unreachable from its
4311 main variant list. Otherwise we'd queue up a lot of duplicates there. */
4313 if (t != TYPE_MAIN_VARIANT (t))
4315 tree tem = TYPE_MAIN_VARIANT (t);
4316 while (tem && TYPE_NEXT_VARIANT (tem) != t)
4317 tem = TYPE_NEXT_VARIANT (tem);
4319 TYPE_NEXT_VARIANT (tem) = TYPE_NEXT_VARIANT (t);
4320 TYPE_NEXT_VARIANT (t) = NULL_TREE;
4323 /* If we are a pointer then remove us from the pointer-to or
4324 reference-to chain. Otherwise we'd queue up a lot of duplicates there. */
4326 if (TREE_CODE (t) == POINTER_TYPE)
4328 if (TYPE_POINTER_TO (TREE_TYPE (t)) == t)
4329 TYPE_POINTER_TO (TREE_TYPE (t)) = TYPE_NEXT_PTR_TO (t);
4332 tree tem = TYPE_POINTER_TO (TREE_TYPE (t));
4333 while (tem && TYPE_NEXT_PTR_TO (tem) != t)
4334 tem = TYPE_NEXT_PTR_TO (tem);
4336 TYPE_NEXT_PTR_TO (tem) = TYPE_NEXT_PTR_TO (t);
4338 TYPE_NEXT_PTR_TO (t) = NULL_TREE;
4340 else if (TREE_CODE (t) == REFERENCE_TYPE)
4342 if (TYPE_REFERENCE_TO (TREE_TYPE (t)) == t)
4343 TYPE_REFERENCE_TO (TREE_TYPE (t)) = TYPE_NEXT_REF_TO (t);
4346 tree tem = TYPE_REFERENCE_TO (TREE_TYPE (t));
4347 while (tem && TYPE_NEXT_REF_TO (tem) != t)
4348 tem = TYPE_NEXT_REF_TO (tem);
4350 TYPE_NEXT_REF_TO (tem) = TYPE_NEXT_REF_TO (t);
4352 TYPE_NEXT_REF_TO (t) = NULL_TREE;
4356 leader->leader = new_type;
4363 /* We're the type leader. Make our TYPE_MAIN_VARIANT valid. */
4364 if (TYPE_MAIN_VARIANT (t) != t
4365 && TYPE_MAIN_VARIANT (t) != mv_leader)
4367 /* Remove us from our main variant list as we are not the variant
4368 leader and the variant leader will change. */
4369 tree tem = TYPE_MAIN_VARIANT (t);
4370 while (tem && TYPE_NEXT_VARIANT (tem) != t)
4371 tem = TYPE_NEXT_VARIANT (tem);
4373 TYPE_NEXT_VARIANT (tem) = TYPE_NEXT_VARIANT (t);
4374 TYPE_NEXT_VARIANT (t) = NULL_TREE;
4375 /* Adjust our main variant. Linking us into its variant list
4376 will happen at fixup time. */
4377 TYPE_MAIN_VARIANT (t) = mv_leader;
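/* Illustrative sketch (assumed usage, not part of GCC): per the
   comment above, LTO streaming code is expected to funnel every
   streamed-in type through the registry and use the returned leader
   from then on:

     type = gimple_register_type (type);  */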
4386 /* Returns nonzero if P1 and P2 are equal. */
4389 gimple_canonical_type_eq (const void *p1, const void *p2)
4391 const_tree t1 = (const_tree) p1;
4392 const_tree t2 = (const_tree) p2;
4393 return gimple_types_compatible_p (CONST_CAST_TREE (t1),
4394 CONST_CAST_TREE (t2), GTC_DIAG);
4397 /* Register type T in the global canonical type table
4398 gimple_canonical_types. If another type T', compatible with T
4399 under GTC_DIAG, already existed there then return T', otherwise
4400 return T. This is used by LTO to merge identical types read from different TUs. */
4403 gimple_register_canonical_type (tree t)
4408 gcc_assert (TYPE_P (t));
4410 if (TYPE_CANONICAL (t))
4411 return TYPE_CANONICAL (t);
4413 /* Always register the type itself first so that if it turns out
4414 to be the canonical type it will be the one we merge to as well. */
4415 t = gimple_register_type (t);
4417 /* Always register the main variant first. This is important so we
4418 pick up the non-typedef variants as canonical, otherwise we'll end
4419 up taking typedef ids for structure tags during comparison. */
4420 if (TYPE_MAIN_VARIANT (t) != t)
4421 gimple_register_canonical_type (TYPE_MAIN_VARIANT (t));
4423 if (gimple_canonical_types == NULL)
4424 gimple_canonical_types = htab_create_ggc (16381, gimple_canonical_type_hash,
4425 gimple_canonical_type_eq, 0);
4427 slot = htab_find_slot (gimple_canonical_types, t, INSERT);
4429 && *(tree *)slot != t)
4431 tree new_type = (tree) *((tree *) slot);
4433 TYPE_CANONICAL (t) = new_type;
4438 TYPE_CANONICAL (t) = t;
4442 /* Also cache the canonical type in the non-leaders. */
4443 TYPE_CANONICAL (orig_t) = t;
4449 /* Show statistics on references to the global type table gimple_types. */
4452 print_gimple_types_stats (void)
4455 fprintf (stderr, "GIMPLE type table: size %ld, %ld elements, "
4456 "%ld searches, %ld collisions (ratio: %f)\n",
4457 (long) htab_size (gimple_types),
4458 (long) htab_elements (gimple_types),
4459 (long) gimple_types->searches,
4460 (long) gimple_types->collisions,
4461 htab_collisions (gimple_types));
4463 fprintf (stderr, "GIMPLE type table is empty\n");
4464 if (type_hash_cache)
4465 fprintf (stderr, "GIMPLE type hash table: size %ld, %ld elements, "
4466 "%ld searches, %ld collisions (ratio: %f)\n",
4467 (long) htab_size (type_hash_cache),
4468 (long) htab_elements (type_hash_cache),
4469 (long) type_hash_cache->searches,
4470 (long) type_hash_cache->collisions,
4471 htab_collisions (type_hash_cache));
4473 fprintf (stderr, "GIMPLE type hash table is empty\n");
4474 if (gimple_canonical_types)
4475 fprintf (stderr, "GIMPLE canonical type table: size %ld, %ld elements, "
4476 "%ld searches, %ld collisions (ratio: %f)\n",
4477 (long) htab_size (gimple_canonical_types),
4478 (long) htab_elements (gimple_canonical_types),
4479 (long) gimple_canonical_types->searches,
4480 (long) gimple_canonical_types->collisions,
4481 htab_collisions (gimple_canonical_types));
4483 fprintf (stderr, "GIMPLE canonical type table is empty\n");
4484 if (canonical_type_hash_cache)
4485 fprintf (stderr, "GIMPLE canonical type hash table: size %ld, %ld elements, "
4486 "%ld searches, %ld collisions (ratio: %f)\n",
4487 (long) htab_size (canonical_type_hash_cache),
4488 (long) htab_elements (canonical_type_hash_cache),
4489 (long) canonical_type_hash_cache->searches,
4490 (long) canonical_type_hash_cache->collisions,
4491 htab_collisions (canonical_type_hash_cache));
4493 fprintf (stderr, "GIMPLE canonical type hash table is empty\n");
4495 fprintf (stderr, "GIMPLE type comparison table: size %ld, %ld "
4496 "elements, %ld searches, %ld collisions (ratio: %f)\n",
4497 (long) htab_size (gtc_visited),
4498 (long) htab_elements (gtc_visited),
4499 (long) gtc_visited->searches,
4500 (long) gtc_visited->collisions,
4501 htab_collisions (gtc_visited));
4503 fprintf (stderr, "GIMPLE type comparison table is empty\n");
4506 /* Free the gimple type hashtables used for LTO type merging. */
4509 free_gimple_type_tables (void)
4511 /* Last chance to print stats for the tables. */
4512 if (flag_lto_report)
4513 print_gimple_types_stats ();
4517 htab_delete (gimple_types);
4518 gimple_types = NULL;
4520 if (gimple_canonical_types)
4522 htab_delete (gimple_canonical_types);
4523 gimple_canonical_types = NULL;
4525 if (type_hash_cache)
4527 htab_delete (type_hash_cache);
4528 type_hash_cache = NULL;
4530 if (canonical_type_hash_cache)
4532 htab_delete (canonical_type_hash_cache);
4533 canonical_type_hash_cache = NULL;
4537 htab_delete (gtc_visited);
4538 obstack_free (>c_ob, NULL);
4541 gimple_type_leader = NULL;
4545 /* Return a type the same as TYPE except unsigned or
4546 signed according to UNSIGNEDP. */
4549 gimple_signed_or_unsigned_type (bool unsignedp, tree type)
4553 type1 = TYPE_MAIN_VARIANT (type);
4554 if (type1 == signed_char_type_node
4555 || type1 == char_type_node
4556 || type1 == unsigned_char_type_node)
4557 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
4558 if (type1 == integer_type_node || type1 == unsigned_type_node)
4559 return unsignedp ? unsigned_type_node : integer_type_node;
4560 if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
4561 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
4562 if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
4563 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
4564 if (type1 == long_long_integer_type_node
4565 || type1 == long_long_unsigned_type_node)
4567 ? long_long_unsigned_type_node
4568 : long_long_integer_type_node;
4569 if (int128_integer_type_node
     && (type1 == int128_integer_type_node
         || type1 == int128_unsigned_type_node))
4571 ? int128_unsigned_type_node
4572 : int128_integer_type_node;
4573 #if HOST_BITS_PER_WIDE_INT >= 64
4574 if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
4575 return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
4577 if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
4578 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
4579 if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
4580 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
4581 if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
4582 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
4583 if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
4584 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
#define GIMPLE_FIXED_TYPES(NAME) \
  if (type1 == short_ ## NAME ## _type_node \
      || type1 == unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_short_ ## NAME ## _type_node \
		     : short_ ## NAME ## _type_node; \
  if (type1 == NAME ## _type_node \
      || type1 == unsigned_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_ ## NAME ## _type_node \
		     : NAME ## _type_node; \
  if (type1 == long_ ## NAME ## _type_node \
      || type1 == unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_ ## NAME ## _type_node \
		     : long_ ## NAME ## _type_node; \
  if (type1 == long_long_ ## NAME ## _type_node \
      || type1 == unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
		     : long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES(NAME) \
  if (type1 == NAME ## _type_node \
      || type1 == u ## NAME ## _type_node) \
    return unsignedp ? u ## NAME ## _type_node \
		     : NAME ## _type_node;

#define GIMPLE_FIXED_TYPES_SAT(NAME) \
  if (type1 == sat_ ## short_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
		     : sat_ ## short_ ## NAME ## _type_node; \
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
		     : sat_ ## long_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
		     : sat_ ## long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES_SAT(NAME) \
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## u ## NAME ## _type_node) \
    return unsignedp ? sat_ ## u ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node;
  GIMPLE_FIXED_TYPES (fract);
  GIMPLE_FIXED_TYPES_SAT (fract);
  GIMPLE_FIXED_TYPES (accum);
  GIMPLE_FIXED_TYPES_SAT (accum);

  GIMPLE_FIXED_MODE_TYPES (qq);
  GIMPLE_FIXED_MODE_TYPES (hq);
  GIMPLE_FIXED_MODE_TYPES (sq);
  GIMPLE_FIXED_MODE_TYPES (dq);
  GIMPLE_FIXED_MODE_TYPES (tq);
  GIMPLE_FIXED_MODE_TYPES_SAT (qq);
  GIMPLE_FIXED_MODE_TYPES_SAT (hq);
  GIMPLE_FIXED_MODE_TYPES_SAT (sq);
  GIMPLE_FIXED_MODE_TYPES_SAT (dq);
  GIMPLE_FIXED_MODE_TYPES_SAT (tq);
  GIMPLE_FIXED_MODE_TYPES (ha);
  GIMPLE_FIXED_MODE_TYPES (sa);
  GIMPLE_FIXED_MODE_TYPES (da);
  GIMPLE_FIXED_MODE_TYPES (ta);
  GIMPLE_FIXED_MODE_TYPES_SAT (ha);
  GIMPLE_FIXED_MODE_TYPES_SAT (sa);
  GIMPLE_FIXED_MODE_TYPES_SAT (da);
  GIMPLE_FIXED_MODE_TYPES_SAT (ta);
  /* For ENUMERAL_TYPEs in C++, we must check the mode of the types, not
     the precision; they have precision set to match their range, but
     may use a wider mode to match an ABI.  If we change modes, we may
     wind up with bad conversions.  For INTEGER_TYPEs in C, we must check
     the precision as well, so as to yield correct results for
     bit-field types.  C++ does not have these separate bit-field
     types, and producing a signed or unsigned variant of an
     ENUMERAL_TYPE may cause other problems as well.  */
  if (!INTEGRAL_TYPE_P (type)
      || TYPE_UNSIGNED (type) == unsignedp)
    return type;

#define TYPE_OK(node) \
  (TYPE_MODE (type) == TYPE_MODE (node) \
   && TYPE_PRECISION (type) == TYPE_PRECISION (node))
  if (TYPE_OK (signed_char_type_node))
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (TYPE_OK (integer_type_node))
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (TYPE_OK (short_integer_type_node))
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (TYPE_OK (long_integer_type_node))
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (TYPE_OK (long_long_integer_type_node))
    return (unsignedp
	    ? long_long_unsigned_type_node
	    : long_long_integer_type_node);
  if (int128_integer_type_node && TYPE_OK (int128_integer_type_node))
    return (unsignedp
	    ? int128_unsigned_type_node
	    : int128_integer_type_node);

#if HOST_BITS_PER_WIDE_INT >= 64
  if (TYPE_OK (intTI_type_node))
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (TYPE_OK (intDI_type_node))
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (TYPE_OK (intSI_type_node))
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (TYPE_OK (intHI_type_node))
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (TYPE_OK (intQI_type_node))
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

#undef GIMPLE_FIXED_TYPES
#undef GIMPLE_FIXED_MODE_TYPES
#undef GIMPLE_FIXED_TYPES_SAT
#undef GIMPLE_FIXED_MODE_TYPES_SAT
#undef TYPE_OK

  return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
}
/* Return an unsigned type the same as TYPE in other respects.  */

tree
gimple_unsigned_type (tree type)
{
  return gimple_signed_or_unsigned_type (true, type);
}
/* Return a signed type the same as TYPE in other respects.  */

tree
gimple_signed_type (tree type)
{
  return gimple_signed_or_unsigned_type (false, type);
}
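
/* A minimal usage sketch (illustrative only; the example_ function below
   is not part of the original file): flipping the signedness of common
   integral types with the two wrappers above.  */

static void ATTRIBUTE_UNUSED
example_flip_signedness (void)
{
  tree u = gimple_unsigned_type (integer_type_node);      /* unsigned int */
  tree s = gimple_signed_type (unsigned_char_type_node);  /* signed char */
  gcc_assert (TYPE_UNSIGNED (u) && !TYPE_UNSIGNED (s));
}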
/* Return the type-based alias set for T, which may be an expression
   or a type.  Return -1 if we don't do anything special.  */

alias_set_type
gimple_get_alias_set (tree t)
{
  tree u;

  /* Permit type-punning when accessing a union, provided the access
     is directly through the union.  For example, this code does not
     permit taking the address of a union member and then storing
     through it.  Even the type-punning allowed here is a GCC
     extension, albeit a common and useful one; the C standard says
     that such accesses have implementation-defined behavior.  */
  for (u = t;
       TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
       u = TREE_OPERAND (u, 0))
    if (TREE_CODE (u) == COMPONENT_REF
	&& TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
      return 0;

  /* That's all the expressions we handle specially.  */
  if (!TYPE_P (t))
    return -1;

  /* For convenience, follow the C standard when dealing with
     character types.  Any object may be accessed via an lvalue that
     has character type.  */
  if (t == char_type_node
      || t == signed_char_type_node
      || t == unsigned_char_type_node)
    return 0;

  /* Allow aliasing between signed and unsigned variants of the same
     type.  We treat the signed variant as canonical.  */
  if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
    {
      tree t1 = gimple_signed_type (t);

      /* t1 == t can happen for boolean nodes which are always unsigned.  */
      if (t1 != t)
	return get_alias_set (t1);
    }

  return -1;
}
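
/* For illustration (a sketch, not part of the original file), the union
   distinction made above looks like this in user code:

     union u { int i; float f; };

     int
     read_bits (union u *p)
     {
       p->f = 1.0f;
       return p->i;        (direct access through the union: permitted)
     }

   whereas taking '&p->i' first and then storing through the resulting
   pointer is not covered by this exception.  */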
/* Data structure used to count the number of dereferences to PTR
   inside an expression.  */
struct count_ptr_d
{
  tree ptr;
  unsigned num_stores;
  unsigned num_loads;
};
/* Helper for count_uses_and_derefs.  Called by walk_tree to look for
   (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA.  */

static tree
count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info;

  /* Do not walk inside ADDR_EXPR nodes.  In the expression &ptr->fld,
     pointer 'ptr' is *not* dereferenced, it is simply used to compute
     the address of 'fld' as 'ptr + offsetof(fld)'.  */
  if (TREE_CODE (*tp) == ADDR_EXPR)
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (*tp) == MEM_REF && TREE_OPERAND (*tp, 0) == count_p->ptr)
    {
      if (wi_p->is_lhs)
	count_p->num_stores++;
      else
	count_p->num_loads++;
    }

  return NULL_TREE;
}
/* Count the number of direct and indirect uses for pointer PTR in
   statement STMT.  The number of direct uses is stored in
   *NUM_USES_P.  Indirect references are counted separately depending
   on whether they are store or load operations.  The counts are
   stored in *NUM_STORES_P and *NUM_LOADS_P.  */

void
count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p,
		       unsigned *num_loads_p, unsigned *num_stores_p)
{
  ssa_op_iter i;
  tree use;

  *num_uses_p = 0;
  *num_loads_p = 0;
  *num_stores_p = 0;

  /* Find out the total number of uses of PTR in STMT.  */
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
    if (use == ptr)
      (*num_uses_p)++;

  /* Now count the number of indirect references to PTR.  This is
     truly awful, but we don't have much choice.  There are no parent
     pointers inside INDIRECT_REFs, so an expression like
     '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
     find all the indirect and direct uses of x_1 inside.  The only
     shortcut we can take is the fact that GIMPLE only allows
     INDIRECT_REFs inside the expressions below.  */
  if (is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_RETURN
      || gimple_code (stmt) == GIMPLE_ASM
      || is_gimple_call (stmt))
    {
      struct walk_stmt_info wi;
      struct count_ptr_d count;

      count.ptr = ptr;
      count.num_stores = 0;
      count.num_loads = 0;

      memset (&wi, 0, sizeof (wi));
      wi.info = &count;
      walk_gimple_op (stmt, count_ptr_derefs, &wi);

      *num_stores_p = count.num_stores;
      *num_loads_p = count.num_loads;
    }

  gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
}
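
/* Usage sketch (illustrative only; the example_ function below is not
   part of the original file): classify how PTR is used by STMT, e.g. to
   check that a pointer is dereferenced only for reading.  */

static bool ATTRIBUTE_UNUSED
example_ptr_only_loaded_p (tree ptr, gimple stmt)
{
  unsigned num_uses, num_loads, num_stores;

  count_uses_and_derefs (ptr, stmt, &num_uses, &num_loads, &num_stores);
  return num_stores == 0 && num_loads > 0;
}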
/* From a tree operand OP return the base of a load or store operation
   or NULL_TREE if OP is not a load or a store.  */

static tree
get_base_loadstore (tree op)
{
  while (handled_component_p (op))
    op = TREE_OPERAND (op, 0);
  if (DECL_P (op)
      || INDIRECT_REF_P (op)
      || TREE_CODE (op) == MEM_REF
      || TREE_CODE (op) == TARGET_MEM_REF)
    return op;
  return NULL_TREE;
}
/* For the statement STMT, call the callbacks VISIT_LOAD, VISIT_STORE and
   VISIT_ADDR (each if non-NULL) on the load, store and address-taken
   operands, passing STMT, the base of the operand and DATA to each.
   The base will be either a decl, an indirect reference (including
   TARGET_MEM_REF) or the argument of an address expression.
   Returns the or'ed results of these callbacks.  */

bool
walk_stmt_load_store_addr_ops (gimple stmt, void *data,
			       bool (*visit_load)(gimple, tree, void *),
			       bool (*visit_store)(gimple, tree, void *),
			       bool (*visit_addr)(gimple, tree, void *))
{
  bool ret = false;
  unsigned i;
  if (gimple_assign_single_p (stmt))
    {
      tree lhs, rhs;
      if (visit_store)
	{
	  lhs = get_base_loadstore (gimple_assign_lhs (stmt));
	  if (lhs)
	    ret |= visit_store (stmt, lhs, data);
	}
      rhs = gimple_assign_rhs1 (stmt);
      while (handled_component_p (rhs))
	rhs = TREE_OPERAND (rhs, 0);
      if (visit_addr)
	{
	  if (TREE_CODE (rhs) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
	  else if (TREE_CODE (rhs) == TARGET_MEM_REF
		   && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data);
	  else if (TREE_CODE (rhs) == OBJ_TYPE_REF
		   && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
						   0), data);
	  lhs = gimple_assign_lhs (stmt);
	  if (TREE_CODE (lhs) == TARGET_MEM_REF
	      && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), data);
	}
      if (visit_load)
	{
	  rhs = get_base_loadstore (rhs);
	  if (rhs)
	    ret |= visit_load (stmt, rhs, data);
	}
    }
  else if (visit_addr
	   && (is_gimple_assign (stmt)
	       || gimple_code (stmt) == GIMPLE_COND))
    {
      for (i = 0; i < gimple_num_ops (stmt); ++i)
	if (gimple_op (stmt, i)
	    && TREE_CODE (gimple_op (stmt, i)) == ADDR_EXPR)
	  ret |= visit_addr (stmt, TREE_OPERAND (gimple_op (stmt, i), 0), data);
    }
  else if (is_gimple_call (stmt))
    {
      if (visit_store)
	{
	  tree lhs = gimple_call_lhs (stmt);
	  if (lhs)
	    {
	      lhs = get_base_loadstore (lhs);
	      if (lhs)
		ret |= visit_store (stmt, lhs, data);
	    }
	}
      if (visit_load || visit_addr)
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree rhs = gimple_call_arg (stmt, i);
	    if (visit_addr
		&& TREE_CODE (rhs) == ADDR_EXPR)
	      ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
	    else if (visit_load)
	      {
		rhs = get_base_loadstore (rhs);
		if (rhs)
		  ret |= visit_load (stmt, rhs, data);
	      }
	  }
      if (visit_addr
	  && gimple_call_chain (stmt)
	  && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
	ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
			   data);
      if (visit_addr
	  && gimple_call_return_slot_opt_p (stmt)
	  && gimple_call_lhs (stmt) != NULL_TREE
	  && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
	ret |= visit_addr (stmt, gimple_call_lhs (stmt), data);
    }
  else if (gimple_code (stmt) == GIMPLE_ASM)
    {
      unsigned noutputs;
      const char *constraint;
      const char **oconstraints;
      bool allows_mem, allows_reg, is_inout;
      noutputs = gimple_asm_noutputs (stmt);
      oconstraints = XALLOCAVEC (const char *, noutputs);
      if (visit_store || visit_addr)
	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (stmt, i);
	    tree op = get_base_loadstore (TREE_VALUE (link));
	    if (op && visit_store)
	      ret |= visit_store (stmt, op, data);
	    if (visit_addr)
	      {
		constraint = TREE_STRING_POINTER
		    (TREE_VALUE (TREE_PURPOSE (link)));
		oconstraints[i] = constraint;
		parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
					 &allows_reg, &is_inout);
		if (op && !allows_reg && allows_mem)
		  ret |= visit_addr (stmt, op, data);
	      }
	  }
      if (visit_load || visit_addr)
	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    if (visit_addr
		&& TREE_CODE (op) == ADDR_EXPR)
	      ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
	    else if (visit_load || visit_addr)
	      {
		op = get_base_loadstore (op);
		if (op)
		  {
		    if (visit_load)
		      ret |= visit_load (stmt, op, data);
		    if (visit_addr)
		      {
			constraint = TREE_STRING_POINTER
			    (TREE_VALUE (TREE_PURPOSE (link)));
			parse_input_constraint (&constraint, 0, 0, noutputs,
						0, oconstraints,
						&allows_mem, &allows_reg);
			if (!allows_reg && allows_mem)
			  ret |= visit_addr (stmt, op, data);
		      }
		  }
	      }
	  }
    }
  else if (gimple_code (stmt) == GIMPLE_RETURN)
    {
      tree op = gimple_return_retval (stmt);
      if (op)
	{
	  if (visit_addr
	      && TREE_CODE (op) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
	  else if (visit_load)
	    {
	      op = get_base_loadstore (op);
	      if (op)
		ret |= visit_load (stmt, op, data);
	    }
	}
    }
  else if (visit_addr
	   && gimple_code (stmt) == GIMPLE_PHI)
    {
      for (i = 0; i < gimple_phi_num_args (stmt); ++i)
	{
	  tree op = PHI_ARG_DEF (stmt, i);
	  if (TREE_CODE (op) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
	}
    }

  return ret;
}
/* Like walk_stmt_load_store_addr_ops but with NULL visit_addr.  IPA-CP
   should make a faster clone for this case.  */

bool
walk_stmt_load_store_ops (gimple stmt, void *data,
			  bool (*visit_load)(gimple, tree, void *),
			  bool (*visit_store)(gimple, tree, void *))
{
  return walk_stmt_load_store_addr_ops (stmt, data,
					visit_load, visit_store, NULL);
}
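
/* Usage sketch (illustrative only; the example_ functions below are not
   part of the original file): count the memory loads in a statement by
   passing a trivial visitor to the walker above.  */

static bool
example_count_load (gimple stmt ATTRIBUTE_UNUSED, tree base ATTRIBUTE_UNUSED,
		    void *data)
{
  /* BASE is the base of one load; just bump the counter.  Returning
     false keeps the or'ed result of the walk false.  */
  ++*(unsigned *) data;
  return false;
}

static unsigned ATTRIBUTE_UNUSED
example_num_loads (gimple stmt)
{
  unsigned n = 0;
  walk_stmt_load_store_ops (stmt, &n, example_count_load, NULL);
  return n;
}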
/* Helper for gimple_ior_addresses_taken.  */

static bool
gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
			      tree addr, void *data)
{
  bitmap addresses_taken = (bitmap)data;
  addr = get_base_address (addr);
  if (addr
      && DECL_P (addr))
    {
      bitmap_set_bit (addresses_taken, DECL_UID (addr));
      return true;
    }
  return false;
}
/* Set the bit for the uid of all decls that have their address taken
   in STMT in the ADDRESSES_TAKEN bitmap.  Returns true if there
   were any in this stmt.  */

bool
gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
{
  return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
					gimple_ior_addresses_taken_1);
}
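
/* Usage sketch (illustrative only; the example_ function below is not
   part of the original file): collect the UIDs of all decls whose
   address STMT takes in a freshly allocated bitmap, or return NULL if
   there are none.  */

static bitmap ATTRIBUTE_UNUSED
example_addresses_taken (gimple stmt)
{
  bitmap b = BITMAP_ALLOC (NULL);
  if (gimple_ior_addresses_taken (b, stmt))
    return b;
  BITMAP_FREE (b);
  return NULL;
}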
/* Return a printable name for symbol DECL.  */

const char *
gimple_decl_printable_name (tree decl, int verbosity)
{
  if (!DECL_NAME (decl))
    return NULL;

  if (DECL_ASSEMBLER_NAME_SET_P (decl))
    {
      const char *str, *mangled_str;
      int dmgl_opts = DMGL_NO_OPTS;

      if (verbosity >= 2)
	{
	  dmgl_opts = DMGL_VERBOSE
		      | DMGL_ANSI
		      | DMGL_GNU_V3
		      | DMGL_RET_POSTFIX;
	  if (TREE_CODE (decl) == FUNCTION_DECL)
	    dmgl_opts |= DMGL_PARAMS;
	}

      mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
      str = cplus_demangle_v3 (mangled_str, dmgl_opts);
      return (str) ? str : mangled_str;
    }

  return IDENTIFIER_POINTER (DECL_NAME (decl));
}
/* Return true when STMT is a call to the built-in function CODE.  */

bool
gimple_call_builtin_p (gimple stmt, enum built_in_function code)
{
  tree fndecl;
  return (is_gimple_call (stmt)
	  && (fndecl = gimple_call_fndecl (stmt)) != NULL
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (fndecl) == code);
}
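
/* Usage sketch (illustrative only; the example_ function below is not
   part of the original file): recognizing a call to the memcpy
   built-in.  */

static bool ATTRIBUTE_UNUSED
example_is_memcpy_call (gimple stmt)
{
  return gimple_call_builtin_p (stmt, BUILT_IN_MEMCPY);
}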
/* Return true if STMT clobbers memory.  STMT is required to be a
   GIMPLE_ASM.  */

bool
gimple_asm_clobbers_memory_p (const_gimple stmt)
{
  unsigned i;

  for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
    {
      tree op = gimple_asm_clobber_op (stmt, i);
      if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
	return true;
    }

  return false;
}
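
/* For illustration (a sketch, not part of the original file), this
   predicate is true for a GIMPLE_ASM compiled from source such as:

     __asm__ __volatile__ ("" : : : "memory");

   where the "memory" clobber tells the compiler the asm may read or
   write arbitrary memory.  */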
#include "gt-gimple.h"