1 /* Gimple IR support functions.
3 Copyright 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
4 Contributed by Aldy Hernandez <aldyh@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
32 #include "diagnostic.h"
33 #include "tree-flow.h"
34 #include "value-prof.h"
38 #include "langhooks.h"
40 /* Global type table. FIXME lto, it should be possible to re-use some
41 of the type hashing routines in tree.c (type_hash_canon, type_hash_lookup,
42 etc.), but those assume that types were built with the various
43 build_*_type routines, which is not the case with the streamer. */
44 static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
45 htab_t gimple_types;
46 static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
47 htab_t gimple_canonical_types;
48 static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
49 htab_t type_hash_cache;
50 static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
51 htab_t canonical_type_hash_cache;
53 /* Global type comparison cache. This is keyed by TYPE_UID for space
54 efficiency and thus cannot use, and does not need, GC. */
55 static htab_t gtc_visited;
56 static struct obstack gtc_ob;
58 /* All the tuples have their operand vector (if present) at the very bottom
59 of the structure. Therefore, the offset required to find the
60 operands vector is the size of the structure minus the size of the
61 1-element tree array at the end (see gimple_ops). */
62 #define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
63 (HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
64 EXPORTED_CONST size_t gimple_ops_offset_[] = {
65 #include "gsstruct.def"
69 #define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof (struct STRUCT),
70 static const size_t gsstruct_code_size[] = {
71 #include "gsstruct.def"
75 #define DEFGSCODE(SYM, NAME, GSSCODE) NAME,
76 const char *const gimple_code_name[] = {
81 #define DEFGSCODE(SYM, NAME, GSSCODE) GSSCODE,
82 EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
87 #ifdef GATHER_STATISTICS
90 int gimple_alloc_counts[(int) gimple_alloc_kind_all];
91 int gimple_alloc_sizes[(int) gimple_alloc_kind_all];
93 /* Keep in sync with gimple.h:enum gimple_alloc_kind. */
94 static const char * const gimple_alloc_kind_names[] = {
102 #endif /* GATHER_STATISTICS */
104 /* A cache of gimple_seq objects. Sequences are created and destroyed
105 fairly often during gimplification. */
106 static GTY ((deletable)) struct gimple_seq_d *gimple_seq_cache;
108 /* Private API manipulation functions shared only with some
109 other files in the middle end. */
110 extern void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
111 extern void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);
113 /* Gimple tuple constructors.
114 Note: Any constructor taking a ``gimple_seq'' as a parameter can
115 be passed a NULL to start with an empty sequence. */
117 /* Set the code for statement G to CODE. */
120 gimple_set_code (gimple g, enum gimple_code code)
122 g->gsbase.code = code;
125 /* Return the number of bytes needed to hold a GIMPLE statement with
126 code CODE. */
129 gimple_size (enum gimple_code code)
131 return gsstruct_code_size[gss_for_code (code)];
134 /* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
135 operands. */
138 gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
143 size = gimple_size (code);
144 if (num_ops > 0)
145 size += sizeof (tree) * (num_ops - 1);
147 #ifdef GATHER_STATISTICS
149 enum gimple_alloc_kind kind = gimple_alloc_kind (code);
150 gimple_alloc_counts[(int) kind]++;
151 gimple_alloc_sizes[(int) kind] += size;
155 stmt = ggc_alloc_cleared_gimple_statement_d_stat (size PASS_MEM_STAT);
156 gimple_set_code (stmt, code);
157 gimple_set_num_ops (stmt, num_ops);
159 /* Do not call gimple_set_modified here as it has other side
160 effects and this tuple is still not completely built. */
161 stmt->gsbase.modified = 1;
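/* Illustration (not part of the original source): a GIMPLE_ASSIGN allocated
   with num_ops == 3 occupies gimple_size (GIMPLE_ASSIGN) plus 2 * sizeof (tree)
   bytes, because the base structure already embeds a one-element operand
   array (see gimple_ops_offset_ above). */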
166 /* Set SUBCODE to be the code of the expression computed by statement G. */
169 gimple_set_subcode (gimple g, unsigned subcode)
171 /* We only have 16 bits for the RHS code. Assert that we are not
172 overflowing it. */
173 gcc_assert (subcode < (1 << 16));
174 g->gsbase.subcode = subcode;
179 /* Build a tuple with operands. CODE is the statement to build (which
180 must be one of the GIMPLE_WITH_OPS tuples). SUBCODE is the sub-code
181 for the new tuple. NUM_OPS is the number of operands to allocate. */
183 #define gimple_build_with_ops(c, s, n) \
184 gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)
187 gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
188 unsigned num_ops MEM_STAT_DECL)
190 gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
191 gimple_set_subcode (s, subcode);
197 /* Build a GIMPLE_RETURN statement returning RETVAL. */
200 gimple_build_return (tree retval)
202 gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 1);
204 gimple_return_set_retval (s, retval);
208 /* Reset alias information on call S. */
211 gimple_call_reset_alias_info (gimple s)
213 if (gimple_call_flags (s) & ECF_CONST)
214 memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
215 else
216 pt_solution_reset (gimple_call_use_set (s));
217 if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
218 memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
219 else
220 pt_solution_reset (gimple_call_clobber_set (s));
223 /* Helper for gimple_build_call, gimple_build_call_vec and
224 gimple_build_call_from_tree. Build the basic components of a
225 GIMPLE_CALL statement to function FN with NARGS arguments. */
228 gimple_build_call_1 (tree fn, unsigned nargs)
230 gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
231 if (TREE_CODE (fn) == FUNCTION_DECL)
232 fn = build_fold_addr_expr (fn);
233 gimple_set_op (s, 1, fn);
234 gimple_call_set_fntype (s, TREE_TYPE (TREE_TYPE (fn)));
235 gimple_call_reset_alias_info (s);
240 /* Build a GIMPLE_CALL statement to function FN with the arguments
241 specified in vector ARGS. */
244 gimple_build_call_vec (tree fn, VEC(tree, heap) *args)
247 unsigned nargs = VEC_length (tree, args);
248 gimple call = gimple_build_call_1 (fn, nargs);
250 for (i = 0; i < nargs; i++)
251 gimple_call_set_arg (call, i, VEC_index (tree, args, i));
257 /* Build a GIMPLE_CALL statement to function FN. NARGS is the number of
258 arguments. The ... are the arguments. */
261 gimple_build_call (tree fn, unsigned nargs, ...)
267 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
269 call = gimple_build_call_1 (fn, nargs);
271 va_start (ap, nargs);
272 for (i = 0; i < nargs; i++)
273 gimple_call_set_arg (call, i, va_arg (ap, tree));
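/* Illustrative use (not part of the original source): to build the call
   FNDECL (A, B), where fndecl, a and b are hypothetical trees already in
   GIMPLE form:

     gimple call = gimple_build_call (fndecl, 2, a, b);
     gimple_call_set_lhs (call, lhs);

   The second line is only needed when the call's return value is used. */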
280 /* Build a GIMPLE_CALL statement from CALL_EXPR T. Note that T is
281 assumed to be in GIMPLE form already. Minimal checking is done of
282 this fact. */
285 gimple_build_call_from_tree (tree t)
289 tree fndecl = get_callee_fndecl (t);
291 gcc_assert (TREE_CODE (t) == CALL_EXPR);
293 nargs = call_expr_nargs (t);
294 call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);
296 for (i = 0; i < nargs; i++)
297 gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));
299 gimple_set_block (call, TREE_BLOCK (t));
301 /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL. */
302 gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
303 gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
304 gimple_call_set_cannot_inline (call, CALL_CANNOT_INLINE_P (t));
305 gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
306 gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
307 gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
308 gimple_call_set_nothrow (call, TREE_NOTHROW (t));
309 gimple_set_no_warning (call, TREE_NO_WARNING (t));
315 /* Extract the operands and code for expression EXPR into *SUBCODE_P,
316 *OP1_P, *OP2_P and *OP3_P respectively. */
319 extract_ops_from_tree_1 (tree expr, enum tree_code *subcode_p, tree *op1_p,
320 tree *op2_p, tree *op3_p)
322 enum gimple_rhs_class grhs_class;
324 *subcode_p = TREE_CODE (expr);
325 grhs_class = get_gimple_rhs_class (*subcode_p);
327 if (grhs_class == GIMPLE_TERNARY_RHS)
329 *op1_p = TREE_OPERAND (expr, 0);
330 *op2_p = TREE_OPERAND (expr, 1);
331 *op3_p = TREE_OPERAND (expr, 2);
333 else if (grhs_class == GIMPLE_BINARY_RHS)
335 *op1_p = TREE_OPERAND (expr, 0);
336 *op2_p = TREE_OPERAND (expr, 1);
339 else if (grhs_class == GIMPLE_UNARY_RHS)
341 *op1_p = TREE_OPERAND (expr, 0);
345 else if (grhs_class == GIMPLE_SINGLE_RHS)
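/* Example (added for clarity, not in the original source): for EXPR 'a_1 + b_2'
   this yields *SUBCODE_P == PLUS_EXPR with *OP1_P == a_1 and *OP2_P == b_2
   (GIMPLE_BINARY_RHS); for a bare 'a_1' or '&x' the class is GIMPLE_SINGLE_RHS
   and *OP1_P is the expression itself. */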
356 /* Build a GIMPLE_ASSIGN statement.
358 LHS of the assignment.
359 RHS of the assignment which can be unary or binary. */
362 gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
364 enum tree_code subcode;
367 extract_ops_from_tree_1 (rhs, &subcode, &op1, &op2, &op3);
368 return gimple_build_assign_with_ops_stat (subcode, lhs, op1, op2, op3
373 /* Build a GIMPLE_ASSIGN statement with sub-code SUBCODE and operands
374 OP1, OP2 and OP3. If OP2 and OP3 are NULL then SUBCODE must be of
375 class GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS. */
378 gimple_build_assign_with_ops_stat (enum tree_code subcode, tree lhs, tree op1,
379 tree op2, tree op3 MEM_STAT_DECL)
384 /* Need 1 operand for LHS and 1, 2 or 3 for the RHS (depending on the
385 code). */
386 num_ops = get_gimple_rhs_num_ops (subcode) + 1;
388 p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
390 gimple_assign_set_lhs (p, lhs);
391 gimple_assign_set_rhs1 (p, op1);
394 gcc_assert (num_ops > 2);
395 gimple_assign_set_rhs2 (p, op2);
400 gcc_assert (num_ops > 3);
401 gimple_assign_set_rhs3 (p, op3);
408 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
410 DST/SRC are the destination and source respectively. You can pass
411 ungimplified trees in DST or SRC, in which case they will be
412 converted to a gimple operand if necessary.
414 This function returns the newly created GIMPLE_ASSIGN tuple. */
417 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
419 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
420 gimplify_and_add (t, seq_p);
422 return gimple_seq_last_stmt (*seq_p);
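/* Illustrative use (not part of the original source): appending 'x = y + z'
   to a sequence, where x, y and z are hypothetical GIMPLE operands:

     gimple_seq seq = NULL;
     gimple assign = gimplify_assign (x, build2 (PLUS_EXPR, TREE_TYPE (x), y, z),
                                      &seq);

   The RHS is gimplified as needed and the new GIMPLE_ASSIGN is returned. */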
426 /* Build a GIMPLE_COND statement.
428 PRED_CODE is the comparison code used to compare LHS and RHS.
429 T_LABEL is the label to jump to if the condition is true.
430 F_LABEL is the label to jump to otherwise. */
433 gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
434 tree t_label, tree f_label)
438 gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
439 p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
440 gimple_cond_set_lhs (p, lhs);
441 gimple_cond_set_rhs (p, rhs);
442 gimple_cond_set_true_label (p, t_label);
443 gimple_cond_set_false_label (p, f_label);
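/* Illustrative use (not part of the original source): 'if (a_1 < b_2) goto
   L_TRUE; else goto L_FALSE;' is built as

     gimple cond = gimple_build_cond (LT_EXPR, a_1, b_2, l_true, l_false);

   where a_1, b_2, l_true and l_false are hypothetical operands and labels. */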
448 /* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND. */
451 gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
452 tree *lhs_p, tree *rhs_p)
454 gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
455 || TREE_CODE (cond) == TRUTH_NOT_EXPR
456 || is_gimple_min_invariant (cond)
457 || SSA_VAR_P (cond));
459 extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);
461 /* Canonicalize conditionals of the form 'if (!VAL)'. */
462 if (*code_p == TRUTH_NOT_EXPR)
464 *code_p = EQ_EXPR;
465 gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
466 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
468 /* Canonicalize conditionals of the form 'if (VAL)'. */
469 else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
471 *code_p = NE_EXPR;
472 gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
473 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
478 /* Build a GIMPLE_COND statement from the conditional expression tree
479 COND. T_LABEL and F_LABEL are as in gimple_build_cond. */
482 gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
487 gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
488 return gimple_build_cond (code, lhs, rhs, t_label, f_label);
491 /* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
492 boolean expression tree COND. */
495 gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
500 gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
501 gimple_cond_set_condition (stmt, code, lhs, rhs);
504 /* Build a GIMPLE_LABEL statement for LABEL. */
507 gimple_build_label (tree label)
509 gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
510 gimple_label_set_label (p, label);
514 /* Build a GIMPLE_GOTO statement to label DEST. */
517 gimple_build_goto (tree dest)
519 gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
520 gimple_goto_set_dest (p, dest);
525 /* Build a GIMPLE_NOP statement. */
528 gimple_build_nop (void)
530 return gimple_alloc (GIMPLE_NOP, 0);
534 /* Build a GIMPLE_BIND statement.
535 VARS are the variables in BODY.
536 BLOCK is the containing block. */
539 gimple_build_bind (tree vars, gimple_seq body, tree block)
541 gimple p = gimple_alloc (GIMPLE_BIND, 0);
542 gimple_bind_set_vars (p, vars);
544 gimple_bind_set_body (p, body);
546 gimple_bind_set_block (p, block);
550 /* Helper function to set the simple fields of an asm stmt.
552 STRING is a pointer to a string that is the asm block's assembly code.
553 NINPUTS is the number of register inputs.
554 NOUTPUTS is the number of register outputs.
555 NCLOBBERS is the number of clobbered registers.
559 gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
560 unsigned nclobbers, unsigned nlabels)
563 int size = strlen (string);
565 /* ASMs with labels cannot have outputs. This should have been
566 enforced by the front end. */
567 gcc_assert (nlabels == 0 || noutputs == 0);
569 p = gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
570 ninputs + noutputs + nclobbers + nlabels);
572 p->gimple_asm.ni = ninputs;
573 p->gimple_asm.no = noutputs;
574 p->gimple_asm.nc = nclobbers;
575 p->gimple_asm.nl = nlabels;
576 p->gimple_asm.string = ggc_alloc_string (string, size);
578 #ifdef GATHER_STATISTICS
579 gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;
585 /* Build a GIMPLE_ASM statement.
587 STRING is the assembly code.
588 NINPUTS is the number of register inputs.
589 NOUTPUTS is the number of register outputs.
590 NCLOBBERS is the number of clobbered registers.
591 INPUTS is a vector of the input register parameters.
592 OUTPUTS is a vector of the output register parameters.
593 CLOBBERS is a vector of the clobbered register parameters.
594 LABELS is a vector of destination labels. */
597 gimple_build_asm_vec (const char *string, VEC(tree,gc)* inputs,
598 VEC(tree,gc)* outputs, VEC(tree,gc)* clobbers,
599 VEC(tree,gc)* labels)
604 p = gimple_build_asm_1 (string,
605 VEC_length (tree, inputs),
606 VEC_length (tree, outputs),
607 VEC_length (tree, clobbers),
608 VEC_length (tree, labels));
610 for (i = 0; i < VEC_length (tree, inputs); i++)
611 gimple_asm_set_input_op (p, i, VEC_index (tree, inputs, i));
613 for (i = 0; i < VEC_length (tree, outputs); i++)
614 gimple_asm_set_output_op (p, i, VEC_index (tree, outputs, i));
616 for (i = 0; i < VEC_length (tree, clobbers); i++)
617 gimple_asm_set_clobber_op (p, i, VEC_index (tree, clobbers, i));
619 for (i = 0; i < VEC_length (tree, labels); i++)
620 gimple_asm_set_label_op (p, i, VEC_index (tree, labels, i));
625 /* Build a GIMPLE_CATCH statement.
627 TYPES are the catch types.
628 HANDLER is the exception handler. */
631 gimple_build_catch (tree types, gimple_seq handler)
633 gimple p = gimple_alloc (GIMPLE_CATCH, 0);
634 gimple_catch_set_types (p, types);
636 gimple_catch_set_handler (p, handler);
641 /* Build a GIMPLE_EH_FILTER statement.
643 TYPES are the filter's types.
644 FAILURE is the filter's failure action. */
647 gimple_build_eh_filter (tree types, gimple_seq failure)
649 gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
650 gimple_eh_filter_set_types (p, types);
652 gimple_eh_filter_set_failure (p, failure);
657 /* Build a GIMPLE_EH_MUST_NOT_THROW statement. */
660 gimple_build_eh_must_not_throw (tree decl)
662 gimple p = gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0);
664 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
665 gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
666 gimple_eh_must_not_throw_set_fndecl (p, decl);
671 /* Build a GIMPLE_TRY statement.
673 EVAL is the expression to evaluate.
674 CLEANUP is the cleanup expression.
675 KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
676 whether this is a try/catch or a try/finally respectively. */
679 gimple_build_try (gimple_seq eval, gimple_seq cleanup,
680 enum gimple_try_flags kind)
684 gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
685 p = gimple_alloc (GIMPLE_TRY, 0);
686 gimple_set_subcode (p, kind);
688 gimple_try_set_eval (p, eval);
690 gimple_try_set_cleanup (p, cleanup);
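/* Illustrative use (not part of the original source): wrapping a hypothetical
   body_seq so that cleanup_seq always runs, as for a try/finally region:

     gimple try_stmt = gimple_build_try (body_seq, cleanup_seq,
                                         GIMPLE_TRY_FINALLY);  */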
695 /* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.
697 CLEANUP is the cleanup expression. */
700 gimple_build_wce (gimple_seq cleanup)
702 gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
704 gimple_wce_set_cleanup (p, cleanup);
710 /* Build a GIMPLE_RESX statement. */
713 gimple_build_resx (int region)
715 gimple p = gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0);
716 p->gimple_eh_ctrl.region = region;
721 /* The helper for constructing a gimple switch statement.
722 INDEX is the switch's index.
723 NLABELS is the number of labels in the switch excluding the default.
724 DEFAULT_LABEL is the default label for the switch statement. */
727 gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
729 /* nlabels + 1 default label + 1 index. */
730 gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
731 1 + (default_label != NULL) + nlabels);
732 gimple_switch_set_index (p, index);
734 gimple_switch_set_default_label (p, default_label);
739 /* Build a GIMPLE_SWITCH statement.
741 INDEX is the switch's index.
742 NLABELS is the number of labels in the switch excluding the DEFAULT_LABEL.
743 ... are the labels excluding the default. */
746 gimple_build_switch (unsigned nlabels, tree index, tree default_label, ...)
750 gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);
752 /* Store the rest of the labels. */
753 va_start (al, default_label);
754 offset = (default_label != NULL);
755 for (i = 0; i < nlabels; i++)
756 gimple_switch_set_label (p, i + offset, va_arg (al, tree));
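/* Illustrative use (not part of the original source): a two-case switch on
   INDEX with hypothetical CASE_LABEL_EXPRs c1 and c2 and default label d:

     gimple s = gimple_build_switch (2, index, d, c1, c2);  */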
763 /* Build a GIMPLE_SWITCH statement.
765 INDEX is the switch's index.
766 DEFAULT_LABEL is the default label
767 ARGS is a vector of labels excluding the default. */
770 gimple_build_switch_vec (tree index, tree default_label, VEC(tree, heap) *args)
772 unsigned i, offset, nlabels = VEC_length (tree, args);
773 gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);
775 /* Copy the labels from the vector to the switch statement. */
776 offset = (default_label != NULL);
777 for (i = 0; i < nlabels; i++)
778 gimple_switch_set_label (p, i + offset, VEC_index (tree, args, i));
783 /* Build a GIMPLE_EH_DISPATCH statement. */
786 gimple_build_eh_dispatch (int region)
788 gimple p = gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0);
789 p->gimple_eh_ctrl.region = region;
793 /* Build a new GIMPLE_DEBUG_BIND statement.
795 VAR is bound to VALUE; block and location are taken from STMT. */
798 gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
800 gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
801 (unsigned)GIMPLE_DEBUG_BIND, 2
804 gimple_debug_bind_set_var (p, var);
805 gimple_debug_bind_set_value (p, value);
808 gimple_set_block (p, gimple_block (stmt));
809 gimple_set_location (p, gimple_location (stmt));
816 /* Build a GIMPLE_OMP_CRITICAL statement.
818 BODY is the sequence of statements that only one thread may execute at a time.
819 NAME is an optional identifier for this critical block.
822 gimple_build_omp_critical (gimple_seq body, tree name)
824 gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
825 gimple_omp_critical_set_name (p, name);
827 gimple_omp_set_body (p, body);
832 /* Build a GIMPLE_OMP_FOR statement.
834 BODY is the sequence of statements inside the for loop.
835 CLAUSES are any of the OMP loop construct's clauses: private, firstprivate,
836 lastprivate, reductions, ordered, schedule, and nowait.
837 COLLAPSE is the collapse count.
838 PRE_BODY is the sequence of statements that are loop invariant. */
841 gimple_build_omp_for (gimple_seq body, tree clauses, size_t collapse,
844 gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
846 gimple_omp_set_body (p, body);
847 gimple_omp_for_set_clauses (p, clauses);
848 p->gimple_omp_for.collapse = collapse;
849 p->gimple_omp_for.iter
850 = ggc_alloc_cleared_vec_gimple_omp_for_iter (collapse);
852 gimple_omp_for_set_pre_body (p, pre_body);
858 /* Build a GIMPLE_OMP_PARALLEL statement.
860 BODY is the sequence of statements which are executed in parallel.
861 CLAUSES are the OMP parallel construct's clauses.
862 CHILD_FN is the function created for the parallel threads to execute.
863 DATA_ARG are the shared data argument(s). */
866 gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
869 gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
871 gimple_omp_set_body (p, body);
872 gimple_omp_parallel_set_clauses (p, clauses);
873 gimple_omp_parallel_set_child_fn (p, child_fn);
874 gimple_omp_parallel_set_data_arg (p, data_arg);
880 /* Build a GIMPLE_OMP_TASK statement.
882 BODY is the sequence of statements which are executed by the explicit task.
883 CLAUSES are the OMP task construct's clauses.
884 CHILD_FN is the function created for the task to execute.
885 DATA_ARG are the shared data argument(s).
886 COPY_FN is the optional function for firstprivate initialization.
887 ARG_SIZE and ARG_ALIGN are size and alignment of the data block. */
890 gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
891 tree data_arg, tree copy_fn, tree arg_size,
894 gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
896 gimple_omp_set_body (p, body);
897 gimple_omp_task_set_clauses (p, clauses);
898 gimple_omp_task_set_child_fn (p, child_fn);
899 gimple_omp_task_set_data_arg (p, data_arg);
900 gimple_omp_task_set_copy_fn (p, copy_fn);
901 gimple_omp_task_set_arg_size (p, arg_size);
902 gimple_omp_task_set_arg_align (p, arg_align);
908 /* Build a GIMPLE_OMP_SECTION statement for a sections statement.
910 BODY is the sequence of statements in the section. */
913 gimple_build_omp_section (gimple_seq body)
915 gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
917 gimple_omp_set_body (p, body);
923 /* Build a GIMPLE_OMP_MASTER statement.
925 BODY is the sequence of statements to be executed by just the master. */
928 gimple_build_omp_master (gimple_seq body)
930 gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
932 gimple_omp_set_body (p, body);
938 /* Build a GIMPLE_OMP_CONTINUE statement.
940 CONTROL_DEF is the definition of the control variable.
941 CONTROL_USE is the use of the control variable. */
944 gimple_build_omp_continue (tree control_def, tree control_use)
946 gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
947 gimple_omp_continue_set_control_def (p, control_def);
948 gimple_omp_continue_set_control_use (p, control_use);
952 /* Build a GIMPLE_OMP_ORDERED statement.
954 BODY is the sequence of statements inside a loop that will be executed in
955 sequence. */
958 gimple_build_omp_ordered (gimple_seq body)
960 gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
962 gimple_omp_set_body (p, body);
968 /* Build a GIMPLE_OMP_RETURN statement.
969 WAIT_P is true if this is a non-waiting return. */
972 gimple_build_omp_return (bool wait_p)
974 gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
975 if (wait_p)
976 gimple_omp_return_set_nowait (p);
982 /* Build a GIMPLE_OMP_SECTIONS statement.
984 BODY is a sequence of section statements.
985 CLAUSES are any of the OMP sections construct's clauses: private,
986 firstprivate, lastprivate, reduction, and nowait. */
989 gimple_build_omp_sections (gimple_seq body, tree clauses)
991 gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
993 gimple_omp_set_body (p, body);
994 gimple_omp_sections_set_clauses (p, clauses);
1000 /* Build a GIMPLE_OMP_SECTIONS_SWITCH. */
1003 gimple_build_omp_sections_switch (void)
1005 return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
1009 /* Build a GIMPLE_OMP_SINGLE statement.
1011 BODY is the sequence of statements that will be executed once.
1012 CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
1013 copyprivate, nowait. */
1016 gimple_build_omp_single (gimple_seq body, tree clauses)
1018 gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
1020 gimple_omp_set_body (p, body);
1021 gimple_omp_single_set_clauses (p, clauses);
1027 /* Build a GIMPLE_OMP_ATOMIC_LOAD statement. */
1030 gimple_build_omp_atomic_load (tree lhs, tree rhs)
1032 gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
1033 gimple_omp_atomic_load_set_lhs (p, lhs);
1034 gimple_omp_atomic_load_set_rhs (p, rhs);
1038 /* Build a GIMPLE_OMP_ATOMIC_STORE statement.
1040 VAL is the value we are storing. */
1043 gimple_build_omp_atomic_store (tree val)
1045 gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
1046 gimple_omp_atomic_store_set_val (p, val);
1050 /* Build a GIMPLE_PREDICT statement. PREDICT is one of the predictors from
1051 predict.def, OUTCOME is NOT_TAKEN or TAKEN. */
1054 gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
1056 gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
1057 /* Ensure all the predictors fit into the lower bits of the subcode. */
1058 gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
1059 gimple_predict_set_predictor (p, predictor);
1060 gimple_predict_set_outcome (p, outcome);
1064 #if defined ENABLE_GIMPLE_CHECKING
1065 /* Complain of a gimple type mismatch and die. */
1068 gimple_check_failed (const_gimple gs, const char *file, int line,
1069 const char *function, enum gimple_code code,
1070 enum tree_code subcode)
1072 internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
1073 gimple_code_name[code],
1074 tree_code_name[subcode],
1075 gimple_code_name[gimple_code (gs)],
1076 gs->gsbase.subcode > 0
1077 ? tree_code_name[gs->gsbase.subcode]
1079 function, trim_filename (file), line);
1081 #endif /* ENABLE_GIMPLE_CHECKING */
1084 /* Allocate a new GIMPLE sequence in GC memory and return it. If
1085 there are free sequences in GIMPLE_SEQ_CACHE return one of those
1086 instead. */
1089 gimple_seq_alloc (void)
1091 gimple_seq seq = gimple_seq_cache;
1094 gimple_seq_cache = gimple_seq_cache->next_free;
1095 gcc_assert (gimple_seq_cache != seq);
1096 memset (seq, 0, sizeof (*seq));
1100 seq = ggc_alloc_cleared_gimple_seq_d ();
1101 #ifdef GATHER_STATISTICS
1102 gimple_alloc_counts[(int) gimple_alloc_kind_seq]++;
1103 gimple_alloc_sizes[(int) gimple_alloc_kind_seq] += sizeof (*seq);
1110 /* Return SEQ to the free pool of GIMPLE sequences. */
1113 gimple_seq_free (gimple_seq seq)
1118 gcc_assert (gimple_seq_first (seq) == NULL);
1119 gcc_assert (gimple_seq_last (seq) == NULL);
1121 /* If this triggers, it's a sign that the same list is being freed
1122 twice. */
1123 gcc_assert (seq != gimple_seq_cache || gimple_seq_cache == NULL);
1125 /* Add SEQ to the pool of free sequences. */
1126 seq->next_free = gimple_seq_cache;
1127 gimple_seq_cache = seq;
1131 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
1132 *SEQ_P is NULL, a new sequence is allocated. */
1135 gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
1137 gimple_stmt_iterator si;
1143 *seq_p = gimple_seq_alloc ();
1145 si = gsi_last (*seq_p);
1146 gsi_insert_after (&si, gs, GSI_NEW_STMT);
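/* Illustrative use (not part of the original source): building a sequence from
   scratch, where stmt1 and stmt2 are hypothetical statements:

     gimple_seq seq = NULL;
     gimple_seq_add_stmt (&seq, stmt1);
     gimple_seq_add_stmt (&seq, stmt2);

   The first call allocates the sequence because *SEQ_P starts out NULL. */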
1150 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
1151 NULL, a new sequence is allocated. */
1154 gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
1156 gimple_stmt_iterator si;
1162 *dst_p = gimple_seq_alloc ();
1164 si = gsi_last (*dst_p);
1165 gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
1169 /* Helper function of empty_body_p. Return true if STMT is an empty
1170 statement. */
1173 empty_stmt_p (gimple stmt)
1175 if (gimple_code (stmt) == GIMPLE_NOP)
1177 if (gimple_code (stmt) == GIMPLE_BIND)
1178 return empty_body_p (gimple_bind_body (stmt));
1183 /* Return true if BODY contains nothing but empty statements. */
1186 empty_body_p (gimple_seq body)
1188 gimple_stmt_iterator i;
1190 if (gimple_seq_empty_p (body))
1192 for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
1193 if (!empty_stmt_p (gsi_stmt (i))
1194 && !is_gimple_debug (gsi_stmt (i)))
1201 /* Perform a deep copy of sequence SRC and return the result. */
1204 gimple_seq_copy (gimple_seq src)
1206 gimple_stmt_iterator gsi;
1207 gimple_seq new_seq = gimple_seq_alloc ();
1210 for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
1212 stmt = gimple_copy (gsi_stmt (gsi));
1213 gimple_seq_add_stmt (&new_seq, stmt);
1220 /* Walk all the statements in the sequence SEQ calling walk_gimple_stmt
1221 on each one. WI is as in walk_gimple_stmt.
1223 If walk_gimple_stmt returns non-NULL, the walk is stopped, the
1224 value is stored in WI->CALLBACK_RESULT and the statement that
1225 produced the value is returned.
1227 Otherwise, all the statements are walked and NULL returned. */
1230 walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
1231 walk_tree_fn callback_op, struct walk_stmt_info *wi)
1233 gimple_stmt_iterator gsi;
1235 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
1237 tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
1240 /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
1241 to hold it. */
1242 gcc_assert (wi);
1243 wi->callback_result = ret;
1244 return gsi_stmt (gsi);
1249 wi->callback_result = NULL_TREE;
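/* Illustrative use (not part of the original source): walking a sequence with
   a hypothetical statement callback my_stmt_cb and no operand callback:

     struct walk_stmt_info wi;
     memset (&wi, 0, sizeof (wi));
     walk_gimple_seq (seq, my_stmt_cb, NULL, &wi);

   A non-NULL return value is the statement that stopped the walk; the value
   produced by the callback is then available in wi.callback_result. */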
1255 /* Helper function for walk_gimple_stmt. Walk operands of a GIMPLE_ASM. */
1258 walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
1259 struct walk_stmt_info *wi)
1263 const char **oconstraints;
1265 const char *constraint;
1266 bool allows_mem, allows_reg, is_inout;
1268 noutputs = gimple_asm_noutputs (stmt);
1269 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
1274 for (i = 0; i < noutputs; i++)
1276 op = gimple_asm_output_op (stmt, i);
1277 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
1278 oconstraints[i] = constraint;
1279 parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg,
1282 wi->val_only = (allows_reg || !allows_mem);
1283 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
1288 n = gimple_asm_ninputs (stmt);
1289 for (i = 0; i < n; i++)
1291 op = gimple_asm_input_op (stmt, i);
1292 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
1293 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1294 oconstraints, &allows_mem, &allows_reg);
1297 wi->val_only = (allows_reg || !allows_mem);
1298 /* Although input "m" is not really an LHS, we need an lvalue. */
1299 wi->is_lhs = !wi->val_only;
1301 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
1309 wi->val_only = true;
1312 n = gimple_asm_nlabels (stmt);
1313 for (i = 0; i < n; i++)
1315 op = gimple_asm_label_op (stmt, i);
1316 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
1325 /* Helper function of WALK_GIMPLE_STMT. Walk every tree operand in
1326 STMT. CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.
1328 CALLBACK_OP is called on each operand of STMT via walk_tree.
1329 Additional parameters to walk_tree must be stored in WI. For each operand
1330 OP, walk_tree is called as:
1332 walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)
1334 If CALLBACK_OP returns non-NULL for an operand, the remaining
1335 operands are not scanned.
1337 The return value is that returned by the last call to walk_tree, or
1338 NULL_TREE if no CALLBACK_OP is specified. */
1341 walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
1342 struct walk_stmt_info *wi)
1344 struct pointer_set_t *pset = (wi) ? wi->pset : NULL;
1346 tree ret = NULL_TREE;
1348 switch (gimple_code (stmt))
1351 /* Walk the RHS operands. If the LHS is of a non-renamable type or
1352 is a register variable, we may use a COMPONENT_REF on the RHS. */
1355 tree lhs = gimple_assign_lhs (stmt);
1357 = (is_gimple_reg_type (TREE_TYPE (lhs)) && !is_gimple_reg (lhs))
1358 || !gimple_assign_single_p (stmt);
1361 for (i = 1; i < gimple_num_ops (stmt); i++)
1363 ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
1369 /* Walk the LHS. If the RHS is appropriate for a memory, we
1370 may use a COMPONENT_REF on the LHS. */
1373 /* If the RHS has more than 1 operand, it is not appropriate
1374 for the memory. */
1375 wi->val_only = !is_gimple_mem_rhs (gimple_assign_rhs1 (stmt))
1376 || !gimple_assign_single_p (stmt);
1380 ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
1386 wi->val_only = true;
1395 wi->val_only = true;
1398 ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
1402 ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
1406 for (i = 0; i < gimple_call_num_args (stmt); i++)
1409 wi->val_only = is_gimple_reg_type (gimple_call_arg (stmt, i));
1410 ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
1416 if (gimple_call_lhs (stmt))
1421 wi->val_only = is_gimple_reg_type (gimple_call_lhs (stmt));
1424 ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
1432 wi->val_only = true;
1437 ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
1443 case GIMPLE_EH_FILTER:
1444 ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
1451 ret = walk_gimple_asm (stmt, callback_op, wi);
1456 case GIMPLE_OMP_CONTINUE:
1457 ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
1458 callback_op, wi, pset);
1462 ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
1463 callback_op, wi, pset);
1468 case GIMPLE_OMP_CRITICAL:
1469 ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
1475 case GIMPLE_OMP_FOR:
1476 ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
1480 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1482 ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
1486 ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
1490 ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
1494 ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
1501 case GIMPLE_OMP_PARALLEL:
1502 ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
1506 ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
1510 ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
1516 case GIMPLE_OMP_TASK:
1517 ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
1521 ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
1525 ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
1529 ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
1533 ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
1537 ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
1543 case GIMPLE_OMP_SECTIONS:
1544 ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
1549 ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
1556 case GIMPLE_OMP_SINGLE:
1557 ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
1563 case GIMPLE_OMP_ATOMIC_LOAD:
1564 ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
1569 ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
1575 case GIMPLE_OMP_ATOMIC_STORE:
1576 ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
1582 /* Tuples that do not have operands. */
1585 case GIMPLE_OMP_RETURN:
1586 case GIMPLE_PREDICT:
1591 enum gimple_statement_structure_enum gss;
1592 gss = gimple_statement_structure (stmt);
1593 if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
1594 for (i = 0; i < gimple_num_ops (stmt); i++)
1596 ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
1608 /* Walk the current statement in GSI (optionally using traversal state
1609 stored in WI). If WI is NULL, no state is kept during traversal.
1610 The callback CALLBACK_STMT is called. If CALLBACK_STMT indicates
1611 that it has handled all the operands of the statement, its return
1612 value is returned. Otherwise, the return value from CALLBACK_STMT
1613 is discarded and its operands are scanned.
1615 If CALLBACK_STMT is NULL or it didn't handle the operands,
1616 CALLBACK_OP is called on each operand of the statement via
1617 walk_gimple_op. If walk_gimple_op returns non-NULL for any
1618 operand, the remaining operands are not scanned. In this case, the
1619 return value from CALLBACK_OP is returned.
1621 In any other case, NULL_TREE is returned. */
1624 walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
1625 walk_tree_fn callback_op, struct walk_stmt_info *wi)
1629 gimple stmt = gsi_stmt (*gsi);
1634 if (wi && wi->want_locations && gimple_has_location (stmt))
1635 input_location = gimple_location (stmt);
1639 /* Invoke the statement callback. Return if the callback handled
1640 all of STMT operands by itself. */
1643 bool handled_ops = false;
1644 tree_ret = callback_stmt (gsi, &handled_ops, wi);
1648 /* If CALLBACK_STMT did not handle operands, it should not have
1649 a value to return. */
1650 gcc_assert (tree_ret == NULL);
1652 /* Re-read stmt in case the callback changed it. */
1653 stmt = gsi_stmt (*gsi);
1656 /* If CALLBACK_OP is defined, invoke it on every operand of STMT. */
1659 tree_ret = walk_gimple_op (stmt, callback_op, wi);
1664 /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them. */
1665 switch (gimple_code (stmt))
1668 ret = walk_gimple_seq (gimple_bind_body (stmt), callback_stmt,
1671 return wi->callback_result;
1675 ret = walk_gimple_seq (gimple_catch_handler (stmt), callback_stmt,
1678 return wi->callback_result;
1681 case GIMPLE_EH_FILTER:
1682 ret = walk_gimple_seq (gimple_eh_filter_failure (stmt), callback_stmt,
1685 return wi->callback_result;
1689 ret = walk_gimple_seq (gimple_try_eval (stmt), callback_stmt, callback_op,
1692 return wi->callback_result;
1694 ret = walk_gimple_seq (gimple_try_cleanup (stmt), callback_stmt,
1697 return wi->callback_result;
1700 case GIMPLE_OMP_FOR:
1701 ret = walk_gimple_seq (gimple_omp_for_pre_body (stmt), callback_stmt,
1704 return wi->callback_result;
1707 case GIMPLE_OMP_CRITICAL:
1708 case GIMPLE_OMP_MASTER:
1709 case GIMPLE_OMP_ORDERED:
1710 case GIMPLE_OMP_SECTION:
1711 case GIMPLE_OMP_PARALLEL:
1712 case GIMPLE_OMP_TASK:
1713 case GIMPLE_OMP_SECTIONS:
1714 case GIMPLE_OMP_SINGLE:
1715 ret = walk_gimple_seq (gimple_omp_body (stmt), callback_stmt, callback_op,
1718 return wi->callback_result;
1721 case GIMPLE_WITH_CLEANUP_EXPR:
1722 ret = walk_gimple_seq (gimple_wce_cleanup (stmt), callback_stmt,
1725 return wi->callback_result;
1729 gcc_assert (!gimple_has_substatements (stmt));
1737 /* Set sequence SEQ to be the GIMPLE body for function FNDECL. */
1740 gimple_set_body (tree fndecl, gimple_seq seq)
1742 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
1745 /* If FNDECL still does not have a function structure associated
1746 with it, then it does not make sense for it to receive a
1747 GIMPLE body. */
1748 gcc_assert (seq == NULL);
1751 fn->gimple_body = seq;
1755 /* Return the body of GIMPLE statements for function FNDECL. After the
1756 CFG pass, the function body doesn't exist anymore because it has
1757 been split up into basic blocks. In this case, it returns
1758 NULL. */
1761 gimple_body (tree fndecl)
1763 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
1764 return fn ? fn->gimple_body : NULL;
1767 /* Return true when FNDECL has a GIMPLE body, either in unlowered
1768 or CFG form. */
1770 gimple_has_body_p (tree fndecl)
1772 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
1773 return (gimple_body (fndecl) || (fn && fn->cfg));
1776 /* Detect flags from a GIMPLE_CALL. This is just like
1777 call_expr_flags, but for gimple tuples. */
1780 gimple_call_flags (const_gimple stmt)
1783 tree decl = gimple_call_fndecl (stmt);
1786 flags = flags_from_decl_or_type (decl);
1788 flags = flags_from_decl_or_type (gimple_call_fntype (stmt));
1790 if (stmt->gsbase.subcode & GF_CALL_NOTHROW)
1791 flags |= ECF_NOTHROW;
1796 /* Detects argument flags for argument number ARG on call STMT. */
1799 gimple_call_arg_flags (const_gimple stmt, unsigned arg)
1801 tree type = gimple_call_fntype (stmt);
1802 tree attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
1806 attr = TREE_VALUE (TREE_VALUE (attr));
1807 if (1 + arg >= (unsigned) TREE_STRING_LENGTH (attr))
1810 switch (TREE_STRING_POINTER (attr)[1 + arg])
1817 return EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE;
1820 return EAF_NOCLOBBER | EAF_NOESCAPE;
1823 return EAF_DIRECT | EAF_NOESCAPE;
1826 return EAF_NOESCAPE;
1834 /* Detects return flags for the call STMT. */
1837 gimple_call_return_flags (const_gimple stmt)
1840 tree attr = NULL_TREE;
1842 if (gimple_call_flags (stmt) & ECF_MALLOC)
1843 return ERF_NOALIAS;
1845 type = gimple_call_fntype (stmt);
1846 attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
1850 attr = TREE_VALUE (TREE_VALUE (attr));
1851 if (TREE_STRING_LENGTH (attr) < 1)
1854 switch (TREE_STRING_POINTER (attr)[0])
1860 return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');
1872 /* Return true if GS is a copy assignment. */
1875 gimple_assign_copy_p (gimple gs)
1877 return (gimple_assign_single_p (gs)
1878 && is_gimple_val (gimple_op (gs, 1)));
1882 /* Return true if GS is a SSA_NAME copy assignment. */
1885 gimple_assign_ssa_name_copy_p (gimple gs)
1887 return (gimple_assign_single_p (gs)
1888 && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
1889 && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
1893 /* Return true if GS is an assignment with a unary RHS, but the
1894 operator has no effect on the assigned value. The logic is adapted
1895 from STRIP_NOPS. This predicate is intended to be used in tuplifying
1896 instances in which STRIP_NOPS was previously applied to the RHS of
1897 an assignment.
1899 NOTE: In the use cases that led to the creation of this function
1900 and of gimple_assign_single_p, it is typical to test for either
1901 condition and to proceed in the same manner. In each case, the
1902 assigned value is represented by the single RHS operand of the
1903 assignment. I suspect there may be cases where gimple_assign_copy_p,
1904 gimple_assign_single_p, or equivalent logic is used where a similar
1905 treatment of unary NOPs is appropriate. */
1908 gimple_assign_unary_nop_p (gimple gs)
1910 return (is_gimple_assign (gs)
1911 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
1912 || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
1913 && gimple_assign_rhs1 (gs) != error_mark_node
1914 && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
1915 == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
1918 /* Set BB to be the basic block holding G. */
1921 gimple_set_bb (gimple stmt, basic_block bb)
1923 stmt->gsbase.bb = bb;
1925 /* If the statement is a label, add the label to block-to-labels map
1926 so that we can speed up edge creation for GIMPLE_GOTOs. */
1927 if (cfun->cfg && gimple_code (stmt) == GIMPLE_LABEL)
1932 t = gimple_label_label (stmt);
1933 uid = LABEL_DECL_UID (t);
1936 unsigned old_len = VEC_length (basic_block, label_to_block_map);
1937 LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
1938 if (old_len <= (unsigned) uid)
1940 unsigned new_len = 3 * uid / 2 + 1;
1942 VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
1947 VEC_replace (basic_block, label_to_block_map, uid, bb);
1952 /* Modify the RHS of the assignment pointed-to by GSI using the
1953 operands in the expression tree EXPR.
1955 NOTE: The statement pointed-to by GSI may be reallocated if it
1956 did not have enough operand slots.
1958 This function is useful to convert an existing tree expression into
1959 the flat representation used for the RHS of a GIMPLE assignment.
1960 It will reallocate memory as needed to expand or shrink the number
1961 of operand slots needed to represent EXPR.
1963 NOTE: If you find yourself building a tree and then calling this
1964 function, you are most certainly doing it the slow way. It is much
1965 better to build a new assignment or to use the function
1966 gimple_assign_set_rhs_with_ops, which does not require an
1967 expression tree to be built. */
1970 gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
1972 enum tree_code subcode;
1975 extract_ops_from_tree_1 (expr, &subcode, &op1, &op2, &op3);
1976 gimple_assign_set_rhs_with_ops_1 (gsi, subcode, op1, op2, op3);
1980 /* Set the RHS of assignment statement pointed-to by GSI to CODE with
1981 operands OP1, OP2 and OP3.
1983 NOTE: The statement pointed-to by GSI may be reallocated if it
1984 did not have enough operand slots. */
1987 gimple_assign_set_rhs_with_ops_1 (gimple_stmt_iterator *gsi, enum tree_code code,
1988 tree op1, tree op2, tree op3)
1990 unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
1991 gimple stmt = gsi_stmt (*gsi);
1993 /* If the new CODE needs more operands, allocate a new statement. */
1994 if (gimple_num_ops (stmt) < new_rhs_ops + 1)
1996 tree lhs = gimple_assign_lhs (stmt);
1997 gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
1998 memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
1999 gsi_replace (gsi, new_stmt, true);
2000 stmt = new_stmt;
2002 /* The LHS needs to be reset as this also changes the SSA name
2003 on the LHS. */
2004 gimple_assign_set_lhs (stmt, lhs);
2007 gimple_set_num_ops (stmt, new_rhs_ops + 1);
2008 gimple_set_subcode (stmt, code);
2009 gimple_assign_set_rhs1 (stmt, op1);
2010 if (new_rhs_ops > 1)
2011 gimple_assign_set_rhs2 (stmt, op2);
2012 if (new_rhs_ops > 2)
2013 gimple_assign_set_rhs3 (stmt, op3);
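/* Illustrative use (not part of the original source): rewriting the statement
   at GSI so that its RHS becomes 'a_1 + b_2' (hypothetical operands):

     gimple_assign_set_rhs_with_ops_1 (&gsi, PLUS_EXPR, a_1, b_2, NULL_TREE);

   If the old statement had fewer operand slots, it is replaced as described
   above. */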
2017 /* Return the LHS of a statement that performs an assignment,
2018 either a GIMPLE_ASSIGN or a GIMPLE_CALL. Returns NULL_TREE
2019 for a call to a function that returns no value, or for a
2020 statement other than an assignment or a call. */
2023 gimple_get_lhs (const_gimple stmt)
2025 enum gimple_code code = gimple_code (stmt);
2027 if (code == GIMPLE_ASSIGN)
2028 return gimple_assign_lhs (stmt);
2029 else if (code == GIMPLE_CALL)
2030 return gimple_call_lhs (stmt);
2036 /* Set the LHS of a statement that performs an assignment,
2037 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
2040 gimple_set_lhs (gimple stmt, tree lhs)
2042 enum gimple_code code = gimple_code (stmt);
2044 if (code == GIMPLE_ASSIGN)
2045 gimple_assign_set_lhs (stmt, lhs);
2046 else if (code == GIMPLE_CALL)
2047 gimple_call_set_lhs (stmt, lhs);
2052 /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
2053 GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
2054 expression with a different value.
2056 This will update any annotations (say debug bind stmts) referring
2057 to the original LHS, so that they use the RHS instead. This is
2058 done even if NLHS and LHS are the same, for it is understood that
2059 the RHS will be modified afterwards, and NLHS will not be assigned
2060 an equivalent value.
2062 Adjusting any non-annotation uses of the LHS, if needed, is a
2063 responsibility of the caller.
2065 The effect of this call should be pretty much the same as that of
2066 inserting a copy of STMT before STMT, and then removing the
2067 original stmt, at which time gsi_remove() would have updated
2068 annotations, but using this function saves all the inserting,
2069 copying and removing. */
2072 gimple_replace_lhs (gimple stmt, tree nlhs)
2074 if (MAY_HAVE_DEBUG_STMTS)
2076 tree lhs = gimple_get_lhs (stmt);
2078 gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);
2080 insert_debug_temp_for_var_def (NULL, lhs);
2083 gimple_set_lhs (stmt, nlhs);
2086 /* Return a deep copy of statement STMT. All the operands from STMT
2087 are reallocated and copied using unshare_expr. The DEF, USE, VDEF
2088 and VUSE operand arrays are set to empty in the new copy. */
2091 gimple_copy (gimple stmt)
2093 enum gimple_code code = gimple_code (stmt);
2094 unsigned num_ops = gimple_num_ops (stmt);
2095 gimple copy = gimple_alloc (code, num_ops);
2098 /* Shallow copy all the fields from STMT. */
2099 memcpy (copy, stmt, gimple_size (code));
2101 /* If STMT has sub-statements, deep-copy them as well. */
2102 if (gimple_has_substatements (stmt))
2107 switch (gimple_code (stmt))
2110 new_seq = gimple_seq_copy (gimple_bind_body (stmt));
2111 gimple_bind_set_body (copy, new_seq);
2112 gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
2113 gimple_bind_set_block (copy, gimple_bind_block (stmt));
2117 new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
2118 gimple_catch_set_handler (copy, new_seq);
2119 t = unshare_expr (gimple_catch_types (stmt));
2120 gimple_catch_set_types (copy, t);
2123 case GIMPLE_EH_FILTER:
2124 new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
2125 gimple_eh_filter_set_failure (copy, new_seq);
2126 t = unshare_expr (gimple_eh_filter_types (stmt));
2127 gimple_eh_filter_set_types (copy, t);
2131 new_seq = gimple_seq_copy (gimple_try_eval (stmt));
2132 gimple_try_set_eval (copy, new_seq);
2133 new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
2134 gimple_try_set_cleanup (copy, new_seq);
2137 case GIMPLE_OMP_FOR:
2138 new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
2139 gimple_omp_for_set_pre_body (copy, new_seq);
2140 t = unshare_expr (gimple_omp_for_clauses (stmt));
2141 gimple_omp_for_set_clauses (copy, t);
2142 copy->gimple_omp_for.iter
2143 = ggc_alloc_vec_gimple_omp_for_iter
2144 (gimple_omp_for_collapse (stmt));
2145 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2147 gimple_omp_for_set_cond (copy, i,
2148 gimple_omp_for_cond (stmt, i));
2149 gimple_omp_for_set_index (copy, i,
2150 gimple_omp_for_index (stmt, i));
2151 t = unshare_expr (gimple_omp_for_initial (stmt, i));
2152 gimple_omp_for_set_initial (copy, i, t);
2153 t = unshare_expr (gimple_omp_for_final (stmt, i));
2154 gimple_omp_for_set_final (copy, i, t);
2155 t = unshare_expr (gimple_omp_for_incr (stmt, i));
2156 gimple_omp_for_set_incr (copy, i, t);
2160 case GIMPLE_OMP_PARALLEL:
2161 t = unshare_expr (gimple_omp_parallel_clauses (stmt));
2162 gimple_omp_parallel_set_clauses (copy, t);
2163 t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
2164 gimple_omp_parallel_set_child_fn (copy, t);
2165 t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
2166 gimple_omp_parallel_set_data_arg (copy, t);
2169 case GIMPLE_OMP_TASK:
2170 t = unshare_expr (gimple_omp_task_clauses (stmt));
2171 gimple_omp_task_set_clauses (copy, t);
2172 t = unshare_expr (gimple_omp_task_child_fn (stmt));
2173 gimple_omp_task_set_child_fn (copy, t);
2174 t = unshare_expr (gimple_omp_task_data_arg (stmt));
2175 gimple_omp_task_set_data_arg (copy, t);
2176 t = unshare_expr (gimple_omp_task_copy_fn (stmt));
2177 gimple_omp_task_set_copy_fn (copy, t);
2178 t = unshare_expr (gimple_omp_task_arg_size (stmt));
2179 gimple_omp_task_set_arg_size (copy, t);
2180 t = unshare_expr (gimple_omp_task_arg_align (stmt));
2181 gimple_omp_task_set_arg_align (copy, t);
2184 case GIMPLE_OMP_CRITICAL:
2185 t = unshare_expr (gimple_omp_critical_name (stmt));
2186 gimple_omp_critical_set_name (copy, t);
2189 case GIMPLE_OMP_SECTIONS:
2190 t = unshare_expr (gimple_omp_sections_clauses (stmt));
2191 gimple_omp_sections_set_clauses (copy, t);
2192 t = unshare_expr (gimple_omp_sections_control (stmt));
2193 gimple_omp_sections_set_control (copy, t);
2196 case GIMPLE_OMP_SINGLE:
2197 case GIMPLE_OMP_SECTION:
2198 case GIMPLE_OMP_MASTER:
2199 case GIMPLE_OMP_ORDERED:
2201 new_seq = gimple_seq_copy (gimple_omp_body (stmt));
2202 gimple_omp_set_body (copy, new_seq);
2205 case GIMPLE_WITH_CLEANUP_EXPR:
2206 new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
2207 gimple_wce_set_cleanup (copy, new_seq);
2215 /* Make copy of operands. */
2218 for (i = 0; i < num_ops; i++)
2219 gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));
2221 /* Clear out SSA operand vectors on COPY. */
2222 if (gimple_has_ops (stmt))
2224 gimple_set_def_ops (copy, NULL);
2225 gimple_set_use_ops (copy, NULL);
2228 if (gimple_has_mem_ops (stmt))
2230 gimple_set_vdef (copy, gimple_vdef (stmt));
2231 gimple_set_vuse (copy, gimple_vuse (stmt));
2234 /* SSA operands need to be updated. */
2235 gimple_set_modified (copy, true);
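/* Illustrative use (not part of the original source): duplicating a statement
   before inserting it elsewhere:

     gimple dup = gimple_copy (stmt);

   The copy shares no operand trees with STMT and is marked modified, so its
   SSA operands must be recomputed before it is used. */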
2242 /* Set the MODIFIED flag to MODIFIEDP, iff the gimple statement S has
2243 a MODIFIED field. */
2246 gimple_set_modified (gimple s, bool modifiedp)
2248 if (gimple_has_ops (s))
2249 s->gsbase.modified = (unsigned) modifiedp;
2253 /* Return true if statement S has side-effects. We consider a
2254 statement to have side effects if:
2256 - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
2257 - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS. */
2260 gimple_has_side_effects (const_gimple s)
2264 if (is_gimple_debug (s))
2267 /* We don't have to scan the arguments to check for
2268 volatile arguments, though, at present, we still
2269 do a scan to check for TREE_SIDE_EFFECTS. */
2270 if (gimple_has_volatile_ops (s))
2273 if (is_gimple_call (s))
2275 unsigned nargs = gimple_call_num_args (s);
2277 if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
2279 else if (gimple_call_flags (s) & ECF_LOOPING_CONST_OR_PURE)
2280 /* An infinite loop is considered a side effect. */
2283 if (gimple_call_lhs (s)
2284 && TREE_SIDE_EFFECTS (gimple_call_lhs (s)))
2286 gcc_assert (gimple_has_volatile_ops (s));
2290 if (TREE_SIDE_EFFECTS (gimple_call_fn (s)))
2293 for (i = 0; i < nargs; i++)
2294 if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i)))
2296 gcc_assert (gimple_has_volatile_ops (s));
2304 for (i = 0; i < gimple_num_ops (s); i++)
2305 if (TREE_SIDE_EFFECTS (gimple_op (s, i)))
2307 gcc_assert (gimple_has_volatile_ops (s));
2315 /* Return true if the RHS of statement S has side effects.
2316 We may use it to determine if it is admissible to replace
2317 an assignment or call with a copy of a previously-computed
2318 value. In such cases, side-effects due to the LHS are
2319 preserved. */
2322 gimple_rhs_has_side_effects (const_gimple s)
2326 if (is_gimple_call (s))
2328 unsigned nargs = gimple_call_num_args (s);
2330 if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
2333 /* We cannot use gimple_has_volatile_ops here,
2334 because we must ignore a volatile LHS. */
2335 if (TREE_SIDE_EFFECTS (gimple_call_fn (s))
2336 || TREE_THIS_VOLATILE (gimple_call_fn (s)))
2338 gcc_assert (gimple_has_volatile_ops (s));
2342 for (i = 0; i < nargs; i++)
2343 if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i))
2344 || TREE_THIS_VOLATILE (gimple_call_arg (s, i)))
2349 else if (is_gimple_assign (s))
2351 /* Skip the first operand, the LHS. */
2352 for (i = 1; i < gimple_num_ops (s); i++)
2353 if (TREE_SIDE_EFFECTS (gimple_op (s, i))
2354 || TREE_THIS_VOLATILE (gimple_op (s, i)))
2356 gcc_assert (gimple_has_volatile_ops (s));
2360 else if (is_gimple_debug (s))
2364 /* For statements without an LHS, examine all arguments. */
2365 for (i = 0; i < gimple_num_ops (s); i++)
2366 if (TREE_SIDE_EFFECTS (gimple_op (s, i))
2367 || TREE_THIS_VOLATILE (gimple_op (s, i)))
2369 gcc_assert (gimple_has_volatile_ops (s));
2377 /* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
2378 Return true if S can trap. When INCLUDE_MEM is true, check whether
2379 the memory operations could trap. When INCLUDE_STORES is true and
2380 S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked. */
2383 gimple_could_trap_p_1 (gimple s, bool include_mem, bool include_stores)
2385 tree t, div = NULL_TREE;
2390 unsigned i, start = (is_gimple_assign (s) && !include_stores) ? 1 : 0;
2392 for (i = start; i < gimple_num_ops (s); i++)
2393 if (tree_could_trap_p (gimple_op (s, i)))
2397 switch (gimple_code (s))
2400 return gimple_asm_volatile_p (s);
2403 t = gimple_call_fndecl (s);
2404 /* Assume that calls to weak functions may trap. */
2405 if (!t || !DECL_P (t) || DECL_WEAK (t))
2410 t = gimple_expr_type (s);
2411 op = gimple_assign_rhs_code (s);
2412 if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
2413 div = gimple_assign_rhs2 (s);
2414 return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
2415 (INTEGRAL_TYPE_P (t)
2416 && TYPE_OVERFLOW_TRAPS (t)),
2426 /* Return true if statement S can trap. */
2429 gimple_could_trap_p (gimple s)
2431 return gimple_could_trap_p_1 (s, true, true);
2434 /* Return true if RHS of a GIMPLE_ASSIGN S can trap. */
2437 gimple_assign_rhs_could_trap_p (gimple s)
2439 gcc_assert (is_gimple_assign (s));
2440 return gimple_could_trap_p_1 (s, true, false);
2444 /* Print debugging information for gimple stmts generated. */
2447 dump_gimple_statistics (void)
2449 #ifdef GATHER_STATISTICS
2450 int i, total_tuples = 0, total_bytes = 0;
2452 fprintf (stderr, "\nGIMPLE statements\n");
2453 fprintf (stderr, "Kind Stmts Bytes\n");
2454 fprintf (stderr, "---------------------------------------\n");
2455 for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
2457 fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
2458 gimple_alloc_counts[i], gimple_alloc_sizes[i]);
2459 total_tuples += gimple_alloc_counts[i];
2460 total_bytes += gimple_alloc_sizes[i];
2462 fprintf (stderr, "---------------------------------------\n");
2463 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
2464 fprintf (stderr, "---------------------------------------\n");
2466 fprintf (stderr, "No gimple statistics\n");
2471 /* Return the number of operands needed on the RHS of a GIMPLE
2472 assignment for an expression with tree code CODE. */
2475 get_gimple_rhs_num_ops (enum tree_code code)
2477 enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
2479 if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
2481 else if (rhs_class == GIMPLE_BINARY_RHS)
2483 else if (rhs_class == GIMPLE_TERNARY_RHS)
2489 #define DEFTREECODE(SYM, STRING, TYPE, NARGS) \
2491 ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS \
2492 : ((TYPE) == tcc_binary \
2493 || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS \
2494 : ((TYPE) == tcc_constant \
2495 || (TYPE) == tcc_declaration \
2496 || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS \
2497 : ((SYM) == TRUTH_AND_EXPR \
2498 || (SYM) == TRUTH_OR_EXPR \
2499 || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS \
2500 : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS \
2501 : ((SYM) == WIDEN_MULT_PLUS_EXPR \
2502 || (SYM) == WIDEN_MULT_MINUS_EXPR \
2503 || (SYM) == DOT_PROD_EXPR \
2504 || (SYM) == REALIGN_LOAD_EXPR \
2505 || (SYM) == FMA_EXPR) ? GIMPLE_TERNARY_RHS \
2506 : ((SYM) == COND_EXPR \
2507 || (SYM) == CONSTRUCTOR \
2508 || (SYM) == OBJ_TYPE_REF \
2509 || (SYM) == ASSERT_EXPR \
2510 || (SYM) == ADDR_EXPR \
2511 || (SYM) == WITH_SIZE_EXPR \
2512 || (SYM) == SSA_NAME \
2513 || (SYM) == VEC_COND_EXPR) ? GIMPLE_SINGLE_RHS \
2514 : GIMPLE_INVALID_RHS),
2515 #define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,
2517 const unsigned char gimple_rhs_class_table[] = {
2518 #include "all-tree.def"
2522 #undef END_OF_BASE_TREE_CODES
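/* A minimal sketch of how the table above is consulted; the real accessor is
   a static inline in gimple.h, so this copy is illustrative only and kept out
   of the build.  */
#if 0
static inline enum gimple_rhs_class
get_gimple_rhs_class_sketch (enum tree_code code)
{
  /* The table is indexed directly by the tree code and yields the
     GIMPLE_*_RHS classification that get_gimple_rhs_num_ops maps to an
     operand count.  */
  return (enum gimple_rhs_class) gimple_rhs_class_table[(int) code];
}
#endif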
2524 /* For the definitive definition of GIMPLE, see doc/tree-ssa.texi. */
2526 /* Validation of GIMPLE expressions. */
2528 /* Returns true iff T is a valid RHS for an assignment to a renamed
2529 user -- or front-end generated artificial -- variable. */
2532 is_gimple_reg_rhs (tree t)
2534 return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
2537 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
2538 LHS, or for a call argument. */
2541 is_gimple_mem_rhs (tree t)
2543 /* If we're dealing with a renamable type, either source or dest must be
2544 a renamed variable. */
2545 if (is_gimple_reg_type (TREE_TYPE (t)))
2546 return is_gimple_val (t);
2548 return is_gimple_val (t) || is_gimple_lvalue (t);
2551 /* Return true if T is a valid LHS for a GIMPLE assignment expression. */
2554 is_gimple_lvalue (tree t)
2556 return (is_gimple_addressable (t)
2557 || TREE_CODE (t) == WITH_SIZE_EXPR
2558 /* These are complex lvalues, but don't have addresses, so they go here.  */
2560 || TREE_CODE (t) == BIT_FIELD_REF);
2563 /* Return true if T is a GIMPLE condition. */
2566 is_gimple_condexpr (tree t)
2568 return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
2569 && !tree_could_throw_p (t)
2570 && is_gimple_val (TREE_OPERAND (t, 0))
2571 && is_gimple_val (TREE_OPERAND (t, 1))));
2574 /* Return true if T is something whose address can be taken. */
2577 is_gimple_addressable (tree t)
2579 return (is_gimple_id (t) || handled_component_p (t)
2580 || TREE_CODE (t) == MEM_REF);
2583 /* Return true if T is a valid gimple constant. */
2586 is_gimple_constant (const_tree t)
2588 switch (TREE_CODE (t))
2598 /* Vector constant constructors are gimple invariant. */
2600 if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2601 return TREE_CONSTANT (t);
2610 /* Return true if T is a gimple address. */
2613 is_gimple_address (const_tree t)
2617 if (TREE_CODE (t) != ADDR_EXPR)
2620 op = TREE_OPERAND (t, 0);
2621 while (handled_component_p (op))
2623 if ((TREE_CODE (op) == ARRAY_REF
2624 || TREE_CODE (op) == ARRAY_RANGE_REF)
2625 && !is_gimple_val (TREE_OPERAND (op, 1)))
2628 op = TREE_OPERAND (op, 0);
2631 if (CONSTANT_CLASS_P (op) || TREE_CODE (op) == MEM_REF)
2634 switch (TREE_CODE (op))
2649 /* Strip out all handled components that produce invariant offsets.  */
2653 strip_invariant_refs (const_tree op)
2655 while (handled_component_p (op))
2657 switch (TREE_CODE (op))
2660 case ARRAY_RANGE_REF:
2661 if (!is_gimple_constant (TREE_OPERAND (op, 1))
2662 || TREE_OPERAND (op, 2) != NULL_TREE
2663 || TREE_OPERAND (op, 3) != NULL_TREE)
2668 if (TREE_OPERAND (op, 2) != NULL_TREE)
2674 op = TREE_OPERAND (op, 0);
2680 /* Return true if T is a gimple invariant address. */
2683 is_gimple_invariant_address (const_tree t)
2687 if (TREE_CODE (t) != ADDR_EXPR)
2690 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2694 if (TREE_CODE (op) == MEM_REF)
2696 const_tree op0 = TREE_OPERAND (op, 0);
2697 return (TREE_CODE (op0) == ADDR_EXPR
2698 && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
2699 || decl_address_invariant_p (TREE_OPERAND (op0, 0))));
2702 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
2705 /* Return true if T is a gimple invariant address at IPA level
2706 (so addresses of variables on stack are not allowed). */
2709 is_gimple_ip_invariant_address (const_tree t)
2713 if (TREE_CODE (t) != ADDR_EXPR)
2716 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2718 return op && (CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op));
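/* Illustrative sketch (user-level C with hypothetical names, kept out of the
   build): &global_buf[4] is both a gimple invariant address and an IPA-level
   invariant address, while &local is invariant only within its function and
   is therefore rejected by the IPA-level predicate above.  */
#if 0
static char global_buf[16];

void
invariant_address_example (void)
{
  char local;
  char *p = &global_buf[4];   /* invariant and IP-invariant address */
  char *q = &local;           /* function invariant, not IP-invariant */
  (void) p; (void) q;
}
#endif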
2721 /* Return true if T is a GIMPLE minimal invariant. It's a restricted
2722 form of function invariant. */
2725 is_gimple_min_invariant (const_tree t)
2727 if (TREE_CODE (t) == ADDR_EXPR)
2728 return is_gimple_invariant_address (t);
2730 return is_gimple_constant (t);
2733 /* Return true if T is a GIMPLE interprocedural invariant. It's a restricted
2734 form of gimple minimal invariant. */
2737 is_gimple_ip_invariant (const_tree t)
2739 if (TREE_CODE (t) == ADDR_EXPR)
2740 return is_gimple_ip_invariant_address (t);
2742 return is_gimple_constant (t);
2745 /* Return true if T looks like a valid GIMPLE statement. */
2748 is_gimple_stmt (tree t)
2750 const enum tree_code code = TREE_CODE (t);
2755 /* The only valid NOP_EXPR is the empty statement. */
2756 return IS_EMPTY_STMT (t);
2760 /* These are only valid if they're void. */
2761 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
2767 case CASE_LABEL_EXPR:
2768 case TRY_CATCH_EXPR:
2769 case TRY_FINALLY_EXPR:
2770 case EH_FILTER_EXPR:
2773 case STATEMENT_LIST:
2783 /* These are always void. */
2789 /* These are valid regardless of their type. */
2797 /* Return true if T is a variable. */
2800 is_gimple_variable (tree t)
2802 return (TREE_CODE (t) == VAR_DECL
2803 || TREE_CODE (t) == PARM_DECL
2804 || TREE_CODE (t) == RESULT_DECL
2805 || TREE_CODE (t) == SSA_NAME);
2808 /* Return true if T is a GIMPLE identifier (something with an address). */
2811 is_gimple_id (tree t)
2813 return (is_gimple_variable (t)
2814 || TREE_CODE (t) == FUNCTION_DECL
2815 || TREE_CODE (t) == LABEL_DECL
2816 || TREE_CODE (t) == CONST_DECL
2817 /* Allow string constants, since they are addressable. */
2818 || TREE_CODE (t) == STRING_CST);
2821 /* Return true if TYPE is a suitable type for a scalar register variable. */
2824 is_gimple_reg_type (tree type)
2826 return !AGGREGATE_TYPE_P (type);
2829 /* Return true if T is a non-aggregate register variable. */
2832 is_gimple_reg (tree t)
2834 if (TREE_CODE (t) == SSA_NAME)
2835 t = SSA_NAME_VAR (t);
2837 if (!is_gimple_variable (t))
2840 if (!is_gimple_reg_type (TREE_TYPE (t)))
2843 /* A volatile decl is not acceptable because we can't reuse it as
2844 needed. We need to copy it into a temp first. */
2845 if (TREE_THIS_VOLATILE (t))
2848 /* We define "registers" as things that can be renamed as needed,
2849 which with our infrastructure does not apply to memory. */
2850 if (needs_to_live_in_memory (t))
2853 /* Hard register variables are an interesting case. For those that
2854 are call-clobbered, we don't know where all the calls are, since
2855 we don't (want to) take into account which operations will turn
2856 into libcalls at the rtl level. For those that are call-saved,
2857 we don't currently model the fact that calls may in fact change
2858 global hard registers, nor do we examine ASM_CLOBBERS at the tree
2859 level, and so miss variable changes that might imply. All around,
2860 it seems safest to not do too much optimization with these at the
2861 tree level at all. We'll have to rely on the rtl optimizers to
2862 clean this up, as there we've got all the appropriate bits exposed. */
2863 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2866 /* Complex and vector values must have been put into SSA-like form.
2867 That is, no assignments to the individual components. */
2868 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
2869 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2870 return DECL_GIMPLE_REG_P (t);
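/* Illustrative sketch (user-level C, hypothetical names, kept out of the
   build) of decls the predicate above classifies:  */
#if 0
void
gimple_reg_example (void)
{
  int a;             /* scalar, address never taken: can be a gimple register */
  volatile int v;    /* volatile: must stay in memory */
  int arr[4];        /* aggregate type: never a register */
  int b;
  int *p = &b;       /* address taken: b needs to live in memory */
  (void) a; (void) v; (void) arr; (void) p;
}
#endif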
2876 /* Return true if T is a GIMPLE variable whose address is not needed. */
2879 is_gimple_non_addressable (tree t)
2881 if (TREE_CODE (t) == SSA_NAME)
2882 t = SSA_NAME_VAR (t);
2884 return (is_gimple_variable (t) && ! needs_to_live_in_memory (t));
2887 /* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant. */
2890 is_gimple_val (tree t)
2892 /* Make loads from volatiles and memory vars explicit. */
2893 if (is_gimple_variable (t)
2894 && is_gimple_reg_type (TREE_TYPE (t))
2895 && !is_gimple_reg (t))
2898 return (is_gimple_variable (t) || is_gimple_min_invariant (t));
2901 /* Similarly, but accept hard registers as inputs to asm statements. */
2904 is_gimple_asm_val (tree t)
2906 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2909 return is_gimple_val (t);
2912 /* Return true if T is a GIMPLE minimal lvalue. */
2915 is_gimple_min_lval (tree t)
2917 if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
2919 return (is_gimple_id (t) || TREE_CODE (t) == MEM_REF);
2922 /* Return true if T is a valid function operand of a CALL_EXPR. */
2925 is_gimple_call_addr (tree t)
2927 return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
2930 /* Return true if T is a valid address operand of a MEM_REF. */
2933 is_gimple_mem_ref_addr (tree t)
2935 return (is_gimple_reg (t)
2936 || TREE_CODE (t) == INTEGER_CST
2937 || (TREE_CODE (t) == ADDR_EXPR
2938 && (CONSTANT_CLASS_P (TREE_OPERAND (t, 0))
2939 || decl_address_invariant_p (TREE_OPERAND (t, 0)))));
2942 /* If T makes a function call, return the corresponding CALL_EXPR operand.
2943 Otherwise, return NULL_TREE. */
2946 get_call_expr_in (tree t)
2948 if (TREE_CODE (t) == MODIFY_EXPR)
2949 t = TREE_OPERAND (t, 1);
2950 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2951 t = TREE_OPERAND (t, 0);
2952 if (TREE_CODE (t) == CALL_EXPR)
2958 /* Given a memory reference expression T, return its base address.
2959 The base address of a memory reference expression is the main
2960 object being referenced. For instance, the base address for
2961 'array[i].fld[j]' is 'array'. You can think of this as stripping
2962 away the offset part from a memory address.
2964 This function calls handled_component_p to strip away all the inner
2965 parts of the memory reference until it reaches the base object. */
2968 get_base_address (tree t)
2970 while (handled_component_p (t))
2971 t = TREE_OPERAND (t, 0);
2973 if ((TREE_CODE (t) == MEM_REF
2974 || TREE_CODE (t) == TARGET_MEM_REF)
2975 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
2976 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
2978 if (TREE_CODE (t) == SSA_NAME
2980 || TREE_CODE (t) == STRING_CST
2981 || TREE_CODE (t) == CONSTRUCTOR
2982 || INDIRECT_REF_P (t)
2983 || TREE_CODE (t) == MEM_REF
2984 || TREE_CODE (t) == TARGET_MEM_REF)
2991 recalculate_side_effects (tree t)
2993 enum tree_code code = TREE_CODE (t);
2994 int len = TREE_OPERAND_LENGTH (t);
2997 switch (TREE_CODE_CLASS (code))
2999 case tcc_expression:
3005 case PREDECREMENT_EXPR:
3006 case PREINCREMENT_EXPR:
3007 case POSTDECREMENT_EXPR:
3008 case POSTINCREMENT_EXPR:
3009 /* All of these have side-effects, no matter what their operands are.  */
3018 case tcc_comparison: /* a comparison expression */
3019 case tcc_unary: /* a unary arithmetic expression */
3020 case tcc_binary: /* a binary arithmetic expression */
3021 case tcc_reference: /* a reference */
3022 case tcc_vl_exp: /* a function call */
3023 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
3024 for (i = 0; i < len; ++i)
3026 tree op = TREE_OPERAND (t, i);
3027 if (op && TREE_SIDE_EFFECTS (op))
3028 TREE_SIDE_EFFECTS (t) = 1;
3033 /* No side-effects. */
3041 /* Canonicalize a tree T for use in a COND_EXPR as conditional. Returns
3042 a canonicalized tree that is valid for a COND_EXPR or NULL_TREE, if
3043 we failed to create one. */
3046 canonicalize_cond_expr_cond (tree t)
3048 /* Strip conversions around boolean operations. */
3049 if (CONVERT_EXPR_P (t)
3050 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 0))))
3051 t = TREE_OPERAND (t, 0);
3053 /* For (bool)x use x != 0. */
3054 if (CONVERT_EXPR_P (t)
3055 && TREE_CODE (TREE_TYPE (t)) == BOOLEAN_TYPE)
3057 tree top0 = TREE_OPERAND (t, 0);
3058 t = build2 (NE_EXPR, TREE_TYPE (t),
3059 top0, build_int_cst (TREE_TYPE (top0), 0));
3061 /* For !x use x == 0. */
3062 else if (TREE_CODE (t) == TRUTH_NOT_EXPR)
3064 tree top0 = TREE_OPERAND (t, 0);
3065 t = build2 (EQ_EXPR, TREE_TYPE (t),
3066 top0, build_int_cst (TREE_TYPE (top0), 0));
3068 /* For cmp ? 1 : 0 use cmp. */
3069 else if (TREE_CODE (t) == COND_EXPR
3070 && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
3071 && integer_onep (TREE_OPERAND (t, 1))
3072 && integer_zerop (TREE_OPERAND (t, 2)))
3074 tree top0 = TREE_OPERAND (t, 0);
3075 t = build2 (TREE_CODE (top0), TREE_TYPE (t),
3076 TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
3079 if (is_gimple_condexpr (t))
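/* A minimal usage sketch (hypothetical caller, kept out of the build): code
   that folds the condition of a GIMPLE_COND typically runs the folded tree
   through canonicalize_cond_expr_cond and only commits the change when a
   valid condition came back.  */
#if 0
static bool
try_replace_cond (gimple cond_stmt, tree folded)
{
  tree canon = canonicalize_cond_expr_cond (folded);
  if (canon == NULL_TREE)
    return false;       /* could not turn FOLDED into a valid condition */
  gimple_cond_set_condition_from_tree (cond_stmt, canon);
  update_stmt (cond_stmt);
  return true;
}
#endif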
3085 /* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
3086 the positions marked by the set ARGS_TO_SKIP. */
3089 gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
3092 tree fn = gimple_call_fn (stmt);
3093 int nargs = gimple_call_num_args (stmt);
3094 VEC(tree, heap) *vargs = VEC_alloc (tree, heap, nargs);
3097 for (i = 0; i < nargs; i++)
3098 if (!bitmap_bit_p (args_to_skip, i))
3099 VEC_quick_push (tree, vargs, gimple_call_arg (stmt, i));
3101 new_stmt = gimple_build_call_vec (fn, vargs);
3102 VEC_free (tree, heap, vargs);
3103 if (gimple_call_lhs (stmt))
3104 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3106 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3107 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3109 gimple_set_block (new_stmt, gimple_block (stmt));
3110 if (gimple_has_location (stmt))
3111 gimple_set_location (new_stmt, gimple_location (stmt));
3112 gimple_call_copy_flags (new_stmt, stmt);
3113 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3115 gimple_set_modified (new_stmt, true);
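/* A minimal usage sketch (hypothetical caller, kept out of the build): drop
   the second argument of a call while keeping the LHS, flags and virtual
   operands, using the copy routine above.  */
#if 0
static gimple
drop_second_argument (gimple stmt)
{
  bitmap args_to_skip = BITMAP_ALLOC (NULL);
  gimple new_call;

  bitmap_set_bit (args_to_skip, 1);   /* argument positions are zero-based */
  new_call = gimple_call_copy_skip_args (stmt, args_to_skip);
  BITMAP_FREE (args_to_skip);
  return new_call;
}
#endif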
3121 static hashval_t gimple_type_hash_1 (const void *, enum gtc_mode);
3123 /* Structure used to maintain a cache of some type pairs compared by
3124 gimple_types_compatible_p when comparing aggregate types. There are
3125 three possible values for SAME_P:
3127 -2: The pair (T1, T2) has just been inserted in the table.
3128 0: T1 and T2 are different types.
3129 1: T1 and T2 are the same type.
3131 The two elements in the SAME_P array are indexed by the comparison mode.  */
3138 signed char same_p[2];
3140 typedef struct type_pair_d *type_pair_t;
3142 DEF_VEC_P(type_pair_t);
3143 DEF_VEC_ALLOC_P(type_pair_t,heap);
3145 /* Return a hash value for the type pair pointed-to by P. */
3148 type_pair_hash (const void *p)
3150 const struct type_pair_d *pair = (const struct type_pair_d *) p;
3151 hashval_t val1 = pair->uid1;
3152 hashval_t val2 = pair->uid2;
3153 return (iterative_hash_hashval_t (val2, val1)
3154 ^ iterative_hash_hashval_t (val1, val2));
3157 /* Compare two type pairs pointed-to by P1 and P2. */
3160 type_pair_eq (const void *p1, const void *p2)
3162 const struct type_pair_d *pair1 = (const struct type_pair_d *) p1;
3163 const struct type_pair_d *pair2 = (const struct type_pair_d *) p2;
3164 return ((pair1->uid1 == pair2->uid1 && pair1->uid2 == pair2->uid2)
3165 || (pair1->uid1 == pair2->uid2 && pair1->uid2 == pair2->uid1));
3168 /* Lookup the pair of types T1 and T2 in *VISITED_P. Insert a new
3169 entry if none existed. */
3172 lookup_type_pair (tree t1, tree t2, htab_t *visited_p, struct obstack *ob_p)
3174 struct type_pair_d pair;
3178 if (*visited_p == NULL)
3180 *visited_p = htab_create (251, type_pair_hash, type_pair_eq, NULL);
3181 gcc_obstack_init (ob_p);
3184 pair.uid1 = TYPE_UID (t1);
3185 pair.uid2 = TYPE_UID (t2);
3186 slot = htab_find_slot (*visited_p, &pair, INSERT);
3189 p = *((type_pair_t *) slot);
3192 p = XOBNEW (ob_p, struct type_pair_d);
3193 p->uid1 = TYPE_UID (t1);
3194 p->uid2 = TYPE_UID (t2);
3203 /* Per pointer state for the SCC finding. The on_sccstack flag
3204 is not strictly required, it is true when there is no hash value
3205 recorded for the type and false otherwise. But querying that is slower.  */
3210 unsigned int dfsnum;
3219 static unsigned int next_dfs_num;
3220 static unsigned int gtc_next_dfs_num;
3223 /* GIMPLE type merging cache. A direct-mapped cache based on TYPE_UID. */
3225 typedef struct GTY(()) gimple_type_leader_entry_s {
3228 } gimple_type_leader_entry;
3230 #define GIMPLE_TYPE_LEADER_SIZE 16381
3231 static GTY((deletable, length("GIMPLE_TYPE_LEADER_SIZE")))
3232 gimple_type_leader_entry *gimple_type_leader;
3234 /* Lookup an existing leader for T and return it or NULL_TREE, if
3235 there is none in the cache. */
3238 gimple_lookup_type_leader (tree t)
3240 gimple_type_leader_entry *leader;
3242 if (!gimple_type_leader)
3245 leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
3246 if (leader->type != t)
3249 return leader->leader;
3252 /* Return true if T1 and T2 have the same name. If FOR_COMPLETION_P is
3253 true then if any type has no name return false, otherwise return
3254 true if both types have no names. */
3257 compare_type_names_p (tree t1, tree t2, bool for_completion_p)
3259 tree name1 = TYPE_NAME (t1);
3260 tree name2 = TYPE_NAME (t2);
3262 /* Consider anonymous types all unique for completion. */
3263 if (for_completion_p
3264 && (!name1 || !name2))
3267 if (name1 && TREE_CODE (name1) == TYPE_DECL)
3269 name1 = DECL_NAME (name1);
3270 if (for_completion_p
3274 gcc_assert (!name1 || TREE_CODE (name1) == IDENTIFIER_NODE);
3276 if (name2 && TREE_CODE (name2) == TYPE_DECL)
3278 name2 = DECL_NAME (name2);
3279 if (for_completion_p
3283 gcc_assert (!name2 || TREE_CODE (name2) == IDENTIFIER_NODE);
3285 /* Identifiers can be compared with pointer equality rather
3286 than a string comparison. */
3293 /* Return true if the field decls F1 and F2 are at the same offset.
3295 This is intended to be used on GIMPLE types only. */
3298 gimple_compare_field_offset (tree f1, tree f2)
3300 if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
3302 tree offset1 = DECL_FIELD_OFFSET (f1);
3303 tree offset2 = DECL_FIELD_OFFSET (f2);
3304 return ((offset1 == offset2
3305 /* Once gimplification is done, self-referential offsets are
3306 instantiated as operand #2 of the COMPONENT_REF built for
3307 each access and reset. Therefore, they are not relevant
3308 anymore and fields are interchangeable provided that they
3309 represent the same access. */
3310 || (TREE_CODE (offset1) == PLACEHOLDER_EXPR
3311 && TREE_CODE (offset2) == PLACEHOLDER_EXPR
3312 && (DECL_SIZE (f1) == DECL_SIZE (f2)
3313 || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR
3314 && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR)
3315 || operand_equal_p (DECL_SIZE (f1), DECL_SIZE (f2), 0))
3316 && DECL_ALIGN (f1) == DECL_ALIGN (f2))
3317 || operand_equal_p (offset1, offset2, 0))
3318 && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
3319 DECL_FIELD_BIT_OFFSET (f2)));
3322 /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
3323 should be, so handle differing ones specially by decomposing
3324 the offset into a byte and bit offset manually. */
3325 if (host_integerp (DECL_FIELD_OFFSET (f1), 0)
3326 && host_integerp (DECL_FIELD_OFFSET (f2), 0))
3328 unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
3329 unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
3330 bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
3331 byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
3332 + bit_offset1 / BITS_PER_UNIT);
3333 bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
3334 byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
3335 + bit_offset2 / BITS_PER_UNIT);
3336 if (byte_offset1 != byte_offset2)
3338 return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
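/* A worked example (made-up numbers) of the decomposition above: a field 36
   bits into its record can be described either as DECL_FIELD_OFFSET = 4
   (bytes) with DECL_FIELD_BIT_OFFSET = 4, or as DECL_FIELD_OFFSET = 0 with
   DECL_FIELD_BIT_OFFSET = 36.  Both decompose to byte offset 4 (4 + 4/8,
   resp. 0 + 36/8) and bit remainder 4 (4 % 8, resp. 36 % 8), so the two
   descriptions compare equal.  */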
3344 /* If the type T1 and the type T2 are a complete and an incomplete
3345 variant of the same type return true. */
3348 gimple_compatible_complete_and_incomplete_subtype_p (tree t1, tree t2)
3350 /* If one pointer points to an incomplete type variant of
3351 the other pointed-to type they are the same. */
3352 if (TREE_CODE (t1) == TREE_CODE (t2)
3353 && RECORD_OR_UNION_TYPE_P (t1)
3354 && (!COMPLETE_TYPE_P (t1)
3355 || !COMPLETE_TYPE_P (t2))
3356 && TYPE_QUALS (t1) == TYPE_QUALS (t2)
3357 && compare_type_names_p (TYPE_MAIN_VARIANT (t1),
3358 TYPE_MAIN_VARIANT (t2), true))
3364 gimple_types_compatible_p_1 (tree, tree, enum gtc_mode, type_pair_t,
3365 VEC(type_pair_t, heap) **,
3366 struct pointer_map_t *, struct obstack *);
3368 /* DFS visit the edge from the callers type pair with state *STATE to
3369 the pair T1, T2 while operating in mode MODE.
3370 Update the merging status if it is not part of the SCC containing the
3371 callers pair and return it.
3372 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
3375 gtc_visit (tree t1, tree t2, enum gtc_mode mode,
3377 VEC(type_pair_t, heap) **sccstack,
3378 struct pointer_map_t *sccstate,
3379 struct obstack *sccstate_obstack)
3381 struct sccs *cstate = NULL;
3385 /* Check first for the obvious case of pointer identity. */
3389 /* Check that we have two types to compare. */
3390 if (t1 == NULL_TREE || t2 == NULL_TREE)
3393 /* If the types have been previously registered and found equal
3395 if (mode == GTC_MERGE)
3397 tree leader1 = gimple_lookup_type_leader (t1);
3398 tree leader2 = gimple_lookup_type_leader (t2);
3401 || (leader1 && leader1 == leader2))
3404 else if (mode == GTC_DIAG)
3406 if (TYPE_CANONICAL (t1)
3407 && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2))
3411 /* Can't be the same type if the types don't have the same code. */
3412 if (TREE_CODE (t1) != TREE_CODE (t2))
3415 /* Can't be the same type if they have different CV qualifiers. */
3416 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
3419 /* Void types are always the same. */
3420 if (TREE_CODE (t1) == VOID_TYPE)
3423 /* Do some simple checks before doing three hashtable queries. */
3424 if (INTEGRAL_TYPE_P (t1)
3425 || SCALAR_FLOAT_TYPE_P (t1)
3426 || FIXED_POINT_TYPE_P (t1)
3427 || TREE_CODE (t1) == VECTOR_TYPE
3428 || TREE_CODE (t1) == COMPLEX_TYPE
3429 || TREE_CODE (t1) == OFFSET_TYPE)
3431 /* Can't be the same type if they have different alignment,
3432 sign, precision or mode. */
3433 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3434 || TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
3435 || TYPE_MODE (t1) != TYPE_MODE (t2)
3436 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
3439 if (TREE_CODE (t1) == INTEGER_TYPE
3440 && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
3441 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
3444 /* That's all we need to check for float and fixed-point types. */
3445 if (SCALAR_FLOAT_TYPE_P (t1)
3446 || FIXED_POINT_TYPE_P (t1))
3449 /* For integral types fall thru to more complex checks. */
3452 else if (AGGREGATE_TYPE_P (t1) || POINTER_TYPE_P (t1))
3454 /* Can't be the same type if they have different alignment or mode. */
3455 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3456 || TYPE_MODE (t1) != TYPE_MODE (t2))
3460 /* If the hash values of t1 and t2 are different the types can't
3461 possibly be the same. This helps keep the type-pair hashtable
3462 small, only tracking comparisons for hash collisions. */
3463 if (gimple_type_hash_1 (t1, mode) != gimple_type_hash_1 (t2, mode))
3466 /* Allocate a new cache entry for this comparison. */
3467 p = lookup_type_pair (t1, t2, >c_visited, >c_ob);
3468 if (p->same_p[mode] == 0 || p->same_p[mode] == 1)
3470 /* We have already decided whether T1 and T2 are the
3471 same, return the cached result. */
3472 return p->same_p[mode] == 1;
3475 if ((slot = pointer_map_contains (sccstate, p)) != NULL)
3476 cstate = (struct sccs *)*slot;
3477 /* Not yet visited. DFS recurse. */
3480 gimple_types_compatible_p_1 (t1, t2, mode, p,
3481 sccstack, sccstate, sccstate_obstack);
3482 cstate = (struct sccs *)* pointer_map_contains (sccstate, p);
3483 state->low = MIN (state->low, cstate->low);
3485 /* If the type is still on the SCC stack adjust the parents low. */
3486 if (cstate->dfsnum < state->dfsnum
3487 && cstate->on_sccstack)
3488 state->low = MIN (cstate->dfsnum, state->low);
3490 /* Return the current lattice value. We start with an equality
3491 assumption so types part of a SCC will be optimistically
3492 treated equal unless proven otherwise. */
3493 return cstate->u.same_p;
3496 /* Worker for gimple_types_compatible_p.
3497 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
3500 gimple_types_compatible_p_1 (tree t1, tree t2, enum gtc_mode mode,
3502 VEC(type_pair_t, heap) **sccstack,
3503 struct pointer_map_t *sccstate,
3504 struct obstack *sccstate_obstack)
3508 gcc_assert (p->same_p[mode] == -2);
3510 state = XOBNEW (sccstate_obstack, struct sccs);
3511 *pointer_map_insert (sccstate, p) = state;
3513 VEC_safe_push (type_pair_t, heap, *sccstack, p);
3514 state->dfsnum = gtc_next_dfs_num++;
3515 state->low = state->dfsnum;
3516 state->on_sccstack = true;
3517 /* Start with an equality assumption. As we DFS recurse into child
3518 SCCs this assumption may get revisited. */
3519 state->u.same_p = 1;
3521 /* If their attributes are not the same they can't be the same type. */
3522 if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
3523 goto different_types;
3525 /* Do type-specific comparisons. */
3526 switch (TREE_CODE (t1))
3530 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2), mode,
3531 state, sccstack, sccstate, sccstate_obstack))
3532 goto different_types;
3536 /* Array types are the same if the element types are the same and
3537 the number of elements is the same.  */
3538 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2), mode,
3539 state, sccstack, sccstate, sccstate_obstack)
3540 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
3541 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
3542 goto different_types;
3545 tree i1 = TYPE_DOMAIN (t1);
3546 tree i2 = TYPE_DOMAIN (t2);
3548 /* For an incomplete external array, the type domain can be
3549 NULL_TREE. Check this condition also. */
3550 if (i1 == NULL_TREE && i2 == NULL_TREE)
3552 else if (i1 == NULL_TREE || i2 == NULL_TREE)
3553 goto different_types;
3554 /* If for a complete array type the possibly gimplified sizes
3555 are different the types are different. */
3556 else if (((TYPE_SIZE (i1) != NULL) ^ (TYPE_SIZE (i2) != NULL))
3559 && !operand_equal_p (TYPE_SIZE (i1), TYPE_SIZE (i2), 0)))
3560 goto different_types;
3563 tree min1 = TYPE_MIN_VALUE (i1);
3564 tree min2 = TYPE_MIN_VALUE (i2);
3565 tree max1 = TYPE_MAX_VALUE (i1);
3566 tree max2 = TYPE_MAX_VALUE (i2);
3568 /* The minimum/maximum values have to be the same. */
3571 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
3572 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
3573 || operand_equal_p (min1, min2, 0))))
3576 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
3577 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
3578 || operand_equal_p (max1, max2, 0)))))
3581 goto different_types;
3586 /* Method types should belong to the same class. */
3587 if (!gtc_visit (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2),
3588 mode, state, sccstack, sccstate, sccstate_obstack))
3589 goto different_types;
3594 /* Function types are the same if the return type and argument types are the same.  */
3596 if ((mode != GTC_DIAG
3597 || !gimple_compatible_complete_and_incomplete_subtype_p
3598 (TREE_TYPE (t1), TREE_TYPE (t2)))
3599 && !gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2), mode,
3600 state, sccstack, sccstate, sccstate_obstack))
3601 goto different_types;
3603 if (!comp_type_attributes (t1, t2))
3604 goto different_types;
3606 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
3610 tree parms1, parms2;
3612 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
3614 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
3616 if ((mode == GTC_MERGE
3617 || !gimple_compatible_complete_and_incomplete_subtype_p
3618 (TREE_VALUE (parms1), TREE_VALUE (parms2)))
3619 && !gtc_visit (TREE_VALUE (parms1), TREE_VALUE (parms2), mode,
3620 state, sccstack, sccstate, sccstate_obstack))
3621 goto different_types;
3624 if (parms1 || parms2)
3625 goto different_types;
3632 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2), mode,
3633 state, sccstack, sccstate, sccstate_obstack)
3634 || !gtc_visit (TYPE_OFFSET_BASETYPE (t1),
3635 TYPE_OFFSET_BASETYPE (t2), mode,
3636 state, sccstack, sccstate, sccstate_obstack))
3637 goto different_types;
3643 case REFERENCE_TYPE:
3645 /* If the two pointers have different ref-all attributes,
3646 they can't be the same type. */
3647 if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
3648 goto different_types;
3650 /* If one pointer points to an incomplete type variant of
3651 the other pointed-to type they are the same. */
3652 if (mode == GTC_DIAG
3653 && gimple_compatible_complete_and_incomplete_subtype_p
3654 (TREE_TYPE (t1), TREE_TYPE (t2)))
3657 /* Otherwise, pointer and reference types are the same if the
3658 pointed-to types are the same. */
3659 if (gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2), mode,
3660 state, sccstack, sccstate, sccstate_obstack))
3663 goto different_types;
3667 /* There is only one decltype(nullptr). */
3673 tree min1 = TYPE_MIN_VALUE (t1);
3674 tree max1 = TYPE_MAX_VALUE (t1);
3675 tree min2 = TYPE_MIN_VALUE (t2);
3676 tree max2 = TYPE_MAX_VALUE (t2);
3677 bool min_equal_p = false;
3678 bool max_equal_p = false;
3680 /* If either type has a minimum value, the other type must have the same.  */
3682 if (min1 == NULL_TREE && min2 == NULL_TREE)
3684 else if (min1 && min2 && operand_equal_p (min1, min2, 0))
3687 /* Likewise, if either type has a maximum value, the other
3688 type must have the same. */
3689 if (max1 == NULL_TREE && max2 == NULL_TREE)
3691 else if (max1 && max2 && operand_equal_p (max1, max2, 0))
3694 if (!min_equal_p || !max_equal_p)
3695 goto different_types;
3702 /* FIXME lto, we cannot check bounds on enumeral types because
3703 different front ends will produce different values.
3704 In C, enumeral types are integers, while in C++ each element
3705 will have its own symbolic value. We should decide how enums
3706 are to be represented in GIMPLE and have each front end lower to that.  */
3710 /* For enumeral types, all the values must be the same. */
3711 if (TYPE_VALUES (t1) == TYPE_VALUES (t2))
3714 for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
3716 v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
3718 tree c1 = TREE_VALUE (v1);
3719 tree c2 = TREE_VALUE (v2);
3721 if (TREE_CODE (c1) == CONST_DECL)
3722 c1 = DECL_INITIAL (c1);
3724 if (TREE_CODE (c2) == CONST_DECL)
3725 c2 = DECL_INITIAL (c2);
3727 if (tree_int_cst_equal (c1, c2) != 1)
3728 goto different_types;
3731 /* If one enumeration has more values than the other, they
3732 are not the same. */
3734 goto different_types;
3741 case QUAL_UNION_TYPE:
3745 /* The struct tags shall compare equal. */
3746 if (mode == GTC_MERGE
3747 && !compare_type_names_p (TYPE_MAIN_VARIANT (t1),
3748 TYPE_MAIN_VARIANT (t2), false))
3749 goto different_types;
3751 /* For aggregate types, all the fields must be the same. */
3752 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
3754 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
3756 /* The fields must have the same name, offset and type. */
3757 if ((mode == GTC_MERGE
3758 && DECL_NAME (f1) != DECL_NAME (f2))
3759 || DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
3760 || !gimple_compare_field_offset (f1, f2)
3761 || !gtc_visit (TREE_TYPE (f1), TREE_TYPE (f2), mode,
3762 state, sccstack, sccstate, sccstate_obstack))
3763 goto different_types;
3766 /* If one aggregate has more fields than the other, they
3767 are not the same. */
3769 goto different_types;
3778 /* Common exit path for types that are not compatible. */
3780 state->u.same_p = 0;
3783 /* Common exit path for types that are compatible. */
3785 gcc_assert (state->u.same_p == 1);
3788 if (state->low == state->dfsnum)
3792 /* Pop off the SCC and set its cache values to the final
3793 comparison result. */
3796 struct sccs *cstate;
3797 x = VEC_pop (type_pair_t, *sccstack);
3798 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
3799 cstate->on_sccstack = false;
3800 x->same_p[mode] = state->u.same_p;
3805 return state->u.same_p;
3808 /* Return true iff T1 and T2 are structurally identical. When
3809 MODE is GTC_MERGE, an incomplete type and a complete type
3810 are considered different; otherwise they are considered compatible.  */
3813 gimple_types_compatible_p (tree t1, tree t2, enum gtc_mode mode)
3815 VEC(type_pair_t, heap) *sccstack = NULL;
3816 struct pointer_map_t *sccstate;
3817 struct obstack sccstate_obstack;
3818 type_pair_t p = NULL;
3821 /* Before starting to set up the SCC machinery handle simple cases. */
3823 /* Check first for the obvious case of pointer identity. */
3827 /* Check that we have two types to compare. */
3828 if (t1 == NULL_TREE || t2 == NULL_TREE)
3831 /* If the types have been previously registered and found equal
3833 if (mode == GTC_MERGE)
3835 tree leader1 = gimple_lookup_type_leader (t1);
3836 tree leader2 = gimple_lookup_type_leader (t2);
3839 || (leader1 && leader1 == leader2))
3842 else if (mode == GTC_DIAG)
3844 if (TYPE_CANONICAL (t1)
3845 && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2))
3849 /* Can't be the same type if the types don't have the same code. */
3850 if (TREE_CODE (t1) != TREE_CODE (t2))
3853 /* Can't be the same type if they have different CV qualifiers. */
3854 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
3857 /* Void types are always the same. */
3858 if (TREE_CODE (t1) == VOID_TYPE)
3861 /* Do some simple checks before doing three hashtable queries. */
3862 if (INTEGRAL_TYPE_P (t1)
3863 || SCALAR_FLOAT_TYPE_P (t1)
3864 || FIXED_POINT_TYPE_P (t1)
3865 || TREE_CODE (t1) == VECTOR_TYPE
3866 || TREE_CODE (t1) == COMPLEX_TYPE
3867 || TREE_CODE (t1) == OFFSET_TYPE)
3869 /* Can't be the same type if they have different alignment,
3870 sign, precision or mode. */
3871 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3872 || TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
3873 || TYPE_MODE (t1) != TYPE_MODE (t2)
3874 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
3877 if (TREE_CODE (t1) == INTEGER_TYPE
3878 && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
3879 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
3882 /* That's all we need to check for float and fixed-point types. */
3883 if (SCALAR_FLOAT_TYPE_P (t1)
3884 || FIXED_POINT_TYPE_P (t1))
3887 /* For integral types fall thru to more complex checks. */
3890 else if (AGGREGATE_TYPE_P (t1) || POINTER_TYPE_P (t1))
3892 /* Can't be the same type if they have different alignment or mode. */
3893 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3894 || TYPE_MODE (t1) != TYPE_MODE (t2))
3898 /* If the hash values of t1 and t2 are different the types can't
3899 possibly be the same. This helps keep the type-pair hashtable
3900 small, only tracking comparisons for hash collisions. */
3901 if (gimple_type_hash_1 (t1, mode) != gimple_type_hash_1 (t2, mode))
3904 /* If we've visited this type pair before (in the case of aggregates
3905 with self-referential types), and we made a decision, return it. */
3906 p = lookup_type_pair (t1, t2, >c_visited, >c_ob);
3907 if (p->same_p[mode] == 0 || p->same_p[mode] == 1)
3909 /* We have already decided whether T1 and T2 are the
3910 same, return the cached result. */
3911 return p->same_p[mode] == 1;
3914 /* Now set up the SCC machinery for the comparison. */
3915 gtc_next_dfs_num = 1;
3916 sccstate = pointer_map_create ();
3917 gcc_obstack_init (&sccstate_obstack);
3918 res = gimple_types_compatible_p_1 (t1, t2, mode, p,
3919 &sccstack, sccstate, &sccstate_obstack);
3920 VEC_free (type_pair_t, heap, sccstack);
3921 pointer_map_destroy (sccstate);
3922 obstack_free (&sccstate_obstack, NULL);
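/* A minimal usage sketch (hypothetical trees A and B, kept out of the build).
   GTC_MERGE is the stricter mode used when deciding whether two streamed
   types may be merged; GTC_DIAG is the looser mode used for canonical type
   computation, where complete and incomplete variants may compare equal.  */
#if 0
static bool
types_may_be_merged (tree a, tree b)
{
  return gimple_types_compatible_p (a, b, GTC_MERGE) != 0;
}
#endif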
3929 iterative_hash_gimple_type (tree, hashval_t, VEC(tree, heap) **,
3930 struct pointer_map_t *, struct obstack *,
3933 /* DFS visit the edge from the callers type with state *STATE to T.
3934 Update the callers type hash V with the hash for T if it is not part
3935 of the SCC containing the callers type and return it.
3936 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
3939 visit (tree t, struct sccs *state, hashval_t v,
3940 VEC (tree, heap) **sccstack,
3941 struct pointer_map_t *sccstate,
3942 struct obstack *sccstate_obstack, enum gtc_mode mode)
3944 struct sccs *cstate = NULL;
3945 struct tree_int_map m;
3948 /* If there is a hash value recorded for this type then it can't
3949 possibly be part of our parent SCC. Simply mix in its hash. */
3951 if ((slot = htab_find_slot (mode == GTC_MERGE
3952 ? type_hash_cache : canonical_type_hash_cache,
3955 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, v);
3957 if ((slot = pointer_map_contains (sccstate, t)) != NULL)
3958 cstate = (struct sccs *)*slot;
3962 /* Not yet visited. DFS recurse. */
3963 tem = iterative_hash_gimple_type (t, v,
3964 sccstack, sccstate, sccstate_obstack,
3967 cstate = (struct sccs *)* pointer_map_contains (sccstate, t);
3968 state->low = MIN (state->low, cstate->low);
3969 /* If the type is no longer on the SCC stack and thus is not part
3970 of the parents SCC mix in its hash value. Otherwise we will
3971 ignore the type for hashing purposes and return the unaltered hash value.  */
3973 if (!cstate->on_sccstack)
3976 if (cstate->dfsnum < state->dfsnum
3977 && cstate->on_sccstack)
3978 state->low = MIN (cstate->dfsnum, state->low);
3980 /* We are part of our parents SCC, skip this type during hashing
3981 and return the unaltered hash value. */
3985 /* Hash NAME with the previous hash value V and return it. */
3988 iterative_hash_name (tree name, hashval_t v)
3992 if (TREE_CODE (name) == TYPE_DECL)
3993 name = DECL_NAME (name);
3996 gcc_assert (TREE_CODE (name) == IDENTIFIER_NODE);
3997 return iterative_hash_object (IDENTIFIER_HASH_VALUE (name), v);
4000 /* Returning a hash value for gimple type TYPE combined with VAL.
4001 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.
4003 To hash a type we end up hashing in types that are reachable.
4004 Through pointers we can end up with cycles which messes up the
4005 required property that we need to compute the same hash value
4006 for structurally equivalent types. To avoid this we have to
4007 hash all types in a cycle (the SCC) in a commutative way. The
4008 easiest way is to not mix in the hashes of the SCC members at
4009 all. To make this work we have to delay setting the hash
4010 values of the SCC until it is complete. */
4013 iterative_hash_gimple_type (tree type, hashval_t val,
4014 VEC(tree, heap) **sccstack,
4015 struct pointer_map_t *sccstate,
4016 struct obstack *sccstate_obstack,
4023 /* Not visited during this DFS walk. */
4024 gcc_checking_assert (!pointer_map_contains (sccstate, type));
4025 state = XOBNEW (sccstate_obstack, struct sccs);
4026 *pointer_map_insert (sccstate, type) = state;
4028 VEC_safe_push (tree, heap, *sccstack, type);
4029 state->dfsnum = next_dfs_num++;
4030 state->low = state->dfsnum;
4031 state->on_sccstack = true;
4033 /* Combine a few common features of types so that types are grouped into
4034 smaller sets; when searching for existing matching types to merge,
4035 only existing types having the same features as the new type will be checked.  */
4037 v = iterative_hash_hashval_t (TREE_CODE (type), 0);
4038 v = iterative_hash_hashval_t (TYPE_QUALS (type), v);
4039 v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);
4041 /* Do not hash the type's size as this will cause differences in
4042 hash values for the complete vs. the incomplete type variant. */
4044 /* Incorporate common features of numerical types. */
4045 if (INTEGRAL_TYPE_P (type)
4046 || SCALAR_FLOAT_TYPE_P (type)
4047 || FIXED_POINT_TYPE_P (type))
4049 v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
4050 v = iterative_hash_hashval_t (TYPE_MODE (type), v);
4051 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
4054 /* For pointer and reference types, fold in information about the type
4055 pointed to but do not recurse into possibly incomplete types to
4056 avoid hash differences for complete vs. incomplete types. */
4057 if (POINTER_TYPE_P (type))
4059 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (type)))
4061 v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
4062 v = iterative_hash_name
4063 (TYPE_NAME (TYPE_MAIN_VARIANT (TREE_TYPE (type))), v);
4066 v = visit (TREE_TYPE (type), state, v,
4067 sccstack, sccstate, sccstate_obstack, mode);
4070 /* For integer types hash the types min/max values and the string flag. */
4071 if (TREE_CODE (type) == INTEGER_TYPE)
4073 /* OMP lowering can introduce error_mark_node in place of
4074 random local decls in types. */
4075 if (TYPE_MIN_VALUE (type) != error_mark_node)
4076 v = iterative_hash_expr (TYPE_MIN_VALUE (type), v);
4077 if (TYPE_MAX_VALUE (type) != error_mark_node)
4078 v = iterative_hash_expr (TYPE_MAX_VALUE (type), v);
4079 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
4082 /* For array types hash their domain and the string flag. */
4083 if (TREE_CODE (type) == ARRAY_TYPE
4084 && TYPE_DOMAIN (type))
4086 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
4087 v = visit (TYPE_DOMAIN (type), state, v,
4088 sccstack, sccstate, sccstate_obstack, mode);
4091 /* Recurse for aggregates with a single element type. */
4092 if (TREE_CODE (type) == ARRAY_TYPE
4093 || TREE_CODE (type) == COMPLEX_TYPE
4094 || TREE_CODE (type) == VECTOR_TYPE)
4095 v = visit (TREE_TYPE (type), state, v,
4096 sccstack, sccstate, sccstate_obstack, mode);
4098 /* Incorporate function return and argument types. */
4099 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
4104 /* For method types also incorporate their parent class. */
4105 if (TREE_CODE (type) == METHOD_TYPE)
4106 v = visit (TYPE_METHOD_BASETYPE (type), state, v,
4107 sccstack, sccstate, sccstate_obstack, mode);
4109 /* For result types allow mismatch in completeness. */
4110 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (type)))
4112 v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
4113 v = iterative_hash_name
4114 (TYPE_NAME (TYPE_MAIN_VARIANT (TREE_TYPE (type))), v);
4117 v = visit (TREE_TYPE (type), state, v,
4118 sccstack, sccstate, sccstate_obstack, mode);
4120 for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
4122 /* For argument types allow mismatch in completeness. */
4123 if (RECORD_OR_UNION_TYPE_P (TREE_VALUE (p)))
4125 v = iterative_hash_hashval_t (TREE_CODE (TREE_VALUE (p)), v);
4126 v = iterative_hash_name
4127 (TYPE_NAME (TYPE_MAIN_VARIANT (TREE_VALUE (p))), v);
4130 v = visit (TREE_VALUE (p), state, v,
4131 sccstack, sccstate, sccstate_obstack, mode);
4135 v = iterative_hash_hashval_t (na, v);
4138 if (TREE_CODE (type) == RECORD_TYPE
4139 || TREE_CODE (type) == UNION_TYPE
4140 || TREE_CODE (type) == QUAL_UNION_TYPE)
4145 if (mode == GTC_MERGE)
4146 v = iterative_hash_name (TYPE_NAME (TYPE_MAIN_VARIANT (type)), v);
4148 for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
4150 if (mode == GTC_MERGE)
4151 v = iterative_hash_name (DECL_NAME (f), v);
4152 v = visit (TREE_TYPE (f), state, v,
4153 sccstack, sccstate, sccstate_obstack, mode);
4157 v = iterative_hash_hashval_t (nf, v);
4160 /* Record hash for us. */
4163 /* See if we found an SCC. */
4164 if (state->low == state->dfsnum)
4168 /* Pop off the SCC and set its hash values. */
4171 struct sccs *cstate;
4172 struct tree_int_map *m = ggc_alloc_cleared_tree_int_map ();
4173 x = VEC_pop (tree, *sccstack);
4174 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
4175 cstate->on_sccstack = false;
4177 m->to = cstate->u.hash;
4178 slot = htab_find_slot (mode == GTC_MERGE
4179 ? type_hash_cache : canonical_type_hash_cache,
4181 gcc_assert (!*slot);
4187 return iterative_hash_hashval_t (v, val);
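/* Illustrative sketch (user-level C, hypothetical names, kept out of the
   build) of why SCC members must not feed each other's hash values directly:
   struct a and struct b below are mutually recursive, so hashing a "through"
   b and b "through" a would depend on visit order; instead the hashes of the
   whole SCC {a, b} are only recorded once the SCC is complete, as done
   above.  */
#if 0
struct b;
struct a { struct b *pb; };
struct b { struct a *pa; };
#endif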
4191 /* Returns a hash value for P (assumed to be a type). The hash value
4192 is computed using some distinguishing features of the type. Note
4193 that we cannot use pointer hashing here as we may be dealing with
4194 two distinct instances of the same type.
4196 This function should produce the same hash value for two compatible
4197 types according to gimple_types_compatible_p. */
4200 gimple_type_hash_1 (const void *p, enum gtc_mode mode)
4202 const_tree t = (const_tree) p;
4203 VEC(tree, heap) *sccstack = NULL;
4204 struct pointer_map_t *sccstate;
4205 struct obstack sccstate_obstack;
4208 struct tree_int_map m;
4210 if (mode == GTC_MERGE
4211 && type_hash_cache == NULL)
4212 type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
4213 tree_int_map_eq, NULL);
4214 else if (mode == GTC_DIAG
4215 && canonical_type_hash_cache == NULL)
4216 canonical_type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
4217 tree_int_map_eq, NULL);
4219 m.base.from = CONST_CAST_TREE (t);
4220 if ((slot = htab_find_slot (mode == GTC_MERGE
4221 ? type_hash_cache : canonical_type_hash_cache,
4224 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, 0);
4226 /* Perform a DFS walk and pre-hash all reachable types. */
4228 sccstate = pointer_map_create ();
4229 gcc_obstack_init (&sccstate_obstack);
4230 val = iterative_hash_gimple_type (CONST_CAST_TREE (t), 0,
4231 &sccstack, sccstate, &sccstate_obstack,
4233 VEC_free (tree, heap, sccstack);
4234 pointer_map_destroy (sccstate);
4235 obstack_free (&sccstate_obstack, NULL);
4241 gimple_type_hash (const void *p)
4243 return gimple_type_hash_1 (p, GTC_MERGE);
4247 gimple_canonical_type_hash (const void *p)
4249 return gimple_type_hash_1 (p, GTC_DIAG);
4253 /* Returns nonzero if P1 and P2 are equal. */
4256 gimple_type_eq (const void *p1, const void *p2)
4258 const_tree t1 = (const_tree) p1;
4259 const_tree t2 = (const_tree) p2;
4260 return gimple_types_compatible_p (CONST_CAST_TREE (t1),
4261 CONST_CAST_TREE (t2), GTC_MERGE);
4265 /* Register type T in the global type table gimple_types.
4266 If another type T', compatible with T, already existed in
4267 gimple_types then return T', otherwise return T. This is used by
4268 LTO to merge identical types read from different TUs. */
4271 gimple_register_type (tree t)
4274 gimple_type_leader_entry *leader;
4275 tree mv_leader = NULL_TREE;
4277 gcc_assert (TYPE_P (t));
4279 if (!gimple_type_leader)
4280 gimple_type_leader = ggc_alloc_cleared_vec_gimple_type_leader_entry_s
4281 (GIMPLE_TYPE_LEADER_SIZE);
4282 /* If we registered this type before return the cached result. */
4283 leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
4284 if (leader->type == t)
4285 return leader->leader;
4287 /* Always register the main variant first. This is important so we
4288 pick up the non-typedef variants as canonical, otherwise we'll end
4289 up taking typedef ids for structure tags during comparison. */
4290 if (TYPE_MAIN_VARIANT (t) != t)
4291 mv_leader = gimple_register_type (TYPE_MAIN_VARIANT (t));
4293 if (gimple_types == NULL)
4294 gimple_types = htab_create_ggc (16381, gimple_type_hash, gimple_type_eq, 0);
4296 slot = htab_find_slot (gimple_types, t, INSERT);
4298 && *(tree *)slot != t)
4300 tree new_type = (tree) *((tree *) slot);
4302 /* Do not merge types with different addressability. */
4303 gcc_assert (TREE_ADDRESSABLE (t) == TREE_ADDRESSABLE (new_type));
4305 /* If t is not its main variant then make t unreachable from its
4306 main variant list. Otherwise we'd queue up a lot of duplicates there.  */
4308 if (t != TYPE_MAIN_VARIANT (t))
4310 tree tem = TYPE_MAIN_VARIANT (t);
4311 while (tem && TYPE_NEXT_VARIANT (tem) != t)
4312 tem = TYPE_NEXT_VARIANT (tem);
4314 TYPE_NEXT_VARIANT (tem) = TYPE_NEXT_VARIANT (t);
4315 TYPE_NEXT_VARIANT (t) = NULL_TREE;
4318 /* If we are a pointer then remove us from the pointer-to or
4319 reference-to chain. Otherwise we'd queue up a lot of duplicates there.  */
4321 if (TREE_CODE (t) == POINTER_TYPE)
4323 if (TYPE_POINTER_TO (TREE_TYPE (t)) == t)
4324 TYPE_POINTER_TO (TREE_TYPE (t)) = TYPE_NEXT_PTR_TO (t);
4327 tree tem = TYPE_POINTER_TO (TREE_TYPE (t));
4328 while (tem && TYPE_NEXT_PTR_TO (tem) != t)
4329 tem = TYPE_NEXT_PTR_TO (tem);
4331 TYPE_NEXT_PTR_TO (tem) = TYPE_NEXT_PTR_TO (t);
4333 TYPE_NEXT_PTR_TO (t) = NULL_TREE;
4335 else if (TREE_CODE (t) == REFERENCE_TYPE)
4337 if (TYPE_REFERENCE_TO (TREE_TYPE (t)) == t)
4338 TYPE_REFERENCE_TO (TREE_TYPE (t)) = TYPE_NEXT_REF_TO (t);
4341 tree tem = TYPE_REFERENCE_TO (TREE_TYPE (t));
4342 while (tem && TYPE_NEXT_REF_TO (tem) != t)
4343 tem = TYPE_NEXT_REF_TO (tem);
4345 TYPE_NEXT_REF_TO (tem) = TYPE_NEXT_REF_TO (t);
4347 TYPE_NEXT_REF_TO (t) = NULL_TREE;
4351 leader->leader = new_type;
4358 /* We're the type leader. Make our TYPE_MAIN_VARIANT valid. */
4359 if (TYPE_MAIN_VARIANT (t) != t
4360 && TYPE_MAIN_VARIANT (t) != mv_leader)
4362 /* Remove us from our main variant list as we are not the variant
4363 leader and the variant leader will change. */
4364 tree tem = TYPE_MAIN_VARIANT (t);
4365 while (tem && TYPE_NEXT_VARIANT (tem) != t)
4366 tem = TYPE_NEXT_VARIANT (tem);
4368 TYPE_NEXT_VARIANT (tem) = TYPE_NEXT_VARIANT (t);
4369 TYPE_NEXT_VARIANT (t) = NULL_TREE;
4370 /* Adjust our main variant. Linking us into its variant list
4371 will happen at fixup time. */
4372 TYPE_MAIN_VARIANT (t) = mv_leader;
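/* A minimal usage sketch (hypothetical tree TYPE read by the LTO streamer,
   kept out of the build): every type read in from another TU is funneled
   through the registration routine above, and the returned leader is what
   the rest of the IL should point to from then on.  */
#if 0
static tree
merge_streamed_type (tree type)
{
  /* Returns TYPE itself the first time this structural type is seen,
     or a previously registered, structurally identical type otherwise.  */
  return gimple_register_type (type);
}
#endif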
4381 /* Returns nonzero if P1 and P2 are equal. */
4384 gimple_canonical_type_eq (const void *p1, const void *p2)
4386 const_tree t1 = (const_tree) p1;
4387 const_tree t2 = (const_tree) p2;
4388 return gimple_types_compatible_p (CONST_CAST_TREE (t1),
4389 CONST_CAST_TREE (t2), GTC_DIAG);
4392 /* Register type T in the global canonical type table
4393 gimple_canonical_types. If another type T', compatible with T in
4394 GTC_DIAG mode, already existed there then return T', otherwise return T.
4395 This is used by LTO to compute TYPE_CANONICAL for types read from different TUs.  */
4398 gimple_register_canonical_type (tree t)
4403 gcc_assert (TYPE_P (t));
4405 if (TYPE_CANONICAL (t))
4406 return TYPE_CANONICAL (t);
4408 /* Always register the type itself first so that if it turns out
4409 to be the canonical type it will be the one we merge to as well. */
4410 t = gimple_register_type (t);
4412 /* Always register the main variant first. This is important so we
4413 pick up the non-typedef variants as canonical, otherwise we'll end
4414 up taking typedef ids for structure tags during comparison. */
4415 if (TYPE_MAIN_VARIANT (t) != t)
4416 gimple_register_canonical_type (TYPE_MAIN_VARIANT (t));
4418 if (gimple_canonical_types == NULL)
4419 gimple_canonical_types = htab_create_ggc (16381, gimple_canonical_type_hash,
4420 gimple_canonical_type_eq, 0);
4422 slot = htab_find_slot (gimple_canonical_types, t, INSERT);
4424 && *(tree *)slot != t)
4426 tree new_type = (tree) *((tree *) slot);
4428 TYPE_CANONICAL (t) = new_type;
4433 TYPE_CANONICAL (t) = t;
4437 /* Also cache the canonical type in the non-leaders. */
4438 TYPE_CANONICAL (orig_t) = t;
4444 /* Show statistics on references to the global type table gimple_types. */
4447 print_gimple_types_stats (void)
4450 fprintf (stderr, "GIMPLE type table: size %ld, %ld elements, "
4451 "%ld searches, %ld collisions (ratio: %f)\n",
4452 (long) htab_size (gimple_types),
4453 (long) htab_elements (gimple_types),
4454 (long) gimple_types->searches,
4455 (long) gimple_types->collisions,
4456 htab_collisions (gimple_types));
4458 fprintf (stderr, "GIMPLE type table is empty\n");
4459 if (type_hash_cache)
4460 fprintf (stderr, "GIMPLE type hash table: size %ld, %ld elements, "
4461 "%ld searches, %ld collisions (ratio: %f)\n",
4462 (long) htab_size (type_hash_cache),
4463 (long) htab_elements (type_hash_cache),
4464 (long) type_hash_cache->searches,
4465 (long) type_hash_cache->collisions,
4466 htab_collisions (type_hash_cache));
4468 fprintf (stderr, "GIMPLE type hash table is empty\n");
4469 if (gimple_canonical_types)
4470 fprintf (stderr, "GIMPLE canonical type table: size %ld, %ld elements, "
4471 "%ld searches, %ld collisions (ratio: %f)\n",
4472 (long) htab_size (gimple_canonical_types),
4473 (long) htab_elements (gimple_canonical_types),
4474 (long) gimple_canonical_types->searches,
4475 (long) gimple_canonical_types->collisions,
4476 htab_collisions (gimple_canonical_types));
4478 fprintf (stderr, "GIMPLE canonical type table is empty\n");
4479 if (canonical_type_hash_cache)
4480 fprintf (stderr, "GIMPLE canonical type hash table: size %ld, %ld elements, "
4481 "%ld searches, %ld collisions (ratio: %f)\n",
4482 (long) htab_size (canonical_type_hash_cache),
4483 (long) htab_elements (canonical_type_hash_cache),
4484 (long) canonical_type_hash_cache->searches,
4485 (long) canonical_type_hash_cache->collisions,
4486 htab_collisions (canonical_type_hash_cache));
4488 fprintf (stderr, "GIMPLE canonical type hash table is empty\n");
4490 fprintf (stderr, "GIMPLE type comparison table: size %ld, %ld "
4491 "elements, %ld searches, %ld collisions (ratio: %f)\n",
4492 (long) htab_size (gtc_visited),
4493 (long) htab_elements (gtc_visited),
4494 (long) gtc_visited->searches,
4495 (long) gtc_visited->collisions,
4496 htab_collisions (gtc_visited));
4498 fprintf (stderr, "GIMPLE type comparison table is empty\n");
4501 /* Free the gimple type hashtables used for LTO type merging. */
4504 free_gimple_type_tables (void)
4506 /* Last chance to print stats for the tables. */
4507 if (flag_lto_report)
4508 print_gimple_types_stats ();
4512 htab_delete (gimple_types);
4513 gimple_types = NULL;
4515 if (gimple_canonical_types)
4517 htab_delete (gimple_canonical_types);
4518 gimple_canonical_types = NULL;
4520 if (type_hash_cache)
4522 htab_delete (type_hash_cache);
4523 type_hash_cache = NULL;
4525 if (canonical_type_hash_cache)
4527 htab_delete (canonical_type_hash_cache);
4528 canonical_type_hash_cache = NULL;
4532 htab_delete (gtc_visited);
4533 obstack_free (>c_ob, NULL);
4536 gimple_type_leader = NULL;
4540 /* Return a type the same as TYPE except unsigned or
4541 signed according to UNSIGNEDP. */
4544 gimple_signed_or_unsigned_type (bool unsignedp, tree type)
4548 type1 = TYPE_MAIN_VARIANT (type);
4549 if (type1 == signed_char_type_node
4550 || type1 == char_type_node
4551 || type1 == unsigned_char_type_node)
4552 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
4553 if (type1 == integer_type_node || type1 == unsigned_type_node)
4554 return unsignedp ? unsigned_type_node : integer_type_node;
4555 if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
4556 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
4557 if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
4558 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
4559 if (type1 == long_long_integer_type_node
4560 || type1 == long_long_unsigned_type_node)
4562 ? long_long_unsigned_type_node
4563 : long_long_integer_type_node;
4564 if (int128_integer_type_node
     && (type1 == int128_integer_type_node
         || type1 == int128_unsigned_type_node))
4566 ? int128_unsigned_type_node
4567 : int128_integer_type_node;
4568 #if HOST_BITS_PER_WIDE_INT >= 64
4569 if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
4570 return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
4572 if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
4573 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
4574 if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
4575 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
4576 if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
4577 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
4578 if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
4579 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
4581 #define GIMPLE_FIXED_TYPES(NAME) \
4582 if (type1 == short_ ## NAME ## _type_node \
4583 || type1 == unsigned_short_ ## NAME ## _type_node) \
4584 return unsignedp ? unsigned_short_ ## NAME ## _type_node \
4585 : short_ ## NAME ## _type_node; \
4586 if (type1 == NAME ## _type_node \
4587 || type1 == unsigned_ ## NAME ## _type_node) \
4588 return unsignedp ? unsigned_ ## NAME ## _type_node \
4589 : NAME ## _type_node; \
4590 if (type1 == long_ ## NAME ## _type_node \
4591 || type1 == unsigned_long_ ## NAME ## _type_node) \
4592 return unsignedp ? unsigned_long_ ## NAME ## _type_node \
4593 : long_ ## NAME ## _type_node; \
4594 if (type1 == long_long_ ## NAME ## _type_node \
4595 || type1 == unsigned_long_long_ ## NAME ## _type_node) \
4596 return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
4597 : long_long_ ## NAME ## _type_node;
4599 #define GIMPLE_FIXED_MODE_TYPES(NAME) \
4600 if (type1 == NAME ## _type_node \
4601 || type1 == u ## NAME ## _type_node) \
4602 return unsignedp ? u ## NAME ## _type_node \
4603 : NAME ## _type_node;
4605 #define GIMPLE_FIXED_TYPES_SAT(NAME) \
4606 if (type1 == sat_ ## short_ ## NAME ## _type_node \
4607 || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
4608 return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
4609 : sat_ ## short_ ## NAME ## _type_node; \
4610 if (type1 == sat_ ## NAME ## _type_node \
4611 || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
4612 return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
4613 : sat_ ## NAME ## _type_node; \
4614 if (type1 == sat_ ## long_ ## NAME ## _type_node \
4615 || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
4616 return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
4617 : sat_ ## long_ ## NAME ## _type_node; \
4618 if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
4619 || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
4620 return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
4621 : sat_ ## long_long_ ## NAME ## _type_node;
4623 #define GIMPLE_FIXED_MODE_TYPES_SAT(NAME) \
4624 if (type1 == sat_ ## NAME ## _type_node \
4625 || type1 == sat_ ## u ## NAME ## _type_node) \
4626 return unsignedp ? sat_ ## u ## NAME ## _type_node \
4627 : sat_ ## NAME ## _type_node;
4629 GIMPLE_FIXED_TYPES (fract);
4630 GIMPLE_FIXED_TYPES_SAT (fract);
4631 GIMPLE_FIXED_TYPES (accum);
4632 GIMPLE_FIXED_TYPES_SAT (accum);
4634 GIMPLE_FIXED_MODE_TYPES (qq);
4635 GIMPLE_FIXED_MODE_TYPES (hq);
4636 GIMPLE_FIXED_MODE_TYPES (sq);
4637 GIMPLE_FIXED_MODE_TYPES (dq);
4638 GIMPLE_FIXED_MODE_TYPES (tq);
4639 GIMPLE_FIXED_MODE_TYPES_SAT (qq);
4640 GIMPLE_FIXED_MODE_TYPES_SAT (hq);
4641 GIMPLE_FIXED_MODE_TYPES_SAT (sq);
4642 GIMPLE_FIXED_MODE_TYPES_SAT (dq);
4643 GIMPLE_FIXED_MODE_TYPES_SAT (tq);
4644 GIMPLE_FIXED_MODE_TYPES (ha);
4645 GIMPLE_FIXED_MODE_TYPES (sa);
4646 GIMPLE_FIXED_MODE_TYPES (da);
4647 GIMPLE_FIXED_MODE_TYPES (ta);
4648 GIMPLE_FIXED_MODE_TYPES_SAT (ha);
4649 GIMPLE_FIXED_MODE_TYPES_SAT (sa);
4650 GIMPLE_FIXED_MODE_TYPES_SAT (da);
4651 GIMPLE_FIXED_MODE_TYPES_SAT (ta);
4653 /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
4654 the precision; they have precision set to match their range, but
4655 may use a wider mode to match an ABI. If we change modes, we may
4656 wind up with bad conversions. For INTEGER_TYPEs in C, must check
4657 the precision as well, so as to yield correct results for
4658 bit-field types. C++ does not have these separate bit-field
4659 types, and producing a signed or unsigned variant of an
4660 ENUMERAL_TYPE may cause other problems as well. */
4661 if (!INTEGRAL_TYPE_P (type)
4662 || TYPE_UNSIGNED (type) == unsignedp)
4663 return type;
4665 #define TYPE_OK(node) \
4666 (TYPE_MODE (type) == TYPE_MODE (node) \
4667 && TYPE_PRECISION (type) == TYPE_PRECISION (node))
4668 if (TYPE_OK (signed_char_type_node))
4669 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
4670 if (TYPE_OK (integer_type_node))
4671 return unsignedp ? unsigned_type_node : integer_type_node;
4672 if (TYPE_OK (short_integer_type_node))
4673 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
4674 if (TYPE_OK (long_integer_type_node))
4675 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
4676 if (TYPE_OK (long_long_integer_type_node))
4677 return (unsignedp
4678 ? long_long_unsigned_type_node
4679 : long_long_integer_type_node);
4680 if (int128_integer_type_node && TYPE_OK (int128_integer_type_node))
4681 return (unsignedp
4682 ? int128_unsigned_type_node
4683 : int128_integer_type_node);
4685 #if HOST_BITS_PER_WIDE_INT >= 64
4686 if (TYPE_OK (intTI_type_node))
4687 return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
4688 #endif
4689 if (TYPE_OK (intDI_type_node))
4690 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
4691 if (TYPE_OK (intSI_type_node))
4692 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
4693 if (TYPE_OK (intHI_type_node))
4694 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
4695 if (TYPE_OK (intQI_type_node))
4696 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
4698 #undef GIMPLE_FIXED_TYPES
4699 #undef GIMPLE_FIXED_MODE_TYPES
4700 #undef GIMPLE_FIXED_TYPES_SAT
4701 #undef GIMPLE_FIXED_MODE_TYPES_SAT
4702 #undef TYPE_OK
4704 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
4705 }
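/* Usage sketch (illustrative only; this helper is hypothetical and not part
   of the original sources): expected results of the conversion above for a
   couple of standard types.  */
static void
gimple_signed_or_unsigned_type_examples (void)
{
  /* The unsigned variant of `int' is `unsigned int'.  */
  gcc_assert (gimple_signed_or_unsigned_type (true, integer_type_node)
              == unsigned_type_node);
  /* A type that already has the requested signedness is returned as is.  */
  gcc_assert (gimple_signed_or_unsigned_type (false, short_integer_type_node)
              == short_integer_type_node);
  /* Anything not matched by the cases above falls back to
     build_nonstandard_integer_type using TYPE's precision.  */
}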
4708 /* Return an unsigned type the same as TYPE in other respects. */
4710 tree
4711 gimple_unsigned_type (tree type)
4712 {
4713 return gimple_signed_or_unsigned_type (true, type);
4714 }
4717 /* Return a signed type the same as TYPE in other respects. */
4719 tree
4720 gimple_signed_type (tree type)
4721 {
4722 return gimple_signed_or_unsigned_type (false, type);
4723 }
4726 /* Return the type-based alias set for T, which may be an expression
4727 or a type. Return -1 if we don't do anything special. */
4729 alias_set_type
4730 gimple_get_alias_set (tree t)
4731 {
4732 tree u;
4734 /* Permit type-punning when accessing a union, provided the access
4735 is directly through the union. For example, this code does not
4736 permit taking the address of a union member and then storing
4737 through it. Even the type-punning allowed here is a GCC
4738 extension, albeit a common and useful one; the C standard says
4739 that such accesses have implementation-defined behavior. */
4740 for (u = t;
4741 TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
4742 u = TREE_OPERAND (u, 0))
4743 if (TREE_CODE (u) == COMPONENT_REF
4744 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
4745 return 0;
4747 /* That's all the expressions we handle specially. */
4748 if (!TYPE_P (t))
4749 return -1;
4751 /* For convenience, follow the C standard when dealing with
4752 character types. Any object may be accessed via an lvalue that
4753 has character type. */
4754 if (t == char_type_node
4755 || t == signed_char_type_node
4756 || t == unsigned_char_type_node)
4757 return 0;
4759 /* Allow aliasing between signed and unsigned variants of the same
4760 type. We treat the signed variant as canonical. */
4761 if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
4762 {
4763 tree t1 = gimple_signed_type (t);
4765 /* t1 == t can happen for boolean nodes which are always unsigned. */
4766 if (t1 != t)
4767 return get_alias_set (t1);
4768 }
4770 return -1;
4771 }
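/* Usage sketch (illustrative only; hypothetical helper, not part of the
   original file): how the special cases above play out.  Accesses through
   plain `char' may alias anything, and an unsigned integer type shares the
   alias set of its signed counterpart.  */
static void
gimple_get_alias_set_examples (void)
{
  /* Character types get alias set 0, the "may alias everything" set.  */
  gcc_assert (gimple_get_alias_set (char_type_node) == 0);
  /* `unsigned int' is mapped onto the alias set of `int'.  */
  gcc_assert (gimple_get_alias_set (unsigned_type_node)
              == get_alias_set (integer_type_node));
}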
4774 /* Data structure used to count the number of dereferences to PTR
4775 inside an expression. */
4776 struct count_ptr_d
4777 {
4778 tree ptr;
4779 unsigned num_stores;
4780 unsigned num_loads;
4781 };
4783 /* Helper for count_uses_and_derefs. Called by walk_tree to look for
4784 MEM_REF nodes that dereference the pointer passed in DATA. */
4786 static tree
4787 count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
4788 {
4789 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
4790 struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info;
4792 /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
4793 pointer 'ptr' is *not* dereferenced, it is simply used to compute
4794 the address of 'fld' as 'ptr + offsetof(fld)'. */
4795 if (TREE_CODE (*tp) == ADDR_EXPR)
4796 {
4797 *walk_subtrees = 0;
4798 return NULL_TREE;
4799 }
4801 if (TREE_CODE (*tp) == MEM_REF && TREE_OPERAND (*tp, 0) == count_p->ptr)
4802 {
4803 if (wi_p->is_lhs)
4804 count_p->num_stores++;
4805 else
4806 count_p->num_loads++;
4807 }
4809 return NULL_TREE;
4810 }
4812 /* Count the number of direct and indirect uses for pointer PTR in
4813 statement STMT. The number of direct uses is stored in
4814 *NUM_USES_P. Indirect references are counted separately depending
4815 on whether they are store or load operations. The counts are
4816 stored in *NUM_STORES_P and *NUM_LOADS_P. */
4818 void
4819 count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p,
4820 unsigned *num_loads_p, unsigned *num_stores_p)
4821 {
4822 ssa_op_iter i;
4823 tree use;
4825 *num_uses_p = 0;
4826 *num_loads_p = 0;
4827 *num_stores_p = 0;
4829 /* Find out the total number of uses of PTR in STMT. */
4830 FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
4831 if (use == ptr)
4832 (*num_uses_p)++;
4834 /* Now count the number of indirect references to PTR. This is
4835 truly awful, but we don't have much choice. There are no parent
4836 pointers inside INDIRECT_REFs, so an expression like
4837 '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
4838 find all the indirect and direct uses of x_1 inside. The only
4839 shortcut we can take is the fact that GIMPLE only allows
4840 INDIRECT_REFs inside the expressions below. */
4841 if (is_gimple_assign (stmt)
4842 || gimple_code (stmt) == GIMPLE_RETURN
4843 || gimple_code (stmt) == GIMPLE_ASM
4844 || is_gimple_call (stmt))
4845 {
4846 struct walk_stmt_info wi;
4847 struct count_ptr_d count;
4849 count.ptr = ptr;
4850 count.num_stores = 0;
4851 count.num_loads = 0;
4853 memset (&wi, 0, sizeof (wi));
4854 wi.info = &count;
4855 walk_gimple_op (stmt, count_ptr_derefs, &wi);
4857 *num_stores_p = count.num_stores;
4858 *num_loads_p = count.num_loads;
4859 }
4861 gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
4862 }
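/* Usage sketch (hypothetical helper, illustration only): report how pointer
   PTR is used by a single statement.  For the statement
   '*x_1 = foo (x_1, *x_1)' mentioned above, with PTR being x_1, this would
   typically report 3 uses, 1 load and 1 store.  */
static void
report_ptr_uses (tree ptr, gimple stmt)
{
  unsigned uses, loads, stores;
  count_uses_and_derefs (ptr, stmt, &uses, &loads, &stores);
  fprintf (stderr, "%u uses, %u loads, %u stores\n", uses, loads, stores);
}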
4864 /* From a tree operand OP return the base of a load or store operation
4865 or NULL_TREE if OP is not a load or a store. */
4867 static tree
4868 get_base_loadstore (tree op)
4869 {
4870 while (handled_component_p (op))
4871 op = TREE_OPERAND (op, 0);
4872 if (DECL_P (op)
4873 || INDIRECT_REF_P (op)
4874 || TREE_CODE (op) == MEM_REF
4875 || TREE_CODE (op) == TARGET_MEM_REF)
4876 return op;
4877 return NULL_TREE;
4878 }
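/* Illustration (hypothetical helper, not part of the original file): for an
   operand like a.b.c[i] the loop above strips the COMPONENT_REFs and the
   ARRAY_REF and returns the VAR_DECL of `a'; for *p the MEM_REF itself is
   returned; for a bare SSA name or constant the result is NULL_TREE, i.e.
   the operand is not a memory access.  */
static bool
operand_is_memory_access_p (tree op)
{
  return get_base_loadstore (op) != NULL_TREE;
}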
4880 /* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
4881 VISIT_ADDR if non-NULL on loads, stores and address-taken operands
4882 passing the STMT, the base of the operand and DATA to it. The base
4883 will be either a decl, an indirect reference (including TARGET_MEM_REF)
4884 or the argument of an address expression.
4885 Returns the results of these callbacks or'ed. */
4887 bool
4888 walk_stmt_load_store_addr_ops (gimple stmt, void *data,
4889 bool (*visit_load)(gimple, tree, void *),
4890 bool (*visit_store)(gimple, tree, void *),
4891 bool (*visit_addr)(gimple, tree, void *))
4892 {
4893 bool ret = false;
4894 unsigned i;
4895 if (gimple_assign_single_p (stmt))
4896 {
4897 tree lhs, rhs;
4898 if (visit_store)
4899 {
4900 lhs = get_base_loadstore (gimple_assign_lhs (stmt));
4901 if (lhs)
4902 ret |= visit_store (stmt, lhs, data);
4903 }
4904 rhs = gimple_assign_rhs1 (stmt);
4905 while (handled_component_p (rhs))
4906 rhs = TREE_OPERAND (rhs, 0);
4907 if (visit_addr)
4908 {
4909 if (TREE_CODE (rhs) == ADDR_EXPR)
4910 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
4911 else if (TREE_CODE (rhs) == TARGET_MEM_REF
4912 && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
4913 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data);
4914 else if (TREE_CODE (rhs) == OBJ_TYPE_REF
4915 && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
4916 ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
4917 0), data);
4918 lhs = gimple_assign_lhs (stmt);
4919 if (TREE_CODE (lhs) == TARGET_MEM_REF
4920 && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
4921 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), data);
4922 }
4923 if (visit_load)
4924 {
4925 rhs = get_base_loadstore (rhs);
4926 if (rhs)
4927 ret |= visit_load (stmt, rhs, data);
4928 }
4929 }
4930 else if (visit_addr
4931 && (is_gimple_assign (stmt)
4932 || gimple_code (stmt) == GIMPLE_COND))
4933 {
4934 for (i = 0; i < gimple_num_ops (stmt); ++i)
4935 if (gimple_op (stmt, i)
4936 && TREE_CODE (gimple_op (stmt, i)) == ADDR_EXPR)
4937 ret |= visit_addr (stmt, TREE_OPERAND (gimple_op (stmt, i), 0), data);
4938 }
4939 else if (is_gimple_call (stmt))
4940 {
4941 if (visit_store)
4942 {
4943 tree lhs = gimple_call_lhs (stmt);
4944 if (lhs)
4945 {
4946 lhs = get_base_loadstore (lhs);
4947 if (lhs)
4948 ret |= visit_store (stmt, lhs, data);
4949 }
4950 }
4951 if (visit_load || visit_addr)
4952 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4953 {
4954 tree rhs = gimple_call_arg (stmt, i);
4955 if (visit_addr
4956 && TREE_CODE (rhs) == ADDR_EXPR)
4957 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
4958 else if (visit_load)
4959 {
4960 rhs = get_base_loadstore (rhs);
4961 if (rhs)
4962 ret |= visit_load (stmt, rhs, data);
4963 }
4964 }
4965 if (visit_addr
4966 && gimple_call_chain (stmt)
4967 && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
4968 ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
4969 data);
4970 if (visit_addr
4971 && gimple_call_return_slot_opt_p (stmt)
4972 && gimple_call_lhs (stmt) != NULL_TREE
4973 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
4974 ret |= visit_addr (stmt, gimple_call_lhs (stmt), data);
4975 }
4976 else if (gimple_code (stmt) == GIMPLE_ASM)
4977 {
4978 unsigned noutputs;
4979 const char *constraint;
4980 const char **oconstraints;
4981 bool allows_mem, allows_reg, is_inout;
4982 noutputs = gimple_asm_noutputs (stmt);
4983 oconstraints = XALLOCAVEC (const char *, noutputs);
4984 if (visit_store || visit_addr)
4985 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
4986 {
4987 tree link = gimple_asm_output_op (stmt, i);
4988 tree op = get_base_loadstore (TREE_VALUE (link));
4989 if (op && visit_store)
4990 ret |= visit_store (stmt, op, data);
4991 if (visit_addr)
4992 {
4993 constraint = TREE_STRING_POINTER
4994 (TREE_VALUE (TREE_PURPOSE (link)));
4995 oconstraints[i] = constraint;
4996 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
4997 &allows_reg, &is_inout);
4998 if (op && !allows_reg && allows_mem)
4999 ret |= visit_addr (stmt, op, data);
5000 }
5001 }
5002 if (visit_load || visit_addr)
5003 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
5004 {
5005 tree link = gimple_asm_input_op (stmt, i);
5006 tree op = TREE_VALUE (link);
5007 if (visit_addr
5008 && TREE_CODE (op) == ADDR_EXPR)
5009 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
5010 else if (visit_load || visit_addr)
5011 {
5012 op = get_base_loadstore (op);
5013 if (op)
5014 {
5015 if (visit_load)
5016 ret |= visit_load (stmt, op, data);
5017 if (visit_addr)
5018 {
5019 constraint = TREE_STRING_POINTER
5020 (TREE_VALUE (TREE_PURPOSE (link)));
5021 parse_input_constraint (&constraint, 0, 0, noutputs,
5022 0, oconstraints,
5023 &allows_mem, &allows_reg);
5024 if (!allows_reg && allows_mem)
5025 ret |= visit_addr (stmt, op, data);
5026 }
5027 }
5028 }
5029 }
5030 }
5031 else if (gimple_code (stmt) == GIMPLE_RETURN)
5032 {
5033 tree op = gimple_return_retval (stmt);
5034 if (op)
5035 {
5036 if (visit_addr
5037 && TREE_CODE (op) == ADDR_EXPR)
5038 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
5039 else if (visit_load)
5040 {
5041 op = get_base_loadstore (op);
5042 if (op)
5043 ret |= visit_load (stmt, op, data);
5044 }
5045 }
5046 }
5047 else if (visit_addr
5048 && gimple_code (stmt) == GIMPLE_PHI)
5049 {
5050 for (i = 0; i < gimple_phi_num_args (stmt); ++i)
5051 {
5052 tree op = PHI_ARG_DEF (stmt, i);
5053 if (TREE_CODE (op) == ADDR_EXPR)
5054 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
5055 }
5056 }
5058 return ret;
5059 }
5061 /* Like walk_stmt_load_store_addr_ops but with NULL visit_addr. IPA-CP
5062 should make a faster clone for this case. */
5064 bool
5065 walk_stmt_load_store_ops (gimple stmt, void *data,
5066 bool (*visit_load)(gimple, tree, void *),
5067 bool (*visit_store)(gimple, tree, void *))
5068 {
5069 return walk_stmt_load_store_addr_ops (stmt, data,
5070 visit_load, visit_store, NULL);
5071 }
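/* Usage sketch (hypothetical callback, illustration only): record the DECL
   uids of every load base in a statement, in the same style as
   gimple_ior_addresses_taken_1 below.  A caller would invoke it as
   walk_stmt_load_store_ops (stmt, loaded_decls, note_load_base, NULL).  */
static bool
note_load_base (gimple stmt ATTRIBUTE_UNUSED, tree base, void *data)
{
  bitmap loaded_decls = (bitmap) data;
  if (DECL_P (base))
    {
      bitmap_set_bit (loaded_decls, DECL_UID (base));
      return true;
    }
  return false;
}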
5073 /* Helper for gimple_ior_addresses_taken. */
5075 static bool
5076 gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
5077 tree addr, void *data)
5078 {
5079 bitmap addresses_taken = (bitmap)data;
5080 addr = get_base_address (addr);
5081 if (addr
5082 && DECL_P (addr))
5083 {
5084 bitmap_set_bit (addresses_taken, DECL_UID (addr));
5085 return true;
5086 }
5087 return false;
5088 }
5090 /* Set the bit for the uid of all decls that have their address taken
5091 in STMT in the ADDRESSES_TAKEN bitmap. Returns true if there
5092 were any in this stmt. */
5094 bool
5095 gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
5096 {
5097 return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
5098 gimple_ior_addresses_taken_1);
5099 }
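/* Usage sketch (hypothetical helper, illustration only): collect the uids of
   all decls whose address is taken anywhere in basic block BB.  */
static bitmap
addresses_taken_in_bb (basic_block bb)
{
  bitmap addrs = BITMAP_ALLOC (NULL);
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    gimple_ior_addresses_taken (addrs, gsi_stmt (gsi));
  return addrs;
}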
5102 /* Return a printable name for symbol DECL. */
5104 const char *
5105 gimple_decl_printable_name (tree decl, int verbosity)
5106 {
5107 if (!DECL_NAME (decl))
5108 return NULL;
5110 if (DECL_ASSEMBLER_NAME_SET_P (decl))
5111 {
5112 const char *str, *mangled_str;
5113 int dmgl_opts = DMGL_NO_OPTS;
5115 if (verbosity >= 2)
5116 {
5117 dmgl_opts = DMGL_VERBOSE
5118 | DMGL_ANSI
5119 | DMGL_GNU_V3
5120 | DMGL_RET_POSTFIX;
5121 if (TREE_CODE (decl) == FUNCTION_DECL)
5122 dmgl_opts |= DMGL_PARAMS;
5123 }
5125 mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
5126 str = cplus_demangle_v3 (mangled_str, dmgl_opts);
5127 return (str) ? str : mangled_str;
5128 }
5130 return IDENTIFIER_POINTER (DECL_NAME (decl));
5131 }
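/* Usage sketch (hypothetical helper, illustration only): for a C++ decl
   whose assembler name is "_Z3fooi", verbosity 2 requests a demangled form
   with parameters, so this would print "foo(int)"; if demangling fails the
   mangled string itself is printed.  */
static void
debug_print_decl_name (tree decl)
{
  const char *name = gimple_decl_printable_name (decl, 2);
  if (name)
    fprintf (stderr, "%s\n", name);
}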
5133 /* Return true when STMT is a call to the built-in function CODE. */
5135 bool
5136 gimple_call_builtin_p (gimple stmt, enum built_in_function code)
5137 {
5138 tree fndecl;
5139 return (is_gimple_call (stmt)
5140 && (fndecl = gimple_call_fndecl (stmt)) != NULL
5141 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
5142 && DECL_FUNCTION_CODE (fndecl) == code);
5143 }
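/* Usage sketch (hypothetical helper, illustration only): recognizing a
   direct call to the memcpy builtin.  Calls through function pointers have
   no fndecl and are rejected by the predicate above.  */
static bool
stmt_is_builtin_memcpy_p (gimple stmt)
{
  return gimple_call_builtin_p (stmt, BUILT_IN_MEMCPY);
}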
5145 /* Return true if STMT clobbers memory. STMT is required to be a
5146 GIMPLE_ASM. */
5148 bool
5149 gimple_asm_clobbers_memory_p (const_gimple stmt)
5150 {
5151 unsigned i;
5153 for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
5154 {
5155 tree op = gimple_asm_clobber_op (stmt, i);
5156 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
5157 return true;
5158 }
5159 return false;
5160 }
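/* Usage sketch (hypothetical helper, illustration only): a compiler barrier
   written as  __asm__ __volatile__ ("" : : : "memory")  becomes a GIMPLE_ASM
   whose clobber list contains "memory", so it is recognized by the predicate
   above.  */
static bool
stmt_is_compiler_barrier_p (gimple stmt)
{
  return gimple_code (stmt) == GIMPLE_ASM
         && gimple_asm_clobbers_memory_p (stmt);
}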
5162 #include "gt-gimple.h"