1 /* Gimple IR support functions.
3 Copyright 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Aldy Hernandez <aldyh@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
32 #include "diagnostic.h"
33 #include "tree-flow.h"
34 #include "value-prof.h"
38 #include "langhooks.h"
40 /* Global type table. FIXME lto, it should be possible to re-use some
41 of the type hashing routines in tree.c (type_hash_canon, type_hash_lookup,
42 etc), but those assume that types were built with the various
43 build_*_type routines which is not the case with the streamer. */
44 static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
45 htab_t gimple_types;
46 static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
47 htab_t gimple_canonical_types;
48 static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
49 htab_t type_hash_cache;
50 static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
51 htab_t canonical_type_hash_cache;
53 /* All the tuples have their operand vector (if present) at the very bottom
54 of the structure. Therefore, the offset required to find the
55 operands vector is the size of the structure minus the size of the 1
56 element tree array at the end (see gimple_ops). */
57 #define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
58 (HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
59 EXPORTED_CONST size_t gimple_ops_offset_[] = {
60 #include "gsstruct.def"
64 #define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof(struct STRUCT),
65 static const size_t gsstruct_code_size[] = {
66 #include "gsstruct.def"
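/* Illustrative sketch (not part of the original file): the offsets above are
   what let gimple_ops (see gimple.h) locate a tuple's operand vector by
   pointer arithmetic, roughly

     size_t off = gimple_ops_offset_[gimple_statement_structure (gs)];
     tree *ops = (tree *) ((char *) gs + off);

   Structures without a trailing operand array get offset 0, which the real
   accessor guards with a checking assert.  */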
70 #define DEFGSCODE(SYM, NAME, GSSCODE) NAME,
71 const char *const gimple_code_name[] = {
76 #define DEFGSCODE(SYM, NAME, GSSCODE) GSSCODE,
77 EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
82 #ifdef GATHER_STATISTICS
85 int gimple_alloc_counts[(int) gimple_alloc_kind_all];
86 int gimple_alloc_sizes[(int) gimple_alloc_kind_all];
88 /* Keep in sync with gimple.h:enum gimple_alloc_kind. */
89 static const char * const gimple_alloc_kind_names[] = {
97 #endif /* GATHER_STATISTICS */
99 /* A cache of gimple_seq objects. Sequences are created and destroyed
100 fairly often during gimplification. */
101 static GTY ((deletable)) struct gimple_seq_d *gimple_seq_cache;
103 /* Private API manipulation functions shared only with some other files. */
105 extern void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
106 extern void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);
108 /* Gimple tuple constructors.
109 Note: Any constructor taking a ``gimple_seq'' as a parameter can
110 be passed a NULL to start with an empty sequence. */
112 /* Set the code for statement G to CODE. */
115 gimple_set_code (gimple g, enum gimple_code code)
117 g->gsbase.code = code;
120 /* Return the number of bytes needed to hold a GIMPLE statement with code CODE. */
124 gimple_size (enum gimple_code code)
126 return gsstruct_code_size[gss_for_code (code)];
129 /* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS operands. */
133 gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
138 size = gimple_size (code);
140 size += sizeof (tree) * (num_ops - 1);
142 #ifdef GATHER_STATISTICS
144 enum gimple_alloc_kind kind = gimple_alloc_kind (code);
145 gimple_alloc_counts[(int) kind]++;
146 gimple_alloc_sizes[(int) kind] += size;
150 stmt = ggc_alloc_cleared_gimple_statement_d_stat (size PASS_MEM_STAT);
151 gimple_set_code (stmt, code);
152 gimple_set_num_ops (stmt, num_ops);
154 /* Do not call gimple_set_modified here as it has other side
155 effects and this tuple is still not completely built. */
156 stmt->gsbase.modified = 1;
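/* Worked example (illustrative): for a GIMPLE_ASSIGN with NUM_OPS == 3 the
   allocation above amounts to

     gimple_size (GIMPLE_ASSIGN) + 2 * sizeof (tree)

   since the structure's own size already accounts for the first element of
   the trailing operand array; the result has room for the LHS and two RHS
   operands.  */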
161 /* Set SUBCODE to be the code of the expression computed by statement G. */
164 gimple_set_subcode (gimple g, unsigned subcode)
166 /* We only have 16 bits for the RHS code. Assert that we are not overflowing it. */
168 gcc_assert (subcode < (1 << 16));
169 g->gsbase.subcode = subcode;
174 /* Build a tuple with operands. CODE is the statement to build (which
175 must be one of the GIMPLE_WITH_OPS tuples). SUBCODE is the sub-code
176 for the new tuple. NUM_OPS is the number of operands to allocate. */
178 #define gimple_build_with_ops(c, s, n) \
179 gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)
182 gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
183 unsigned num_ops MEM_STAT_DECL)
185 gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
186 gimple_set_subcode (s, subcode);
192 /* Build a GIMPLE_RETURN statement returning RETVAL. */
195 gimple_build_return (tree retval)
197 gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 1);
199 gimple_return_set_retval (s, retval);
203 /* Reset alias information on call S. */
206 gimple_call_reset_alias_info (gimple s)
208 if (gimple_call_flags (s) & ECF_CONST)
209 memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
211 pt_solution_reset (gimple_call_use_set (s));
212 if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
213 memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
215 pt_solution_reset (gimple_call_clobber_set (s));
218 /* Helper for gimple_build_call, gimple_build_call_valist,
219 gimple_build_call_vec and gimple_build_call_from_tree. Build the basic
220 components of a GIMPLE_CALL statement to function FN with NARGS arguments. */
224 gimple_build_call_1 (tree fn, unsigned nargs)
226 gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
227 if (TREE_CODE (fn) == FUNCTION_DECL)
228 fn = build_fold_addr_expr (fn);
229 gimple_set_op (s, 1, fn);
230 gimple_call_set_fntype (s, TREE_TYPE (TREE_TYPE (fn)));
231 gimple_call_reset_alias_info (s);
236 /* Build a GIMPLE_CALL statement to function FN with the arguments
237 specified in vector ARGS. */
240 gimple_build_call_vec (tree fn, VEC(tree, heap) *args)
243 unsigned nargs = VEC_length (tree, args);
244 gimple call = gimple_build_call_1 (fn, nargs);
246 for (i = 0; i < nargs; i++)
247 gimple_call_set_arg (call, i, VEC_index (tree, args, i));
253 /* Build a GIMPLE_CALL statement to function FN. NARGS is the number of
254 arguments. The ... are the arguments. */
257 gimple_build_call (tree fn, unsigned nargs, ...)
263 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
265 call = gimple_build_call_1 (fn, nargs);
267 va_start (ap, nargs);
268 for (i = 0; i < nargs; i++)
269 gimple_call_set_arg (call, i, va_arg (ap, tree));
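/* Usage sketch (illustrative; FOO_DECL, A, B and TMP are hypothetical
   gimple operands): building `tmp = foo (a, b)':

     gimple call = gimple_build_call (foo_decl, 2, a, b);
     gimple_call_set_lhs (call, tmp);

   gimple_build_call_vec and gimple_build_call_valist below do the same with
   the arguments taken from a VEC or a va_list.  */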
276 /* Build a GIMPLE_CALL statement to function FN. NARGS is the number of
277 arguments. AP contains the arguments. */
280 gimple_build_call_valist (tree fn, unsigned nargs, va_list ap)
285 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
287 call = gimple_build_call_1 (fn, nargs);
289 for (i = 0; i < nargs; i++)
290 gimple_call_set_arg (call, i, va_arg (ap, tree));
296 /* Helper for gimple_build_call_internal and gimple_build_call_internal_vec.
297 Build the basic components of a GIMPLE_CALL statement to internal
298 function FN with NARGS arguments. */
301 gimple_build_call_internal_1 (enum internal_fn fn, unsigned nargs)
303 gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
304 s->gsbase.subcode |= GF_CALL_INTERNAL;
305 gimple_call_set_internal_fn (s, fn);
306 gimple_call_reset_alias_info (s);
311 /* Build a GIMPLE_CALL statement to internal function FN. NARGS is
312 the number of arguments. The ... are the arguments. */
315 gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
321 call = gimple_build_call_internal_1 (fn, nargs);
322 va_start (ap, nargs);
323 for (i = 0; i < nargs; i++)
324 gimple_call_set_arg (call, i, va_arg (ap, tree));
331 /* Build a GIMPLE_CALL statement to internal function FN with the arguments
332 specified in vector ARGS. */
335 gimple_build_call_internal_vec (enum internal_fn fn, VEC(tree, heap) *args)
340 nargs = VEC_length (tree, args);
341 call = gimple_build_call_internal_1 (fn, nargs);
342 for (i = 0; i < nargs; i++)
343 gimple_call_set_arg (call, i, VEC_index (tree, args, i));
349 /* Build a GIMPLE_CALL statement from CALL_EXPR T. Note that T is
350 assumed to be in GIMPLE form already. Minimal checking is done of this fact. */
354 gimple_build_call_from_tree (tree t)
358 tree fndecl = get_callee_fndecl (t);
360 gcc_assert (TREE_CODE (t) == CALL_EXPR);
362 nargs = call_expr_nargs (t);
363 call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);
365 for (i = 0; i < nargs; i++)
366 gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));
368 gimple_set_block (call, TREE_BLOCK (t));
370 /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL. */
371 gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
372 gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
373 gimple_call_set_cannot_inline (call, CALL_CANNOT_INLINE_P (t));
374 gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
376 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
377 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
378 gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t));
380 gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
381 gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
382 gimple_call_set_nothrow (call, TREE_NOTHROW (t));
383 gimple_set_no_warning (call, TREE_NO_WARNING (t));
389 /* Extract the operands and code for expression EXPR into *SUBCODE_P,
390 *OP1_P, *OP2_P and *OP3_P respectively. */
393 extract_ops_from_tree_1 (tree expr, enum tree_code *subcode_p, tree *op1_p,
394 tree *op2_p, tree *op3_p)
396 enum gimple_rhs_class grhs_class;
398 *subcode_p = TREE_CODE (expr);
399 grhs_class = get_gimple_rhs_class (*subcode_p);
401 if (grhs_class == GIMPLE_TERNARY_RHS)
403 *op1_p = TREE_OPERAND (expr, 0);
404 *op2_p = TREE_OPERAND (expr, 1);
405 *op3_p = TREE_OPERAND (expr, 2);
407 else if (grhs_class == GIMPLE_BINARY_RHS)
409 *op1_p = TREE_OPERAND (expr, 0);
410 *op2_p = TREE_OPERAND (expr, 1);
413 else if (grhs_class == GIMPLE_UNARY_RHS)
415 *op1_p = TREE_OPERAND (expr, 0);
419 else if (grhs_class == GIMPLE_SINGLE_RHS)
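/* Example (illustrative): for EXPR `a + b' this sets

     *SUBCODE_P = PLUS_EXPR, *OP1_P = a, *OP2_P = b, *OP3_P = NULL_TREE

   (a GIMPLE_BINARY_RHS), while a bare SSA name or constant is returned
   whole in *OP1_P as a GIMPLE_SINGLE_RHS.  */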
430 /* Build a GIMPLE_ASSIGN statement.
432 LHS of the assignment.
433 RHS of the assignment, which can be a single, unary, binary or ternary expression. */
436 gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
438 enum tree_code subcode;
441 extract_ops_from_tree_1 (rhs, &subcode, &op1, &op2, &op3);
442 return gimple_build_assign_with_ops_stat (subcode, lhs, op1, op2, op3
447 /* Build a GIMPLE_ASSIGN statement with sub-code SUBCODE and operands
448 OP1 and OP2. If OP2 is NULL then SUBCODE must be of class
449 GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS. */
452 gimple_build_assign_with_ops_stat (enum tree_code subcode, tree lhs, tree op1,
453 tree op2, tree op3 MEM_STAT_DECL)
458 /* Need 1 operand for the LHS and 1 to 3 for the RHS (depending on the sub-code). */
460 num_ops = get_gimple_rhs_num_ops (subcode) + 1;
462 p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
464 gimple_assign_set_lhs (p, lhs);
465 gimple_assign_set_rhs1 (p, op1);
468 gcc_assert (num_ops > 2);
469 gimple_assign_set_rhs2 (p, op2);
474 gcc_assert (num_ops > 3);
475 gimple_assign_set_rhs3 (p, op3);
482 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
484 DST/SRC are the destination and source respectively. You can pass
485 ungimplified trees in DST or SRC, in which case they will be
486 converted to a gimple operand if necessary.
488 This function returns the newly created GIMPLE_ASSIGN tuple. */
491 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
493 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
494 gimplify_and_add (t, seq_p);
496 return gimple_seq_last_stmt (*seq_p);
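/* Usage sketch (illustrative; X, Y and Z are hypothetical trees): appending
   `x = y + z' to a sequence during gimplification:

     gimple_seq seq = NULL;
     tree sum = build2 (PLUS_EXPR, TREE_TYPE (x), y, z);
     gimple stmt = gimplify_assign (x, sum, &seq);

   The operands are gimplified as needed and the new GIMPLE_ASSIGN is both
   appended to SEQ and returned.  */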
500 /* Build a GIMPLE_COND statement.
502 PRED is the condition used to compare LHS and the RHS.
503 T_LABEL is the label to jump to if the condition is true.
504 F_LABEL is the label to jump to otherwise. */
507 gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
508 tree t_label, tree f_label)
512 gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
513 p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
514 gimple_cond_set_lhs (p, lhs);
515 gimple_cond_set_rhs (p, rhs);
516 gimple_cond_set_true_label (p, t_label);
517 gimple_cond_set_false_label (p, f_label);
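/* Usage sketch (illustrative; A, B, L_TRUE and L_FALSE are hypothetical
   operands and LABEL_DECLs): building `if (a < b) goto l_true; else goto
   l_false;':

     gimple cond = gimple_build_cond (LT_EXPR, a, b, l_true, l_false);  */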
522 /* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND. */
525 gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
526 tree *lhs_p, tree *rhs_p)
528 gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
529 || TREE_CODE (cond) == TRUTH_NOT_EXPR
530 || is_gimple_min_invariant (cond)
531 || SSA_VAR_P (cond));
533 extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);
535 /* Canonicalize conditionals of the form 'if (!VAL)'. */
536 if (*code_p == TRUTH_NOT_EXPR)
539 gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
540 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
542 /* Canonicalize conditionals of the form 'if (VAL)' */
543 else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
546 gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
547 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
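/* Example (illustrative): assuming the usual canonicalization against zero,
   `if (x)' yields the comparison `x != 0' and `if (!x)' yields `x == 0',
   with the zero constant built by build_zero_cst above, so the resulting
   GIMPLE_COND always carries a comparison code and two operands.  */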
552 /* Build a GIMPLE_COND statement from the conditional expression tree
553 COND. T_LABEL and F_LABEL are as in gimple_build_cond. */
556 gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
561 gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
562 return gimple_build_cond (code, lhs, rhs, t_label, f_label);
565 /* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
566 boolean expression tree COND. */
569 gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
574 gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
575 gimple_cond_set_condition (stmt, code, lhs, rhs);
578 /* Build a GIMPLE_LABEL statement for LABEL. */
581 gimple_build_label (tree label)
583 gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
584 gimple_label_set_label (p, label);
588 /* Build a GIMPLE_GOTO statement to label DEST. */
591 gimple_build_goto (tree dest)
593 gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
594 gimple_goto_set_dest (p, dest);
599 /* Build a GIMPLE_NOP statement. */
602 gimple_build_nop (void)
604 return gimple_alloc (GIMPLE_NOP, 0);
608 /* Build a GIMPLE_BIND statement.
609 VARS are the variables in BODY.
610 BLOCK is the containing block. */
613 gimple_build_bind (tree vars, gimple_seq body, tree block)
615 gimple p = gimple_alloc (GIMPLE_BIND, 0);
616 gimple_bind_set_vars (p, vars);
618 gimple_bind_set_body (p, body);
620 gimple_bind_set_block (p, block);
624 /* Helper function to set the simple fields of an asm stmt.
626 STRING is a pointer to a string that is the asm block's assembly code.
627 NINPUTS is the number of register inputs.
628 NOUTPUTS is the number of register outputs.
629 NCLOBBERS is the number of clobbered registers.
633 gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
634 unsigned nclobbers, unsigned nlabels)
637 int size = strlen (string);
639 /* ASMs with labels cannot have outputs. This should have been
640 enforced by the front end. */
641 gcc_assert (nlabels == 0 || noutputs == 0);
643 p = gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
644 ninputs + noutputs + nclobbers + nlabels);
646 p->gimple_asm.ni = ninputs;
647 p->gimple_asm.no = noutputs;
648 p->gimple_asm.nc = nclobbers;
649 p->gimple_asm.nl = nlabels;
650 p->gimple_asm.string = ggc_alloc_string (string, size);
652 #ifdef GATHER_STATISTICS
653 gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;
659 /* Build a GIMPLE_ASM statement.
661 STRING is the assembly code.
665 INPUTS is a vector of the input register parameters.
666 OUTPUTS is a vector of the output register parameters.
667 CLOBBERS is a vector of the clobbered register parameters.
668 LABELS is a vector of destination labels. */
671 gimple_build_asm_vec (const char *string, VEC(tree,gc)* inputs,
672 VEC(tree,gc)* outputs, VEC(tree,gc)* clobbers,
673 VEC(tree,gc)* labels)
678 p = gimple_build_asm_1 (string,
679 VEC_length (tree, inputs),
680 VEC_length (tree, outputs),
681 VEC_length (tree, clobbers),
682 VEC_length (tree, labels));
684 for (i = 0; i < VEC_length (tree, inputs); i++)
685 gimple_asm_set_input_op (p, i, VEC_index (tree, inputs, i));
687 for (i = 0; i < VEC_length (tree, outputs); i++)
688 gimple_asm_set_output_op (p, i, VEC_index (tree, outputs, i));
690 for (i = 0; i < VEC_length (tree, clobbers); i++)
691 gimple_asm_set_clobber_op (p, i, VEC_index (tree, clobbers, i));
693 for (i = 0; i < VEC_length (tree, labels); i++)
694 gimple_asm_set_label_op (p, i, VEC_index (tree, labels, i));
699 /* Build a GIMPLE_CATCH statement.
701 TYPES are the catch types.
702 HANDLER is the exception handler. */
705 gimple_build_catch (tree types, gimple_seq handler)
707 gimple p = gimple_alloc (GIMPLE_CATCH, 0);
708 gimple_catch_set_types (p, types);
710 gimple_catch_set_handler (p, handler);
715 /* Build a GIMPLE_EH_FILTER statement.
717 TYPES are the filter's types.
718 FAILURE is the filter's failure action. */
721 gimple_build_eh_filter (tree types, gimple_seq failure)
723 gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
724 gimple_eh_filter_set_types (p, types);
726 gimple_eh_filter_set_failure (p, failure);
731 /* Build a GIMPLE_EH_MUST_NOT_THROW statement. */
734 gimple_build_eh_must_not_throw (tree decl)
736 gimple p = gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0);
738 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
739 gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
740 gimple_eh_must_not_throw_set_fndecl (p, decl);
745 /* Build a GIMPLE_TRY statement.
747 EVAL is the expression to evaluate.
748 CLEANUP is the cleanup expression.
749 KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
750 whether this is a try/catch or a try/finally respectively. */
753 gimple_build_try (gimple_seq eval, gimple_seq cleanup,
754 enum gimple_try_flags kind)
758 gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
759 p = gimple_alloc (GIMPLE_TRY, 0);
760 gimple_set_subcode (p, kind);
762 gimple_try_set_eval (p, eval);
764 gimple_try_set_cleanup (p, cleanup);
769 /* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.
771 CLEANUP is the cleanup expression. */
774 gimple_build_wce (gimple_seq cleanup)
776 gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
778 gimple_wce_set_cleanup (p, cleanup);
784 /* Build a GIMPLE_RESX statement. */
787 gimple_build_resx (int region)
789 gimple p = gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0);
790 p->gimple_eh_ctrl.region = region;
795 /* The helper for constructing a gimple switch statement.
796 INDEX is the switch's index.
797 NLABELS is the number of labels in the switch excluding the default.
798 DEFAULT_LABEL is the default label for the switch statement. */
801 gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
803 /* nlabels + 1 default label + 1 index. */
804 gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
805 1 + (default_label != NULL) + nlabels);
806 gimple_switch_set_index (p, index);
808 gimple_switch_set_default_label (p, default_label);
813 /* Build a GIMPLE_SWITCH statement.
815 INDEX is the switch's index.
816 NLABELS is the number of labels in the switch excluding the DEFAULT_LABEL.
817 ... are the labels excluding the default. */
820 gimple_build_switch (unsigned nlabels, tree index, tree default_label, ...)
824 gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);
826 /* Store the rest of the labels. */
827 va_start (al, default_label);
828 offset = (default_label != NULL);
829 for (i = 0; i < nlabels; i++)
830 gimple_switch_set_label (p, i + offset, va_arg (al, tree));
837 /* Build a GIMPLE_SWITCH statement.
839 INDEX is the switch's index.
840 DEFAULT_LABEL is the default label
841 ARGS is a vector of labels excluding the default. */
844 gimple_build_switch_vec (tree index, tree default_label, VEC(tree, heap) *args)
846 unsigned i, offset, nlabels = VEC_length (tree, args);
847 gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);
849 /* Copy the labels from the vector to the switch statement. */
850 offset = (default_label != NULL);
851 for (i = 0; i < nlabels; i++)
852 gimple_switch_set_label (p, i + offset, VEC_index (tree, args, i));
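/* Operand layout sketch (illustrative): for `switch (i)' with a default and
   two case labels the tuple has 1 + 1 + 2 operands:

     op[0]          the index I
     op[1]          the default CASE_LABEL_EXPR (switch label 0)
     op[2], op[3]   the remaining CASE_LABEL_EXPRs (switch labels 1 and 2)

   and gimple_switch_label (p, N) reads op[N + 1].  */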
857 /* Build a GIMPLE_EH_DISPATCH statement. */
860 gimple_build_eh_dispatch (int region)
862 gimple p = gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0);
863 p->gimple_eh_ctrl.region = region;
867 /* Build a new GIMPLE_DEBUG_BIND statement.
869 VAR is bound to VALUE; block and location are taken from STMT. */
872 gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
874 gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
875 (unsigned)GIMPLE_DEBUG_BIND, 2
878 gimple_debug_bind_set_var (p, var);
879 gimple_debug_bind_set_value (p, value);
882 gimple_set_block (p, gimple_block (stmt));
883 gimple_set_location (p, gimple_location (stmt));
890 /* Build a new GIMPLE_DEBUG_SOURCE_BIND statement.
892 VAR is bound to VALUE; block and location are taken from STMT. */
895 gimple_build_debug_source_bind_stat (tree var, tree value,
896 gimple stmt MEM_STAT_DECL)
898 gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
899 (unsigned)GIMPLE_DEBUG_SOURCE_BIND, 2
902 gimple_debug_source_bind_set_var (p, var);
903 gimple_debug_source_bind_set_value (p, value);
906 gimple_set_block (p, gimple_block (stmt));
907 gimple_set_location (p, gimple_location (stmt));
914 /* Build a GIMPLE_OMP_CRITICAL statement.
916 BODY is the sequence of statements that only one thread may execute at a time.
917 NAME is optional identifier for this critical block. */
920 gimple_build_omp_critical (gimple_seq body, tree name)
922 gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
923 gimple_omp_critical_set_name (p, name);
925 gimple_omp_set_body (p, body);
930 /* Build a GIMPLE_OMP_FOR statement.
932 BODY is sequence of statements inside the for loop.
933 CLAUSES are any of the OMP loop construct's clauses: private, firstprivate,
934 lastprivate, reductions, ordered, schedule, and nowait.
935 COLLAPSE is the collapse count.
936 PRE_BODY is the sequence of statements that are loop invariant. */
939 gimple_build_omp_for (gimple_seq body, tree clauses, size_t collapse,
942 gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
944 gimple_omp_set_body (p, body);
945 gimple_omp_for_set_clauses (p, clauses);
946 p->gimple_omp_for.collapse = collapse;
947 p->gimple_omp_for.iter
948 = ggc_alloc_cleared_vec_gimple_omp_for_iter (collapse);
950 gimple_omp_for_set_pre_body (p, pre_body);
956 /* Build a GIMPLE_OMP_PARALLEL statement.
958 BODY is sequence of statements which are executed in parallel.
959 CLAUSES are the OMP parallel construct's clauses.
960 CHILD_FN is the function created for the parallel threads to execute.
961 DATA_ARG are the shared data argument(s). */
964 gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
967 gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
969 gimple_omp_set_body (p, body);
970 gimple_omp_parallel_set_clauses (p, clauses);
971 gimple_omp_parallel_set_child_fn (p, child_fn);
972 gimple_omp_parallel_set_data_arg (p, data_arg);
978 /* Build a GIMPLE_OMP_TASK statement.
980 BODY is sequence of statements which are executed by the explicit task.
981 CLAUSES are the OMP task construct's clauses.
982 CHILD_FN is the function created for the task to execute.
983 DATA_ARG are the shared data argument(s).
984 COPY_FN is the optional function for firstprivate initialization.
985 ARG_SIZE and ARG_ALIGN are size and alignment of the data block. */
988 gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
989 tree data_arg, tree copy_fn, tree arg_size,
992 gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
994 gimple_omp_set_body (p, body);
995 gimple_omp_task_set_clauses (p, clauses);
996 gimple_omp_task_set_child_fn (p, child_fn);
997 gimple_omp_task_set_data_arg (p, data_arg);
998 gimple_omp_task_set_copy_fn (p, copy_fn);
999 gimple_omp_task_set_arg_size (p, arg_size);
1000 gimple_omp_task_set_arg_align (p, arg_align);
1006 /* Build a GIMPLE_OMP_SECTION statement for a sections statement.
1008 BODY is the sequence of statements in the section. */
1011 gimple_build_omp_section (gimple_seq body)
1013 gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
1015 gimple_omp_set_body (p, body);
1021 /* Build a GIMPLE_OMP_MASTER statement.
1023 BODY is the sequence of statements to be executed by just the master. */
1026 gimple_build_omp_master (gimple_seq body)
1028 gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
1030 gimple_omp_set_body (p, body);
1036 /* Build a GIMPLE_OMP_CONTINUE statement.
1038 CONTROL_DEF is the definition of the control variable.
1039 CONTROL_USE is the use of the control variable. */
1042 gimple_build_omp_continue (tree control_def, tree control_use)
1044 gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
1045 gimple_omp_continue_set_control_def (p, control_def);
1046 gimple_omp_continue_set_control_use (p, control_use);
1050 /* Build a GIMPLE_OMP_ORDERED statement.
1052 BODY is the sequence of statements inside a loop that will be executed in sequence.
1056 gimple_build_omp_ordered (gimple_seq body)
1058 gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
1060 gimple_omp_set_body (p, body);
1066 /* Build a GIMPLE_OMP_RETURN statement.
1067 WAIT_P is true if this is a non-waiting return. */
1070 gimple_build_omp_return (bool wait_p)
1072 gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
1074 gimple_omp_return_set_nowait (p);
1080 /* Build a GIMPLE_OMP_SECTIONS statement.
1082 BODY is a sequence of section statements.
1083 CLAUSES are any of the OMP sections construct's clauses: private,
1084 firstprivate, lastprivate, reduction, and nowait. */
1087 gimple_build_omp_sections (gimple_seq body, tree clauses)
1089 gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
1091 gimple_omp_set_body (p, body);
1092 gimple_omp_sections_set_clauses (p, clauses);
1098 /* Build a GIMPLE_OMP_SECTIONS_SWITCH. */
1101 gimple_build_omp_sections_switch (void)
1103 return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
1107 /* Build a GIMPLE_OMP_SINGLE statement.
1109 BODY is the sequence of statements that will be executed once.
1110 CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
1111 copyprivate, nowait. */
1114 gimple_build_omp_single (gimple_seq body, tree clauses)
1116 gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
1118 gimple_omp_set_body (p, body);
1119 gimple_omp_single_set_clauses (p, clauses);
1125 /* Build a GIMPLE_OMP_ATOMIC_LOAD statement. */
1128 gimple_build_omp_atomic_load (tree lhs, tree rhs)
1130 gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
1131 gimple_omp_atomic_load_set_lhs (p, lhs);
1132 gimple_omp_atomic_load_set_rhs (p, rhs);
1136 /* Build a GIMPLE_OMP_ATOMIC_STORE statement.
1138 VAL is the value we are storing. */
1141 gimple_build_omp_atomic_store (tree val)
1143 gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
1144 gimple_omp_atomic_store_set_val (p, val);
1148 /* Build a GIMPLE_PREDICT statement. PREDICTOR is one of the predictors from
1149 predict.def, OUTCOME is NOT_TAKEN or TAKEN. */
1152 gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
1154 gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
1155 /* Ensure all the predictors fit into the lower bits of the subcode. */
1156 gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
1157 gimple_predict_set_predictor (p, predictor);
1158 gimple_predict_set_outcome (p, outcome);
1162 #if defined ENABLE_GIMPLE_CHECKING
1163 /* Complain of a gimple type mismatch and die. */
1166 gimple_check_failed (const_gimple gs, const char *file, int line,
1167 const char *function, enum gimple_code code,
1168 enum tree_code subcode)
1170 internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
1171 gimple_code_name[code],
1172 tree_code_name[subcode],
1173 gimple_code_name[gimple_code (gs)],
1174 gs->gsbase.subcode > 0
1175 ? tree_code_name[gs->gsbase.subcode]
1177 function, trim_filename (file), line);
1179 #endif /* ENABLE_GIMPLE_CHECKING */
1182 /* Allocate a new GIMPLE sequence in GC memory and return it. If
1183 there are free sequences in GIMPLE_SEQ_CACHE return one of those instead. */
1187 gimple_seq_alloc (void)
1189 gimple_seq seq = gimple_seq_cache;
1192 gimple_seq_cache = gimple_seq_cache->next_free;
1193 gcc_assert (gimple_seq_cache != seq);
1194 memset (seq, 0, sizeof (*seq));
1198 seq = ggc_alloc_cleared_gimple_seq_d ();
1199 #ifdef GATHER_STATISTICS
1200 gimple_alloc_counts[(int) gimple_alloc_kind_seq]++;
1201 gimple_alloc_sizes[(int) gimple_alloc_kind_seq] += sizeof (*seq);
1208 /* Return SEQ to the free pool of GIMPLE sequences. */
1211 gimple_seq_free (gimple_seq seq)
1216 gcc_assert (gimple_seq_first (seq) == NULL);
1217 gcc_assert (gimple_seq_last (seq) == NULL);
1219 /* If this triggers, it's a sign that the same list is being freed twice. */
1221 gcc_assert (seq != gimple_seq_cache || gimple_seq_cache == NULL);
1223 /* Add SEQ to the pool of free sequences. */
1224 seq->next_free = gimple_seq_cache;
1225 gimple_seq_cache = seq;
1229 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
1230 *SEQ_P is NULL, a new sequence is allocated. */
1233 gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
1235 gimple_stmt_iterator si;
1241 *seq_p = gimple_seq_alloc ();
1243 si = gsi_last (*seq_p);
1244 gsi_insert_after (&si, gs, GSI_NEW_STMT);
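/* Usage sketch (illustrative; LAB, LHS and RHS are hypothetical trees):
   accumulating statements into a fresh sequence:

     gimple_seq seq = NULL;
     gimple_seq_add_stmt (&seq, gimple_build_label (lab));
     gimple_seq_add_stmt (&seq, gimple_build_assign (lhs, rhs));

   Passing the address of a NULL sequence allocates it on first use.  */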
1248 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
1249 NULL, a new sequence is allocated. */
1252 gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
1254 gimple_stmt_iterator si;
1260 *dst_p = gimple_seq_alloc ();
1262 si = gsi_last (*dst_p);
1263 gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
1267 /* Helper function of empty_body_p. Return true if STMT is an empty statement. */
1271 empty_stmt_p (gimple stmt)
1273 if (gimple_code (stmt) == GIMPLE_NOP)
1275 if (gimple_code (stmt) == GIMPLE_BIND)
1276 return empty_body_p (gimple_bind_body (stmt));
1281 /* Return true if BODY contains nothing but empty statements. */
1284 empty_body_p (gimple_seq body)
1286 gimple_stmt_iterator i;
1288 if (gimple_seq_empty_p (body))
1290 for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
1291 if (!empty_stmt_p (gsi_stmt (i))
1292 && !is_gimple_debug (gsi_stmt (i)))
1299 /* Perform a deep copy of sequence SRC and return the result. */
1302 gimple_seq_copy (gimple_seq src)
1304 gimple_stmt_iterator gsi;
1305 gimple_seq new_seq = gimple_seq_alloc ();
1308 for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
1310 stmt = gimple_copy (gsi_stmt (gsi));
1311 gimple_seq_add_stmt (&new_seq, stmt);
1318 /* Walk all the statements in the sequence SEQ calling walk_gimple_stmt
1319 on each one. WI is as in walk_gimple_stmt.
1321 If walk_gimple_stmt returns non-NULL, the walk is stopped, the
1322 value is stored in WI->CALLBACK_RESULT and the statement that
1323 produced the value is returned.
1325 Otherwise, all the statements are walked and NULL returned. */
1328 walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
1329 walk_tree_fn callback_op, struct walk_stmt_info *wi)
1331 gimple_stmt_iterator gsi;
1333 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
1335 tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
1338 /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist to hold it. */
1341 wi->callback_result = ret;
1342 return gsi_stmt (gsi);
1347 wi->callback_result = NULL_TREE;
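/* Usage sketch (illustrative; FIND_SSA is a hypothetical walk_tree_fn):
   counting SSA_NAME operands in a sequence:

     static tree
     find_ssa (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
     {
       struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
       if (TREE_CODE (*tp) == SSA_NAME)
         (*(int *) wi->info)++;
       return NULL_TREE;
     }

     struct walk_stmt_info wi;
     int count = 0;
     memset (&wi, 0, sizeof (wi));
     wi.info = &count;
     walk_gimple_seq (seq, NULL, find_ssa, &wi);

   Returning non-NULL from a callback stops the walk as described above.  */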
1353 /* Helper function for walk_gimple_stmt. Walk operands of a GIMPLE_ASM. */
1356 walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
1357 struct walk_stmt_info *wi)
1361 const char **oconstraints;
1363 const char *constraint;
1364 bool allows_mem, allows_reg, is_inout;
1366 noutputs = gimple_asm_noutputs (stmt);
1367 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
1372 for (i = 0; i < noutputs; i++)
1374 op = gimple_asm_output_op (stmt, i);
1375 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
1376 oconstraints[i] = constraint;
1377 parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg,
1380 wi->val_only = (allows_reg || !allows_mem);
1381 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
1386 n = gimple_asm_ninputs (stmt);
1387 for (i = 0; i < n; i++)
1389 op = gimple_asm_input_op (stmt, i);
1390 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
1391 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1392 oconstraints, &allows_mem, &allows_reg);
1395 wi->val_only = (allows_reg || !allows_mem);
1396 /* Although input "m" is not really an LHS, we need an lvalue. */
1397 wi->is_lhs = !wi->val_only;
1399 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
1407 wi->val_only = true;
1410 n = gimple_asm_nlabels (stmt);
1411 for (i = 0; i < n; i++)
1413 op = gimple_asm_label_op (stmt, i);
1414 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
1423 /* Helper function of WALK_GIMPLE_STMT. Walk every tree operand in
1424 STMT. CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.
1426 CALLBACK_OP is called on each operand of STMT via walk_tree.
1427 Additional parameters to walk_tree must be stored in WI. For each operand
1428 OP, walk_tree is called as:
1430 walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)
1432 If CALLBACK_OP returns non-NULL for an operand, the remaining
1433 operands are not scanned.
1435 The return value is that returned by the last call to walk_tree, or
1436 NULL_TREE if no CALLBACK_OP is specified. */
1439 walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
1440 struct walk_stmt_info *wi)
1442 struct pointer_set_t *pset = (wi) ? wi->pset : NULL;
1444 tree ret = NULL_TREE;
1446 switch (gimple_code (stmt))
1449 /* Walk the RHS operands. If the LHS is of a non-renamable type or
1450 is a register variable, we may use a COMPONENT_REF on the RHS. */
1453 tree lhs = gimple_assign_lhs (stmt);
1455 = (is_gimple_reg_type (TREE_TYPE (lhs)) && !is_gimple_reg (lhs))
1456 || !gimple_assign_single_p (stmt);
1459 for (i = 1; i < gimple_num_ops (stmt); i++)
1461 ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
1467 /* Walk the LHS. If the RHS is appropriate for a memory, we
1468 may use a COMPONENT_REF on the LHS. */
1471 /* If the RHS has more than 1 operand, it is not appropriate for the memory. */
1473 wi->val_only = !is_gimple_mem_rhs (gimple_assign_rhs1 (stmt))
1474 || !gimple_assign_single_p (stmt);
1478 ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
1484 wi->val_only = true;
1493 wi->val_only = true;
1496 ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
1500 ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
1504 for (i = 0; i < gimple_call_num_args (stmt); i++)
1508 = is_gimple_reg_type (TREE_TYPE (gimple_call_arg (stmt, i)));
1509 ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
1515 if (gimple_call_lhs (stmt))
1521 = is_gimple_reg_type (TREE_TYPE (gimple_call_lhs (stmt)));
1524 ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
1532 wi->val_only = true;
1537 ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
1543 case GIMPLE_EH_FILTER:
1544 ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
1551 ret = walk_gimple_asm (stmt, callback_op, wi);
1556 case GIMPLE_OMP_CONTINUE:
1557 ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
1558 callback_op, wi, pset);
1562 ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
1563 callback_op, wi, pset);
1568 case GIMPLE_OMP_CRITICAL:
1569 ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
1575 case GIMPLE_OMP_FOR:
1576 ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
1580 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1582 ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
1586 ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
1590 ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
1594 ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
1601 case GIMPLE_OMP_PARALLEL:
1602 ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
1606 ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
1610 ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
1616 case GIMPLE_OMP_TASK:
1617 ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
1621 ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
1625 ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
1629 ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
1633 ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
1637 ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
1643 case GIMPLE_OMP_SECTIONS:
1644 ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
1649 ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
1656 case GIMPLE_OMP_SINGLE:
1657 ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
1663 case GIMPLE_OMP_ATOMIC_LOAD:
1664 ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
1669 ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
1675 case GIMPLE_OMP_ATOMIC_STORE:
1676 ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
1682 /* Tuples that do not have operands. */
1685 case GIMPLE_OMP_RETURN:
1686 case GIMPLE_PREDICT:
1691 enum gimple_statement_structure_enum gss;
1692 gss = gimple_statement_structure (stmt);
1693 if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
1694 for (i = 0; i < gimple_num_ops (stmt); i++)
1696 ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
1708 /* Walk the current statement in GSI (optionally using traversal state
1709 stored in WI). If WI is NULL, no state is kept during traversal.
1710 The callback CALLBACK_STMT is called. If CALLBACK_STMT indicates
1711 that it has handled all the operands of the statement, its return
1712 value is returned. Otherwise, the return value from CALLBACK_STMT
1713 is discarded and its operands are scanned.
1715 If CALLBACK_STMT is NULL or it didn't handle the operands,
1716 CALLBACK_OP is called on each operand of the statement via
1717 walk_gimple_op. If walk_gimple_op returns non-NULL for any
1718 operand, the remaining operands are not scanned. In this case, the
1719 return value from CALLBACK_OP is returned.
1721 In any other case, NULL_TREE is returned. */
1724 walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
1725 walk_tree_fn callback_op, struct walk_stmt_info *wi)
1729 gimple stmt = gsi_stmt (*gsi);
1734 if (wi && wi->want_locations && gimple_has_location (stmt))
1735 input_location = gimple_location (stmt);
1739 /* Invoke the statement callback. Return if the callback handled
1740 all of STMT operands by itself. */
1743 bool handled_ops = false;
1744 tree_ret = callback_stmt (gsi, &handled_ops, wi);
1748 /* If CALLBACK_STMT did not handle operands, it should not have
1749 a value to return. */
1750 gcc_assert (tree_ret == NULL);
1752 /* Re-read stmt in case the callback changed it. */
1753 stmt = gsi_stmt (*gsi);
1756 /* If CALLBACK_OP is defined, invoke it on every operand of STMT. */
1759 tree_ret = walk_gimple_op (stmt, callback_op, wi);
1764 /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them. */
1765 switch (gimple_code (stmt))
1768 ret = walk_gimple_seq (gimple_bind_body (stmt), callback_stmt,
1771 return wi->callback_result;
1775 ret = walk_gimple_seq (gimple_catch_handler (stmt), callback_stmt,
1778 return wi->callback_result;
1781 case GIMPLE_EH_FILTER:
1782 ret = walk_gimple_seq (gimple_eh_filter_failure (stmt), callback_stmt,
1785 return wi->callback_result;
1789 ret = walk_gimple_seq (gimple_try_eval (stmt), callback_stmt, callback_op,
1792 return wi->callback_result;
1794 ret = walk_gimple_seq (gimple_try_cleanup (stmt), callback_stmt,
1797 return wi->callback_result;
1800 case GIMPLE_OMP_FOR:
1801 ret = walk_gimple_seq (gimple_omp_for_pre_body (stmt), callback_stmt,
1804 return wi->callback_result;
1807 case GIMPLE_OMP_CRITICAL:
1808 case GIMPLE_OMP_MASTER:
1809 case GIMPLE_OMP_ORDERED:
1810 case GIMPLE_OMP_SECTION:
1811 case GIMPLE_OMP_PARALLEL:
1812 case GIMPLE_OMP_TASK:
1813 case GIMPLE_OMP_SECTIONS:
1814 case GIMPLE_OMP_SINGLE:
1815 ret = walk_gimple_seq (gimple_omp_body (stmt), callback_stmt, callback_op,
1818 return wi->callback_result;
1821 case GIMPLE_WITH_CLEANUP_EXPR:
1822 ret = walk_gimple_seq (gimple_wce_cleanup (stmt), callback_stmt,
1825 return wi->callback_result;
1829 gcc_assert (!gimple_has_substatements (stmt));
1837 /* Set sequence SEQ to be the GIMPLE body for function FN. */
1840 gimple_set_body (tree fndecl, gimple_seq seq)
1842 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
1845 /* If FNDECL still does not have a function structure associated
1846 with it, then it does not make sense for it to receive a GIMPLE body. */
1848 gcc_assert (seq == NULL);
1851 fn->gimple_body = seq;
1855 /* Return the body of GIMPLE statements for function FN. After the
1856 CFG pass, the function body doesn't exist anymore because it has
1857 been split up into basic blocks. In this case, it returns NULL. */
1861 gimple_body (tree fndecl)
1863 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
1864 return fn ? fn->gimple_body : NULL;
1867 /* Return true when FNDECL has Gimple body either in unlowered or lowered form. */
1870 gimple_has_body_p (tree fndecl)
1872 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
1873 return (gimple_body (fndecl) || (fn && fn->cfg));
1876 /* Return true if calls C1 and C2 are known to go to the same function. */
1879 gimple_call_same_target_p (const_gimple c1, const_gimple c2)
1881 if (gimple_call_internal_p (c1))
1882 return (gimple_call_internal_p (c2)
1883 && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2));
1885 return (gimple_call_fn (c1) == gimple_call_fn (c2)
1886 || (gimple_call_fndecl (c1)
1887 && gimple_call_fndecl (c1) == gimple_call_fndecl (c2)));
1890 /* Detect flags from a GIMPLE_CALL. This is just like
1891 call_expr_flags, but for gimple tuples. */
1894 gimple_call_flags (const_gimple stmt)
1897 tree decl = gimple_call_fndecl (stmt);
1900 flags = flags_from_decl_or_type (decl);
1901 else if (gimple_call_internal_p (stmt))
1902 flags = internal_fn_flags (gimple_call_internal_fn (stmt));
1904 flags = flags_from_decl_or_type (gimple_call_fntype (stmt));
1906 if (stmt->gsbase.subcode & GF_CALL_NOTHROW)
1907 flags |= ECF_NOTHROW;
1912 /* Return the "fn spec" string for call STMT. */
1915 gimple_call_fnspec (const_gimple stmt)
1919 type = gimple_call_fntype (stmt);
1923 attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
1927 return TREE_VALUE (TREE_VALUE (attr));
1930 /* Detects argument flags for argument number ARG on call STMT. */
1933 gimple_call_arg_flags (const_gimple stmt, unsigned arg)
1935 tree attr = gimple_call_fnspec (stmt);
1937 if (!attr || 1 + arg >= (unsigned) TREE_STRING_LENGTH (attr))
1940 switch (TREE_STRING_POINTER (attr)[1 + arg])
1947 return EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE;
1950 return EAF_NOCLOBBER | EAF_NOESCAPE;
1953 return EAF_DIRECT | EAF_NOESCAPE;
1956 return EAF_NOESCAPE;
1964 /* Detects return flags for the call STMT. */
1967 gimple_call_return_flags (const_gimple stmt)
1971 if (gimple_call_flags (stmt) & ECF_MALLOC)
1974 attr = gimple_call_fnspec (stmt);
1975 if (!attr || TREE_STRING_LENGTH (attr) < 1)
1978 switch (TREE_STRING_POINTER (attr)[0])
1984 return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');
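/* Example (illustrative): character 0 of the "fn spec" string describes the
   return value and character 1 + I describes argument I.  So for a call
   whose fnspec string starts with '2',

     gimple_call_return_flags (stmt) == (ERF_RETURNS_ARG | 1)

   i.e. the call returns its second argument, and gimple_call_arg_flags
   (stmt, 0) inspects the character at index 1.  */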
1996 /* Return true if GS is a copy assignment. */
1999 gimple_assign_copy_p (gimple gs)
2001 return (gimple_assign_single_p (gs)
2002 && is_gimple_val (gimple_op (gs, 1)));
2006 /* Return true if GS is a SSA_NAME copy assignment. */
2009 gimple_assign_ssa_name_copy_p (gimple gs)
2011 return (gimple_assign_single_p (gs)
2012 && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
2013 && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
2017 /* Return true if GS is an assignment with a unary RHS, but the
2018 operator has no effect on the assigned value. The logic is adapted
2019 from STRIP_NOPS. This predicate is intended to be used in tuplifying
2020 instances in which STRIP_NOPS was previously applied to the RHS of
2023 NOTE: In the use cases that led to the creation of this function
2024 and of gimple_assign_single_p, it is typical to test for either
2025 condition and to proceed in the same manner. In each case, the
2026 assigned value is represented by the single RHS operand of the
2027 assignment. I suspect there may be cases where gimple_assign_copy_p,
2028 gimple_assign_single_p, or equivalent logic is used where a similar
2029 treatment of unary NOPs is appropriate. */
2032 gimple_assign_unary_nop_p (gimple gs)
2034 return (is_gimple_assign (gs)
2035 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
2036 || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
2037 && gimple_assign_rhs1 (gs) != error_mark_node
2038 && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
2039 == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
2042 /* Set BB to be the basic block holding G. */
2045 gimple_set_bb (gimple stmt, basic_block bb)
2047 stmt->gsbase.bb = bb;
2049 /* If the statement is a label, add the label to block-to-labels map
2050 so that we can speed up edge creation for GIMPLE_GOTOs. */
2051 if (cfun->cfg && gimple_code (stmt) == GIMPLE_LABEL)
2056 t = gimple_label_label (stmt);
2057 uid = LABEL_DECL_UID (t);
2060 unsigned old_len = VEC_length (basic_block, label_to_block_map);
2061 LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
2062 if (old_len <= (unsigned) uid)
2064 unsigned new_len = 3 * uid / 2 + 1;
2066 VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
2071 VEC_replace (basic_block, label_to_block_map, uid, bb);
2076 /* Modify the RHS of the assignment pointed-to by GSI using the
2077 operands in the expression tree EXPR.
2079 NOTE: The statement pointed-to by GSI may be reallocated if it
2080 did not have enough operand slots.
2082 This function is useful to convert an existing tree expression into
2083 the flat representation used for the RHS of a GIMPLE assignment.
2084 It will reallocate memory as needed to expand or shrink the number
2085 of operand slots needed to represent EXPR.
2087 NOTE: If you find yourself building a tree and then calling this
2088 function, you are most certainly doing it the slow way. It is much
2089 better to build a new assignment or to use the function
2090 gimple_assign_set_rhs_with_ops, which does not require an
2091 expression tree to be built. */
2094 gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
2096 enum tree_code subcode;
2099 extract_ops_from_tree_1 (expr, &subcode, &op1, &op2, &op3);
2100 gimple_assign_set_rhs_with_ops_1 (gsi, subcode, op1, op2, op3);
2104 /* Set the RHS of assignment statement pointed-to by GSI to CODE with
2105 operands OP1, OP2 and OP3.
2107 NOTE: The statement pointed-to by GSI may be reallocated if it
2108 did not have enough operand slots. */
2111 gimple_assign_set_rhs_with_ops_1 (gimple_stmt_iterator *gsi, enum tree_code code,
2112 tree op1, tree op2, tree op3)
2114 unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
2115 gimple stmt = gsi_stmt (*gsi);
2117 /* If the new CODE needs more operands, allocate a new statement. */
2118 if (gimple_num_ops (stmt) < new_rhs_ops + 1)
2120 tree lhs = gimple_assign_lhs (stmt);
2121 gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
2122 memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
2123 gsi_replace (gsi, new_stmt, true);
2126 /* The LHS needs to be reset as this also changes the SSA name on the LHS. */
2128 gimple_assign_set_lhs (stmt, lhs);
2131 gimple_set_num_ops (stmt, new_rhs_ops + 1);
2132 gimple_set_subcode (stmt, code);
2133 gimple_assign_set_rhs1 (stmt, op1);
2134 if (new_rhs_ops > 1)
2135 gimple_assign_set_rhs2 (stmt, op2);
2136 if (new_rhs_ops > 2)
2137 gimple_assign_set_rhs3 (stmt, op3);
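/* Usage sketch (illustrative; A and B are hypothetical operands): rewriting
   the RHS of the assignment at GSI to `a * b' in place:

     gimple_assign_set_rhs_with_ops_1 (&gsi, MULT_EXPR, a, b, NULL_TREE);
     update_stmt (gsi_stmt (gsi));

   If the new code needs more operand slots than the old statement had, the
   statement is reallocated as shown above.  */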
2141 /* Return the LHS of a statement that performs an assignment,
2142 either a GIMPLE_ASSIGN or a GIMPLE_CALL. Returns NULL_TREE
2143 for a call to a function that returns no value, or for a
2144 statement other than an assignment or a call. */
2147 gimple_get_lhs (const_gimple stmt)
2149 enum gimple_code code = gimple_code (stmt);
2151 if (code == GIMPLE_ASSIGN)
2152 return gimple_assign_lhs (stmt);
2153 else if (code == GIMPLE_CALL)
2154 return gimple_call_lhs (stmt);
2160 /* Set the LHS of a statement that performs an assignment,
2161 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
2164 gimple_set_lhs (gimple stmt, tree lhs)
2166 enum gimple_code code = gimple_code (stmt);
2168 if (code == GIMPLE_ASSIGN)
2169 gimple_assign_set_lhs (stmt, lhs);
2170 else if (code == GIMPLE_CALL)
2171 gimple_call_set_lhs (stmt, lhs);
2176 /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
2177 GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
2178 expression with a different value.
2180 This will update any annotations (say debug bind stmts) referring
2181 to the original LHS, so that they use the RHS instead. This is
2182 done even if NLHS and LHS are the same, for it is understood that
2183 the RHS will be modified afterwards, and NLHS will not be assigned
2184 an equivalent value.
2186 Adjusting any non-annotation uses of the LHS, if needed, is a
2187 responsibility of the caller.
2189 The effect of this call should be pretty much the same as that of
2190 inserting a copy of STMT before STMT, and then removing the
2191 original stmt, at which time gsi_remove() would have updated the
2192 annotations, but using this function saves all the inserting,
2193 copying and removing. */
2196 gimple_replace_lhs (gimple stmt, tree nlhs)
2198 if (MAY_HAVE_DEBUG_STMTS)
2200 tree lhs = gimple_get_lhs (stmt);
2202 gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);
2204 insert_debug_temp_for_var_def (NULL, lhs);
2207 gimple_set_lhs (stmt, nlhs);
2210 /* Return a deep copy of statement STMT. All the operands from STMT
2211 are reallocated and copied using unshare_expr. The DEF, USE, VDEF
2212 and VUSE operand arrays are set to empty in the new copy. */
2215 gimple_copy (gimple stmt)
2217 enum gimple_code code = gimple_code (stmt);
2218 unsigned num_ops = gimple_num_ops (stmt);
2219 gimple copy = gimple_alloc (code, num_ops);
2222 /* Shallow copy all the fields from STMT. */
2223 memcpy (copy, stmt, gimple_size (code));
2225 /* If STMT has sub-statements, deep-copy them as well. */
2226 if (gimple_has_substatements (stmt))
2231 switch (gimple_code (stmt))
2234 new_seq = gimple_seq_copy (gimple_bind_body (stmt));
2235 gimple_bind_set_body (copy, new_seq);
2236 gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
2237 gimple_bind_set_block (copy, gimple_bind_block (stmt));
2241 new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
2242 gimple_catch_set_handler (copy, new_seq);
2243 t = unshare_expr (gimple_catch_types (stmt));
2244 gimple_catch_set_types (copy, t);
2247 case GIMPLE_EH_FILTER:
2248 new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
2249 gimple_eh_filter_set_failure (copy, new_seq);
2250 t = unshare_expr (gimple_eh_filter_types (stmt));
2251 gimple_eh_filter_set_types (copy, t);
2255 new_seq = gimple_seq_copy (gimple_try_eval (stmt));
2256 gimple_try_set_eval (copy, new_seq);
2257 new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
2258 gimple_try_set_cleanup (copy, new_seq);
2261 case GIMPLE_OMP_FOR:
2262 new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
2263 gimple_omp_for_set_pre_body (copy, new_seq);
2264 t = unshare_expr (gimple_omp_for_clauses (stmt));
2265 gimple_omp_for_set_clauses (copy, t);
2266 copy->gimple_omp_for.iter
2267 = ggc_alloc_vec_gimple_omp_for_iter
2268 (gimple_omp_for_collapse (stmt));
2269 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2271 gimple_omp_for_set_cond (copy, i,
2272 gimple_omp_for_cond (stmt, i));
2273 gimple_omp_for_set_index (copy, i,
2274 gimple_omp_for_index (stmt, i));
2275 t = unshare_expr (gimple_omp_for_initial (stmt, i));
2276 gimple_omp_for_set_initial (copy, i, t);
2277 t = unshare_expr (gimple_omp_for_final (stmt, i));
2278 gimple_omp_for_set_final (copy, i, t);
2279 t = unshare_expr (gimple_omp_for_incr (stmt, i));
2280 gimple_omp_for_set_incr (copy, i, t);
2284 case GIMPLE_OMP_PARALLEL:
2285 t = unshare_expr (gimple_omp_parallel_clauses (stmt));
2286 gimple_omp_parallel_set_clauses (copy, t);
2287 t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
2288 gimple_omp_parallel_set_child_fn (copy, t);
2289 t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
2290 gimple_omp_parallel_set_data_arg (copy, t);
2293 case GIMPLE_OMP_TASK:
2294 t = unshare_expr (gimple_omp_task_clauses (stmt));
2295 gimple_omp_task_set_clauses (copy, t);
2296 t = unshare_expr (gimple_omp_task_child_fn (stmt));
2297 gimple_omp_task_set_child_fn (copy, t);
2298 t = unshare_expr (gimple_omp_task_data_arg (stmt));
2299 gimple_omp_task_set_data_arg (copy, t);
2300 t = unshare_expr (gimple_omp_task_copy_fn (stmt));
2301 gimple_omp_task_set_copy_fn (copy, t);
2302 t = unshare_expr (gimple_omp_task_arg_size (stmt));
2303 gimple_omp_task_set_arg_size (copy, t);
2304 t = unshare_expr (gimple_omp_task_arg_align (stmt));
2305 gimple_omp_task_set_arg_align (copy, t);
2308 case GIMPLE_OMP_CRITICAL:
2309 t = unshare_expr (gimple_omp_critical_name (stmt));
2310 gimple_omp_critical_set_name (copy, t);
2313 case GIMPLE_OMP_SECTIONS:
2314 t = unshare_expr (gimple_omp_sections_clauses (stmt));
2315 gimple_omp_sections_set_clauses (copy, t);
2316 t = unshare_expr (gimple_omp_sections_control (stmt));
2317 gimple_omp_sections_set_control (copy, t);
2320 case GIMPLE_OMP_SINGLE:
2321 case GIMPLE_OMP_SECTION:
2322 case GIMPLE_OMP_MASTER:
2323 case GIMPLE_OMP_ORDERED:
2325 new_seq = gimple_seq_copy (gimple_omp_body (stmt));
2326 gimple_omp_set_body (copy, new_seq);
2329 case GIMPLE_WITH_CLEANUP_EXPR:
2330 new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
2331 gimple_wce_set_cleanup (copy, new_seq);
2339 /* Make copy of operands. */
2342 for (i = 0; i < num_ops; i++)
2343 gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));
2345 /* Clear out SSA operand vectors on COPY. */
2346 if (gimple_has_ops (stmt))
2348 gimple_set_def_ops (copy, NULL);
2349 gimple_set_use_ops (copy, NULL);
2352 if (gimple_has_mem_ops (stmt))
2354 gimple_set_vdef (copy, gimple_vdef (stmt));
2355 gimple_set_vuse (copy, gimple_vuse (stmt));
2358 /* SSA operands need to be updated. */
2359 gimple_set_modified (copy, true);
2366 /* Set the MODIFIED flag to MODIFIEDP, iff the gimple statement G has
2367 a MODIFIED field. */
2370 gimple_set_modified (gimple s, bool modifiedp)
2372 if (gimple_has_ops (s))
2373 s->gsbase.modified = (unsigned) modifiedp;
2377 /* Return true if statement S has side-effects. We consider a
2378 statement to have side effects if:
2380 - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
2381 - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS. */
2384 gimple_has_side_effects (const_gimple s)
2388 if (is_gimple_debug (s))
2391 /* We don't have to scan the arguments to check for
2392 volatile arguments, though, at present, we still
2393 do a scan to check for TREE_SIDE_EFFECTS. */
2394 if (gimple_has_volatile_ops (s))
2397 if (gimple_code (s) == GIMPLE_ASM
2398 && gimple_asm_volatile_p (s))
2401 if (is_gimple_call (s))
2403 unsigned nargs = gimple_call_num_args (s);
2406 if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
2408 else if (gimple_call_flags (s) & ECF_LOOPING_CONST_OR_PURE)
2409 /* An infinite loop is considered a side effect. */
2412 if (gimple_call_lhs (s)
2413 && TREE_SIDE_EFFECTS (gimple_call_lhs (s)))
2415 gcc_checking_assert (gimple_has_volatile_ops (s));
2419 fn = gimple_call_fn (s);
2420 if (fn && TREE_SIDE_EFFECTS (fn))
2423 for (i = 0; i < nargs; i++)
2424 if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i)))
2426 gcc_checking_assert (gimple_has_volatile_ops (s));
2434 for (i = 0; i < gimple_num_ops (s); i++)
2436 tree op = gimple_op (s, i);
2437 if (op && TREE_SIDE_EFFECTS (op))
2439 gcc_checking_assert (gimple_has_volatile_ops (s));
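/* An illustrative sketch (not part of this file): a DCE-style check built
   on top of the predicate above.  A statement with no side effects whose
   only result is an unused SSA name can be deleted.  The function name is
   hypothetical and LHS handling is deliberately simplified.  */
static bool
example_stmt_is_dead_p (gimple stmt)
{
  tree lhs;

  if (gimple_has_side_effects (stmt))
    return false;

  lhs = gimple_get_lhs (stmt);
  return (lhs != NULL_TREE
	  && TREE_CODE (lhs) == SSA_NAME
	  && has_zero_uses (lhs));
}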
2448 /* Return true if the RHS of statement S has side effects.
2449 We may use it to determine if it is admissible to replace
2450 an assignment or call with a copy of a previously-computed
2451 value. In such cases, side-effects due to the LHS are preserved. */
2455 gimple_rhs_has_side_effects (const_gimple s)
2459 if (is_gimple_call (s))
2461 unsigned nargs = gimple_call_num_args (s);
2464 if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
2467 /* We cannot use gimple_has_volatile_ops here,
2468 because we must ignore a volatile LHS. */
2469 fn = gimple_call_fn (s);
2470 if (fn && (TREE_SIDE_EFFECTS (fn) || TREE_THIS_VOLATILE (fn)))
2472 gcc_assert (gimple_has_volatile_ops (s));
2476 for (i = 0; i < nargs; i++)
2477 if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i))
2478 || TREE_THIS_VOLATILE (gimple_call_arg (s, i)))
2483 else if (is_gimple_assign (s))
2485 /* Skip the first operand, the LHS. */
2486 for (i = 1; i < gimple_num_ops (s); i++)
2487 if (TREE_SIDE_EFFECTS (gimple_op (s, i))
2488 || TREE_THIS_VOLATILE (gimple_op (s, i)))
2490 gcc_assert (gimple_has_volatile_ops (s));
2494 else if (is_gimple_debug (s))
2498 /* For statements without an LHS, examine all arguments. */
2499 for (i = 0; i < gimple_num_ops (s); i++)
2500 if (TREE_SIDE_EFFECTS (gimple_op (s, i))
2501 || TREE_THIS_VOLATILE (gimple_op (s, i)))
2503 gcc_assert (gimple_has_volatile_ops (s));
2511 /* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
2512 Return true if S can trap. When INCLUDE_MEM is true, check whether
2513 the memory operations could trap. When INCLUDE_STORES is true and
2514 S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked. */
2517 gimple_could_trap_p_1 (gimple s, bool include_mem, bool include_stores)
2519 tree t, div = NULL_TREE;
2524 unsigned i, start = (is_gimple_assign (s) && !include_stores) ? 1 : 0;
2526 for (i = start; i < gimple_num_ops (s); i++)
2527 if (tree_could_trap_p (gimple_op (s, i)))
2531 switch (gimple_code (s))
2534 return gimple_asm_volatile_p (s);
2537 t = gimple_call_fndecl (s);
2538 /* Assume that calls to weak functions may trap. */
2539 if (!t || !DECL_P (t) || DECL_WEAK (t))
2544 t = gimple_expr_type (s);
2545 op = gimple_assign_rhs_code (s);
2546 if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
2547 div = gimple_assign_rhs2 (s);
2548 return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
2549 (INTEGRAL_TYPE_P (t)
2550 && TYPE_OVERFLOW_TRAPS (t)),
2560 /* Return true if statement S can trap. */
2563 gimple_could_trap_p (gimple s)
2565 return gimple_could_trap_p_1 (s, true, true);
2568 /* Return true if RHS of a GIMPLE_ASSIGN S can trap. */
2571 gimple_assign_rhs_could_trap_p (gimple s)
2573 gcc_assert (is_gimple_assign (s));
2574 return gimple_could_trap_p_1 (s, true, false);
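/* An illustrative sketch (not part of this file): code motion passes
   typically require both "no side effects" and "cannot trap" before a
   statement may be executed speculatively.  The name is hypothetical.  */
static bool
example_safe_to_speculate_p (gimple stmt)
{
  return (!gimple_has_side_effects (stmt)
	  && !gimple_could_trap_p (stmt));
}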
2578 /* Print debugging information for gimple stmts generated. */
2581 dump_gimple_statistics (void)
2583 #ifdef GATHER_STATISTICS
2584 int i, total_tuples = 0, total_bytes = 0;
2586 fprintf (stderr, "\nGIMPLE statements\n");
2587 fprintf (stderr, "Kind Stmts Bytes\n");
2588 fprintf (stderr, "---------------------------------------\n");
2589 for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
2591 fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
2592 gimple_alloc_counts[i], gimple_alloc_sizes[i]);
2593 total_tuples += gimple_alloc_counts[i];
2594 total_bytes += gimple_alloc_sizes[i];
2596 fprintf (stderr, "---------------------------------------\n");
2597 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
2598 fprintf (stderr, "---------------------------------------\n");
2600 fprintf (stderr, "No gimple statistics\n");
2605 /* Return the number of operands needed on the RHS of a GIMPLE
2606 assignment for an expression with tree code CODE. */
2609 get_gimple_rhs_num_ops (enum tree_code code)
2611 enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
2613 if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
2615 else if (rhs_class == GIMPLE_BINARY_RHS)
2617 else if (rhs_class == GIMPLE_TERNARY_RHS)
2623 #define DEFTREECODE(SYM, STRING, TYPE, NARGS) \
2625 ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS \
2626 : ((TYPE) == tcc_binary \
2627 || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS \
2628 : ((TYPE) == tcc_constant \
2629 || (TYPE) == tcc_declaration \
2630 || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS \
2631 : ((SYM) == TRUTH_AND_EXPR \
2632 || (SYM) == TRUTH_OR_EXPR \
2633 || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS \
2634 : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS \
2635 : ((SYM) == COND_EXPR \
2636 || (SYM) == WIDEN_MULT_PLUS_EXPR \
2637 || (SYM) == WIDEN_MULT_MINUS_EXPR \
2638 || (SYM) == DOT_PROD_EXPR \
2639 || (SYM) == REALIGN_LOAD_EXPR \
2640 || (SYM) == VEC_COND_EXPR \
2641 || (SYM) == VEC_SHUFFLE_EXPR \
2642 || (SYM) == FMA_EXPR) ? GIMPLE_TERNARY_RHS \
2643 : ((SYM) == CONSTRUCTOR \
2644 || (SYM) == OBJ_TYPE_REF \
2645 || (SYM) == ASSERT_EXPR \
2646 || (SYM) == ADDR_EXPR \
2647 || (SYM) == WITH_SIZE_EXPR \
2648 || (SYM) == SSA_NAME) ? GIMPLE_SINGLE_RHS \
2649 : GIMPLE_INVALID_RHS),
2650 #define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,
2652 const unsigned char gimple_rhs_class_table[] = {
2653 #include "all-tree.def"
2657 #undef END_OF_BASE_TREE_CODES
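/* An illustrative sketch (not part of this file) of how the table above
   translates tree codes into RHS classes and arities.  */
static void
example_rhs_class_checks (void)
{
  gcc_assert (get_gimple_rhs_class (PLUS_EXPR) == GIMPLE_BINARY_RHS);
  gcc_assert (get_gimple_rhs_num_ops (PLUS_EXPR) == 2);
  gcc_assert (get_gimple_rhs_class (COND_EXPR) == GIMPLE_TERNARY_RHS);
  gcc_assert (get_gimple_rhs_class (SSA_NAME) == GIMPLE_SINGLE_RHS);
  gcc_assert (get_gimple_rhs_num_ops (SSA_NAME) == 1);
}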
2659 /* For the definitive definition of GIMPLE, see doc/tree-ssa.texi. */
2661 /* Validation of GIMPLE expressions. */
2663 /* Returns true iff T is a valid RHS for an assignment to a renamed
2664 user -- or front-end generated artificial -- variable. */
2667 is_gimple_reg_rhs (tree t)
2669 return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
2672 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
2673 LHS, or for a call argument. */
2676 is_gimple_mem_rhs (tree t)
2678 /* If we're dealing with a renamable type, either source or dest must be
2679 a renamed variable. */
2680 if (is_gimple_reg_type (TREE_TYPE (t)))
2681 return is_gimple_val (t);
2683 return is_gimple_val (t) || is_gimple_lvalue (t);
2686 /* Return true if T is a valid LHS for a GIMPLE assignment expression. */
2689 is_gimple_lvalue (tree t)
2691 return (is_gimple_addressable (t)
2692 || TREE_CODE (t) == WITH_SIZE_EXPR
2693 /* These are complex lvalues, but don't have addresses, so they go here. */
2695 || TREE_CODE (t) == BIT_FIELD_REF);
2698 /* Return true if T is a GIMPLE condition. */
2701 is_gimple_condexpr (tree t)
2703 return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
2704 && !tree_could_throw_p (t)
2705 && is_gimple_val (TREE_OPERAND (t, 0))
2706 && is_gimple_val (TREE_OPERAND (t, 1))));
2709 /* Return true if T is something whose address can be taken. */
2712 is_gimple_addressable (tree t)
2714 return (is_gimple_id (t) || handled_component_p (t)
2715 || TREE_CODE (t) == MEM_REF);
2718 /* Return true if T is a valid gimple constant. */
2721 is_gimple_constant (const_tree t)
2723 switch (TREE_CODE (t))
2733 /* Vector constant constructors are gimple invariant. */
2735 if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2736 return TREE_CONSTANT (t);
2745 /* Return true if T is a gimple address. */
2748 is_gimple_address (const_tree t)
2752 if (TREE_CODE (t) != ADDR_EXPR)
2755 op = TREE_OPERAND (t, 0);
2756 while (handled_component_p (op))
2758 if ((TREE_CODE (op) == ARRAY_REF
2759 || TREE_CODE (op) == ARRAY_RANGE_REF)
2760 && !is_gimple_val (TREE_OPERAND (op, 1)))
2763 op = TREE_OPERAND (op, 0);
2766 if (CONSTANT_CLASS_P (op) || TREE_CODE (op) == MEM_REF)
2769 switch (TREE_CODE (op))
2784 /* Strip out all handled components that produce invariant offsets. */
2788 strip_invariant_refs (const_tree op)
2790 while (handled_component_p (op))
2792 switch (TREE_CODE (op))
2795 case ARRAY_RANGE_REF:
2796 if (!is_gimple_constant (TREE_OPERAND (op, 1))
2797 || TREE_OPERAND (op, 2) != NULL_TREE
2798 || TREE_OPERAND (op, 3) != NULL_TREE)
2803 if (TREE_OPERAND (op, 2) != NULL_TREE)
2809 op = TREE_OPERAND (op, 0);
2815 /* Return true if T is a gimple invariant address. */
2818 is_gimple_invariant_address (const_tree t)
2822 if (TREE_CODE (t) != ADDR_EXPR)
2825 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2829 if (TREE_CODE (op) == MEM_REF)
2831 const_tree op0 = TREE_OPERAND (op, 0);
2832 return (TREE_CODE (op0) == ADDR_EXPR
2833 && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
2834 || decl_address_invariant_p (TREE_OPERAND (op0, 0))));
2837 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
2840 /* Return true if T is a gimple invariant address at IPA level
2841 (so addresses of variables on stack are not allowed). */
2844 is_gimple_ip_invariant_address (const_tree t)
2848 if (TREE_CODE (t) != ADDR_EXPR)
2851 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2853 return op && (CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op));
2856 /* Return true if T is a GIMPLE minimal invariant. It's a restricted
2857 form of function invariant. */
2860 is_gimple_min_invariant (const_tree t)
2862 if (TREE_CODE (t) == ADDR_EXPR)
2863 return is_gimple_invariant_address (t);
2865 return is_gimple_constant (t);
2868 /* Return true if T is a GIMPLE interprocedural invariant. It's a restricted
2869 form of gimple minimal invariant. */
2872 is_gimple_ip_invariant (const_tree t)
2874 if (TREE_CODE (t) == ADDR_EXPR)
2875 return is_gimple_ip_invariant_address (t);
2877 return is_gimple_constant (t);
2880 /* Return true if T looks like a valid GIMPLE statement. */
2883 is_gimple_stmt (tree t)
2885 const enum tree_code code = TREE_CODE (t);
2890 /* The only valid NOP_EXPR is the empty statement. */
2891 return IS_EMPTY_STMT (t);
2895 /* These are only valid if they're void. */
2896 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
2902 case CASE_LABEL_EXPR:
2903 case TRY_CATCH_EXPR:
2904 case TRY_FINALLY_EXPR:
2905 case EH_FILTER_EXPR:
2908 case STATEMENT_LIST:
2918 /* These are always void. */
2924 /* These are valid regardless of their type. */
2932 /* Return true if T is a variable. */
2935 is_gimple_variable (tree t)
2937 return (TREE_CODE (t) == VAR_DECL
2938 || TREE_CODE (t) == PARM_DECL
2939 || TREE_CODE (t) == RESULT_DECL
2940 || TREE_CODE (t) == SSA_NAME);
2943 /* Return true if T is a GIMPLE identifier (something with an address). */
2946 is_gimple_id (tree t)
2948 return (is_gimple_variable (t)
2949 || TREE_CODE (t) == FUNCTION_DECL
2950 || TREE_CODE (t) == LABEL_DECL
2951 || TREE_CODE (t) == CONST_DECL
2952 /* Allow string constants, since they are addressable. */
2953 || TREE_CODE (t) == STRING_CST);
2956 /* Return true if TYPE is a suitable type for a scalar register variable. */
2959 is_gimple_reg_type (tree type)
2961 return !AGGREGATE_TYPE_P (type);
2964 /* Return true if T is a non-aggregate register variable. */
2967 is_gimple_reg (tree t)
2969 if (TREE_CODE (t) == SSA_NAME)
2970 t = SSA_NAME_VAR (t);
2972 if (!is_gimple_variable (t))
2975 if (!is_gimple_reg_type (TREE_TYPE (t)))
2978 /* A volatile decl is not acceptable because we can't reuse it as
2979 needed. We need to copy it into a temp first. */
2980 if (TREE_THIS_VOLATILE (t))
2983 /* We define "registers" as things that can be renamed as needed,
2984 which with our infrastructure does not apply to memory. */
2985 if (needs_to_live_in_memory (t))
2988 /* Hard register variables are an interesting case. For those that
2989 are call-clobbered, we don't know where all the calls are, since
2990 we don't (want to) take into account which operations will turn
2991 into libcalls at the rtl level. For those that are call-saved,
2992 we don't currently model the fact that calls may in fact change
2993 global hard registers, nor do we examine ASM_CLOBBERS at the tree
2994 level, and so miss variable changes that such clobbers might imply. All around,
2995 it seems safest to not do too much optimization with these at the
2996 tree level at all. We'll have to rely on the rtl optimizers to
2997 clean this up, as there we've got all the appropriate bits exposed. */
2998 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
3001 /* Complex and vector values must have been put into SSA-like form.
3002 That is, no assignments to the individual components. */
3003 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
3004 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3005 return DECL_GIMPLE_REG_P (t);
3011 /* Return true if T is a GIMPLE variable whose address is not needed. */
3014 is_gimple_non_addressable (tree t)
3016 if (TREE_CODE (t) == SSA_NAME)
3017 t = SSA_NAME_VAR (t);
3019 return (is_gimple_variable (t) && ! needs_to_live_in_memory (t));
3022 /* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant. */
3025 is_gimple_val (tree t)
3027 /* Make loads from volatiles and memory vars explicit. */
3028 if (is_gimple_variable (t)
3029 && is_gimple_reg_type (TREE_TYPE (t))
3030 && !is_gimple_reg (t))
3033 return (is_gimple_variable (t) || is_gimple_min_invariant (t));
3036 /* Similarly, but accept hard registers as inputs to asm statements. */
3039 is_gimple_asm_val (tree t)
3041 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
3044 return is_gimple_val (t);
3047 /* Return true if T is a GIMPLE minimal lvalue. */
3050 is_gimple_min_lval (tree t)
3052 if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
3054 return (is_gimple_id (t) || TREE_CODE (t) == MEM_REF);
3057 /* Return true if T is a valid function operand of a CALL_EXPR. */
3060 is_gimple_call_addr (tree t)
3062 return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
3065 /* Return true if T is a valid address operand of a MEM_REF. */
3068 is_gimple_mem_ref_addr (tree t)
3070 return (is_gimple_reg (t)
3071 || TREE_CODE (t) == INTEGER_CST
3072 || (TREE_CODE (t) == ADDR_EXPR
3073 && (CONSTANT_CLASS_P (TREE_OPERAND (t, 0))
3074 || decl_address_invariant_p (TREE_OPERAND (t, 0)))));
3077 /* If T makes a function call, return the corresponding CALL_EXPR operand.
3078 Otherwise, return NULL_TREE. */
3081 get_call_expr_in (tree t)
3083 if (TREE_CODE (t) == MODIFY_EXPR)
3084 t = TREE_OPERAND (t, 1);
3085 if (TREE_CODE (t) == WITH_SIZE_EXPR)
3086 t = TREE_OPERAND (t, 0);
3087 if (TREE_CODE (t) == CALL_EXPR)
3093 /* Given a memory reference expression T, return its base address.
3094 The base address of a memory reference expression is the main
3095 object being referenced. For instance, the base address for
3096 'array[i].fld[j]' is 'array'. You can think of this as stripping
3097 away the offset part from a memory address.
3099 This function calls handled_component_p to strip away all the inner
3100 parts of the memory reference until it reaches the base object. */
3103 get_base_address (tree t)
3105 while (handled_component_p (t))
3106 t = TREE_OPERAND (t, 0);
3108 if ((TREE_CODE (t) == MEM_REF
3109 || TREE_CODE (t) == TARGET_MEM_REF)
3110 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
3111 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
3113 if (TREE_CODE (t) == SSA_NAME
3115 || TREE_CODE (t) == STRING_CST
3116 || TREE_CODE (t) == CONSTRUCTOR
3117 || INDIRECT_REF_P (t)
3118 || TREE_CODE (t) == MEM_REF
3119 || TREE_CODE (t) == TARGET_MEM_REF)
3126 recalculate_side_effects (tree t)
3128 enum tree_code code = TREE_CODE (t);
3129 int len = TREE_OPERAND_LENGTH (t);
3132 switch (TREE_CODE_CLASS (code))
3134 case tcc_expression:
3140 case PREDECREMENT_EXPR:
3141 case PREINCREMENT_EXPR:
3142 case POSTDECREMENT_EXPR:
3143 case POSTINCREMENT_EXPR:
3144 /* All of these have side-effects, no matter what their operands are. */
3153 case tcc_comparison: /* a comparison expression */
3154 case tcc_unary: /* a unary arithmetic expression */
3155 case tcc_binary: /* a binary arithmetic expression */
3156 case tcc_reference: /* a reference */
3157 case tcc_vl_exp: /* a function call */
3158 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
3159 for (i = 0; i < len; ++i)
3161 tree op = TREE_OPERAND (t, i);
3162 if (op && TREE_SIDE_EFFECTS (op))
3163 TREE_SIDE_EFFECTS (t) = 1;
3168 /* No side-effects. */
3176 /* Canonicalize a tree T for use in a COND_EXPR as conditional. Return
3177 a canonicalized tree that is valid for a COND_EXPR, or NULL_TREE if
3178 we failed to create one. */
3181 canonicalize_cond_expr_cond (tree t)
3183 /* Strip conversions around boolean operations. */
3184 if (CONVERT_EXPR_P (t)
3185 && (truth_value_p (TREE_CODE (TREE_OPERAND (t, 0)))
3186 || TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
3188 t = TREE_OPERAND (t, 0);
3190 /* For !x use x == 0. */
3191 if (TREE_CODE (t) == TRUTH_NOT_EXPR)
3193 tree top0 = TREE_OPERAND (t, 0);
3194 t = build2 (EQ_EXPR, TREE_TYPE (t),
3195 top0, build_int_cst (TREE_TYPE (top0), 0));
3197 /* For cmp ? 1 : 0 use cmp. */
3198 else if (TREE_CODE (t) == COND_EXPR
3199 && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
3200 && integer_onep (TREE_OPERAND (t, 1))
3201 && integer_zerop (TREE_OPERAND (t, 2)))
3203 tree top0 = TREE_OPERAND (t, 0);
3204 t = build2 (TREE_CODE (top0), TREE_TYPE (t),
3205 TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
3208 if (is_gimple_condexpr (t))
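/* An illustrative sketch (not part of this file): callers typically commit
   the canonicalized condition only when a valid one came back, keeping the
   original tree otherwise.  The function name is hypothetical.  */
static tree
example_try_canonicalize_cond (tree cond)
{
  tree canon = canonicalize_cond_expr_cond (cond);
  return canon ? canon : cond;
}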
3214 /* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
3215 the positions marked by the set ARGS_TO_SKIP. */
3218 gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
3221 int nargs = gimple_call_num_args (stmt);
3222 VEC(tree, heap) *vargs = VEC_alloc (tree, heap, nargs);
3225 for (i = 0; i < nargs; i++)
3226 if (!bitmap_bit_p (args_to_skip, i))
3227 VEC_quick_push (tree, vargs, gimple_call_arg (stmt, i));
3229 if (gimple_call_internal_p (stmt))
3230 new_stmt = gimple_build_call_internal_vec (gimple_call_internal_fn (stmt),
3233 new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
3234 VEC_free (tree, heap, vargs);
3235 if (gimple_call_lhs (stmt))
3236 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3238 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3239 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3241 gimple_set_block (new_stmt, gimple_block (stmt));
3242 if (gimple_has_location (stmt))
3243 gimple_set_location (new_stmt, gimple_location (stmt));
3244 gimple_call_copy_flags (new_stmt, stmt);
3245 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3247 gimple_set_modified (new_stmt, true);
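/* An illustrative sketch (not part of this file): dropping the first
   argument of a call, e.g. after the corresponding parameter has been
   removed from the callee.  The function name is hypothetical.  */
static gimple
example_drop_first_arg (gimple call)
{
  bitmap to_skip = BITMAP_ALLOC (NULL);
  gimple new_call;

  bitmap_set_bit (to_skip, 0);
  new_call = gimple_call_copy_skip_args (call, to_skip);
  BITMAP_FREE (to_skip);
  return new_call;
}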
3253 enum gtc_mode { GTC_MERGE = 0, GTC_DIAG = 1 };
3255 static hashval_t gimple_type_hash (const void *);
3257 /* Structure used to maintain a cache of some type pairs compared by
3258 gimple_types_compatible_p when comparing aggregate types. There are
3259 three possible values for SAME_P:
3261 -2: The pair (T1, T2) has just been inserted in the table.
3262 0: T1 and T2 are different types.
3263 1: T1 and T2 are the same type.
3265 The two elements in the SAME_P array are indexed by the comparison
3266 mode, i.e. gtc_mode (GTC_MERGE or GTC_DIAG). */
3272 signed char same_p[2];
3274 typedef struct type_pair_d *type_pair_t;
3275 DEF_VEC_P(type_pair_t);
3276 DEF_VEC_ALLOC_P(type_pair_t,heap);
3278 #define GIMPLE_TYPE_PAIR_SIZE 16381
3279 struct type_pair_d *type_pair_cache;
3282 /* Lookup the pair of types T1 and T2 in *VISITED_P. Insert a new
3283 entry if none existed. */
3285 static inline type_pair_t
3286 lookup_type_pair (tree t1, tree t2)
3289 unsigned int uid1, uid2;
3291 if (type_pair_cache == NULL)
3292 type_pair_cache = XCNEWVEC (struct type_pair_d, GIMPLE_TYPE_PAIR_SIZE);
3294 if (TYPE_UID (t1) < TYPE_UID (t2))
3296 uid1 = TYPE_UID (t1);
3297 uid2 = TYPE_UID (t2);
3301 uid1 = TYPE_UID (t2);
3302 uid2 = TYPE_UID (t1);
3304 gcc_checking_assert (uid1 != uid2);
3306 /* Using iterative_hash_hashval_t here would imply function calls;
3307 we know that UIDs are in a limited range, so hash them directly. */
3308 index = ((((unsigned HOST_WIDE_INT)uid1 << HOST_BITS_PER_WIDE_INT / 2) + uid2)
3309 % GIMPLE_TYPE_PAIR_SIZE);
3310 if (type_pair_cache [index].uid1 == uid1
3311 && type_pair_cache [index].uid2 == uid2)
3312 return &type_pair_cache[index];
3314 type_pair_cache [index].uid1 = uid1;
3315 type_pair_cache [index].uid2 = uid2;
3316 type_pair_cache [index].same_p[0] = -2;
3317 type_pair_cache [index].same_p[1] = -2;
3319 return &type_pair_cache[index];
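/* An illustrative sketch (not part of this file) of how the comparison code
   below consults this cache; -2 marks a comparison that is still in
   progress.  The function name is hypothetical.  */
static bool
example_cached_types_same_p (tree t1, tree t2)
{
  type_pair_t p;

  if (t1 == t2)
    return true;

  p = lookup_type_pair (t1, t2);
  if (p->same_p[GTC_MERGE] == 0 || p->same_p[GTC_MERGE] == 1)
    return p->same_p[GTC_MERGE] == 1;

  /* No cached verdict yet; fall back to the full structural comparison.  */
  return gimple_types_compatible_p (t1, t2);
}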
3322 /* Per pointer state for the SCC finding. The on_sccstack flag
3323 is not strictly required, it is true when there is no hash value
3324 recorded for the type and false otherwise. But querying that
3325 is more expensive than checking the flag. */
3329 unsigned int dfsnum;
3338 static unsigned int next_dfs_num;
3339 static unsigned int gtc_next_dfs_num;
3342 /* GIMPLE type merging cache. A direct-mapped cache based on TYPE_UID. */
3344 typedef struct GTY(()) gimple_type_leader_entry_s {
3347 } gimple_type_leader_entry;
3349 #define GIMPLE_TYPE_LEADER_SIZE 16381
3350 static GTY((deletable, length("GIMPLE_TYPE_LEADER_SIZE")))
3351 gimple_type_leader_entry *gimple_type_leader;
3353 /* Lookup an existing leader for T and return it or NULL_TREE, if
3354 there is none in the cache. */
3357 gimple_lookup_type_leader (tree t)
3359 gimple_type_leader_entry *leader;
3361 if (!gimple_type_leader)
3364 leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
3365 if (leader->type != t)
3368 return leader->leader;
3371 /* Return true if T1 and T2 have the same name. Types with no name
3372 compare equal only if both of them have no name. */
3376 compare_type_names_p (tree t1, tree t2)
3378 tree name1 = TYPE_NAME (t1);
3379 tree name2 = TYPE_NAME (t2);
3381 if (name1 && TREE_CODE (name1) == TYPE_DECL)
3382 name1 = DECL_NAME (name1);
3383 gcc_checking_assert (!name1 || TREE_CODE (name1) == IDENTIFIER_NODE);
3385 if (name2 && TREE_CODE (name2) == TYPE_DECL)
3386 name2 = DECL_NAME (name2);
3387 gcc_checking_assert (!name2 || TREE_CODE (name2) == IDENTIFIER_NODE);
3389 /* Identifiers can be compared with pointer equality rather
3390 than a string comparison. */
3397 /* Return true if the field decls F1 and F2 are at the same offset.
3399 This is intended to be used on GIMPLE types only. */
3402 gimple_compare_field_offset (tree f1, tree f2)
3404 if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
3406 tree offset1 = DECL_FIELD_OFFSET (f1);
3407 tree offset2 = DECL_FIELD_OFFSET (f2);
3408 return ((offset1 == offset2
3409 /* Once gimplification is done, self-referential offsets are
3410 instantiated as operand #2 of the COMPONENT_REF built for
3411 each access and reset. Therefore, they are not relevant
3412 anymore and fields are interchangeable provided that they
3413 represent the same access. */
3414 || (TREE_CODE (offset1) == PLACEHOLDER_EXPR
3415 && TREE_CODE (offset2) == PLACEHOLDER_EXPR
3416 && (DECL_SIZE (f1) == DECL_SIZE (f2)
3417 || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR
3418 && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR)
3419 || operand_equal_p (DECL_SIZE (f1), DECL_SIZE (f2), 0))
3420 && DECL_ALIGN (f1) == DECL_ALIGN (f2))
3421 || operand_equal_p (offset1, offset2, 0))
3422 && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
3423 DECL_FIELD_BIT_OFFSET (f2)));
3426 /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
3427 should be, so handle differing ones specially by decomposing
3428 the offset into a byte and bit offset manually. */
3429 if (host_integerp (DECL_FIELD_OFFSET (f1), 0)
3430 && host_integerp (DECL_FIELD_OFFSET (f2), 0))
3432 unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
3433 unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
3434 bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
3435 byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
3436 + bit_offset1 / BITS_PER_UNIT);
3437 bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
3438 byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
3439 + bit_offset2 / BITS_PER_UNIT);
3440 if (byte_offset1 != byte_offset2)
3442 return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
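/* A worked example (illustrative only, assuming 8-bit units): a field with
   DECL_FIELD_OFFSET of 4 bytes and DECL_FIELD_BIT_OFFSET of 12 bits denotes
   byte 4 + 12/8 = 5, bit 12%8 = 4; a field with DECL_FIELD_OFFSET of 5 bytes
   and DECL_FIELD_BIT_OFFSET of 4 bits denotes byte 5, bit 4 as well, so the
   two compare equal here even though their raw offsets and offset alignments
   differ.  */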
3449 gimple_types_compatible_p_1 (tree, tree, type_pair_t,
3450 VEC(type_pair_t, heap) **,
3451 struct pointer_map_t *, struct obstack *);
3453 /* DFS visit the edge from the caller's type pair with state *STATE to
3454 the pair T1, T2.
3455 Update the merging status if the pair is not part of the SCC containing
3456 the caller's pair, and return it.
3457 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
3460 gtc_visit (tree t1, tree t2,
3462 VEC(type_pair_t, heap) **sccstack,
3463 struct pointer_map_t *sccstate,
3464 struct obstack *sccstate_obstack)
3466 struct sccs *cstate = NULL;
3469 tree leader1, leader2;
3471 /* Check first for the obvious case of pointer identity. */
3475 /* Check that we have two types to compare. */
3476 if (t1 == NULL_TREE || t2 == NULL_TREE)
3479 /* Can't be the same type if the types don't have the same code. */
3480 if (TREE_CODE (t1) != TREE_CODE (t2))
3483 /* Can't be the same type if they have different CV qualifiers. */
3484 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
3487 if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
3490 /* Void types and nullptr types are always the same. */
3491 if (TREE_CODE (t1) == VOID_TYPE
3492 || TREE_CODE (t1) == NULLPTR_TYPE)
3495 /* Can't be the same type if they have different alignment or mode. */
3496 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3497 || TYPE_MODE (t1) != TYPE_MODE (t2))
3500 /* Do some simple checks before doing three hashtable queries. */
3501 if (INTEGRAL_TYPE_P (t1)
3502 || SCALAR_FLOAT_TYPE_P (t1)
3503 || FIXED_POINT_TYPE_P (t1)
3504 || TREE_CODE (t1) == VECTOR_TYPE
3505 || TREE_CODE (t1) == COMPLEX_TYPE
3506 || TREE_CODE (t1) == OFFSET_TYPE
3507 || POINTER_TYPE_P (t1))
3509 /* Can't be the same type if they have different sign or precision. */
3510 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
3511 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
3514 if (TREE_CODE (t1) == INTEGER_TYPE
3515 && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
3516 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
3519 /* That's all we need to check for float and fixed-point types. */
3520 if (SCALAR_FLOAT_TYPE_P (t1)
3521 || FIXED_POINT_TYPE_P (t1))
3524 /* For other types fall thru to more complex checks. */
3527 /* If the types have been previously registered and found equal they still are. */
3529 leader1 = gimple_lookup_type_leader (t1);
3530 leader2 = gimple_lookup_type_leader (t2);
3533 || (leader1 && leader1 == leader2))
3536 /* If the hash values of t1 and t2 are different the types can't
3537 possibly be the same. This helps keep the type-pair hashtable
3538 small, only tracking comparisons for hash collisions. */
3539 if (gimple_type_hash (t1) != gimple_type_hash (t2))
3542 /* Allocate a new cache entry for this comparison. */
3543 p = lookup_type_pair (t1, t2);
3544 if (p->same_p[GTC_MERGE] == 0 || p->same_p[GTC_MERGE] == 1)
3546 /* We have already decided whether T1 and T2 are the
3547 same, return the cached result. */
3548 return p->same_p[GTC_MERGE] == 1;
3551 if ((slot = pointer_map_contains (sccstate, p)) != NULL)
3552 cstate = (struct sccs *)*slot;
3553 /* Not yet visited. DFS recurse. */
3556 gimple_types_compatible_p_1 (t1, t2, p,
3557 sccstack, sccstate, sccstate_obstack);
3558 cstate = (struct sccs *)* pointer_map_contains (sccstate, p);
3559 state->low = MIN (state->low, cstate->low);
3561 /* If the type is still on the SCC stack adjust the parent's low. */
3562 if (cstate->dfsnum < state->dfsnum
3563 && cstate->on_sccstack)
3564 state->low = MIN (cstate->dfsnum, state->low);
3566 /* Return the current lattice value. We start with an equality
3567 assumption so types part of a SCC will be optimistically
3568 treated equal unless proven otherwise. */
3569 return cstate->u.same_p;
3572 /* Worker for gimple_types_compatible_p.
3573 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
3576 gimple_types_compatible_p_1 (tree t1, tree t2, type_pair_t p,
3577 VEC(type_pair_t, heap) **sccstack,
3578 struct pointer_map_t *sccstate,
3579 struct obstack *sccstate_obstack)
3583 gcc_assert (p->same_p[GTC_MERGE] == -2);
3585 state = XOBNEW (sccstate_obstack, struct sccs);
3586 *pointer_map_insert (sccstate, p) = state;
3588 VEC_safe_push (type_pair_t, heap, *sccstack, p);
3589 state->dfsnum = gtc_next_dfs_num++;
3590 state->low = state->dfsnum;
3591 state->on_sccstack = true;
3592 /* Start with an equality assumption. As we DFS recurse into child
3593 SCCs this assumption may get revisited. */
3594 state->u.same_p = 1;
3596 /* The struct tags shall compare equal. */
3597 if (!compare_type_names_p (t1, t2))
3598 goto different_types;
3600 /* If their attributes are not the same they can't be the same type. */
3601 if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
3602 goto different_types;
3604 /* Do type-specific comparisons. */
3605 switch (TREE_CODE (t1))
3609 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3610 state, sccstack, sccstate, sccstate_obstack))
3611 goto different_types;
3615 /* Array types are the same if the element types are the same and
3616 the number of elements is the same. */
3617 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3618 state, sccstack, sccstate, sccstate_obstack)
3619 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
3620 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
3621 goto different_types;
3624 tree i1 = TYPE_DOMAIN (t1);
3625 tree i2 = TYPE_DOMAIN (t2);
3627 /* For an incomplete external array, the type domain can be
3628 NULL_TREE. Check this condition also. */
3629 if (i1 == NULL_TREE && i2 == NULL_TREE)
3631 else if (i1 == NULL_TREE || i2 == NULL_TREE)
3632 goto different_types;
3633 /* If for a complete array type the possibly gimplified sizes
3634 are different the types are different. */
3635 else if (((TYPE_SIZE (i1) != NULL) ^ (TYPE_SIZE (i2) != NULL))
3638 && !operand_equal_p (TYPE_SIZE (i1), TYPE_SIZE (i2), 0)))
3639 goto different_types;
3642 tree min1 = TYPE_MIN_VALUE (i1);
3643 tree min2 = TYPE_MIN_VALUE (i2);
3644 tree max1 = TYPE_MAX_VALUE (i1);
3645 tree max2 = TYPE_MAX_VALUE (i2);
3647 /* The minimum/maximum values have to be the same. */
3650 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
3651 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
3652 || operand_equal_p (min1, min2, 0))))
3655 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
3656 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
3657 || operand_equal_p (max1, max2, 0)))))
3660 goto different_types;
3665 /* Method types should belong to the same class. */
3666 if (!gtc_visit (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2),
3667 state, sccstack, sccstate, sccstate_obstack))
3668 goto different_types;
3673 /* Function types are the same if the return type and argument types are the same. */
3675 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3676 state, sccstack, sccstate, sccstate_obstack))
3677 goto different_types;
3679 if (!comp_type_attributes (t1, t2))
3680 goto different_types;
3682 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
3686 tree parms1, parms2;
3688 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
3690 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
3692 if (!gtc_visit (TREE_VALUE (parms1), TREE_VALUE (parms2),
3693 state, sccstack, sccstate, sccstate_obstack))
3694 goto different_types;
3697 if (parms1 || parms2)
3698 goto different_types;
3705 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3706 state, sccstack, sccstate, sccstate_obstack)
3707 || !gtc_visit (TYPE_OFFSET_BASETYPE (t1),
3708 TYPE_OFFSET_BASETYPE (t2),
3709 state, sccstack, sccstate, sccstate_obstack))
3710 goto different_types;
3716 case REFERENCE_TYPE:
3718 /* If the two pointers have different ref-all attributes,
3719 they can't be the same type. */
3720 if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
3721 goto different_types;
3723 /* Otherwise, pointer and reference types are the same if the
3724 pointed-to types are the same. */
3725 if (gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3726 state, sccstack, sccstate, sccstate_obstack))
3729 goto different_types;
3735 tree min1 = TYPE_MIN_VALUE (t1);
3736 tree max1 = TYPE_MAX_VALUE (t1);
3737 tree min2 = TYPE_MIN_VALUE (t2);
3738 tree max2 = TYPE_MAX_VALUE (t2);
3739 bool min_equal_p = false;
3740 bool max_equal_p = false;
3742 /* If either type has a minimum value, the other type must have the same. */
3744 if (min1 == NULL_TREE && min2 == NULL_TREE)
3746 else if (min1 && min2 && operand_equal_p (min1, min2, 0))
3749 /* Likewise, if either type has a maximum value, the other
3750 type must have the same. */
3751 if (max1 == NULL_TREE && max2 == NULL_TREE)
3753 else if (max1 && max2 && operand_equal_p (max1, max2, 0))
3756 if (!min_equal_p || !max_equal_p)
3757 goto different_types;
3764 /* FIXME lto, we cannot check bounds on enumeral types because
3765 different front ends will produce different values.
3766 In C, enumeral types are integers, while in C++ each element
3767 will have its own symbolic value. We should decide how enums
3768 are to be represented in GIMPLE and have each front end lower to that. */
3772 /* For enumeral types, all the values must be the same. */
3773 if (TYPE_VALUES (t1) == TYPE_VALUES (t2))
3776 for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
3778 v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
3780 tree c1 = TREE_VALUE (v1);
3781 tree c2 = TREE_VALUE (v2);
3783 if (TREE_CODE (c1) == CONST_DECL)
3784 c1 = DECL_INITIAL (c1);
3786 if (TREE_CODE (c2) == CONST_DECL)
3787 c2 = DECL_INITIAL (c2);
3789 if (tree_int_cst_equal (c1, c2) != 1)
3790 goto different_types;
3792 if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
3793 goto different_types;
3796 /* If one enumeration has more values than the other, they
3797 are not the same. */
3799 goto different_types;
3806 case QUAL_UNION_TYPE:
3810 /* For aggregate types, all the fields must be the same. */
3811 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
3813 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
3815 /* Different field kinds are not compatible. */
3816 if (TREE_CODE (f1) != TREE_CODE (f2))
3817 goto different_types;
3818 /* Field decls must have the same name and offset. */
3819 if (TREE_CODE (f1) == FIELD_DECL
3820 && (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
3821 || !gimple_compare_field_offset (f1, f2)))
3822 goto different_types;
3823 /* All entities should have the same name and type. */
3824 if (DECL_NAME (f1) != DECL_NAME (f2)
3825 || !gtc_visit (TREE_TYPE (f1), TREE_TYPE (f2),
3826 state, sccstack, sccstate, sccstate_obstack))
3827 goto different_types;
3830 /* If one aggregate has more fields than the other, they
3831 are not the same. */
3833 goto different_types;
3842 /* Common exit path for types that are not compatible. */
3844 state->u.same_p = 0;
3847 /* Common exit path for types that are compatible. */
3849 gcc_assert (state->u.same_p == 1);
3852 if (state->low == state->dfsnum)
3856 /* Pop off the SCC and set its cache values to the final
3857 comparison result. */
3860 struct sccs *cstate;
3861 x = VEC_pop (type_pair_t, *sccstack);
3862 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
3863 cstate->on_sccstack = false;
3864 x->same_p[GTC_MERGE] = state->u.same_p;
3869 return state->u.same_p;
3872 /* Return true iff T1 and T2 are structurally identical. When
3873 merging (GTC_MERGE), an incomplete type and a complete type
3874 are considered different; otherwise they are considered compatible. */
3877 gimple_types_compatible_p (tree t1, tree t2)
3879 VEC(type_pair_t, heap) *sccstack = NULL;
3880 struct pointer_map_t *sccstate;
3881 struct obstack sccstate_obstack;
3882 type_pair_t p = NULL;
3884 tree leader1, leader2;
3886 /* Before starting to set up the SCC machinery handle simple cases. */
3888 /* Check first for the obvious case of pointer identity. */
3892 /* Check that we have two types to compare. */
3893 if (t1 == NULL_TREE || t2 == NULL_TREE)
3896 /* Can't be the same type if the types don't have the same code. */
3897 if (TREE_CODE (t1) != TREE_CODE (t2))
3900 /* Can't be the same type if they have different CV qualifiers. */
3901 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
3904 if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
3907 /* Void types and nullptr types are always the same. */
3908 if (TREE_CODE (t1) == VOID_TYPE
3909 || TREE_CODE (t1) == NULLPTR_TYPE)
3912 /* Can't be the same type if they have different alignment or mode. */
3913 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3914 || TYPE_MODE (t1) != TYPE_MODE (t2))
3917 /* Do some simple checks before doing three hashtable queries. */
3918 if (INTEGRAL_TYPE_P (t1)
3919 || SCALAR_FLOAT_TYPE_P (t1)
3920 || FIXED_POINT_TYPE_P (t1)
3921 || TREE_CODE (t1) == VECTOR_TYPE
3922 || TREE_CODE (t1) == COMPLEX_TYPE
3923 || TREE_CODE (t1) == OFFSET_TYPE
3924 || POINTER_TYPE_P (t1))
3926 /* Can't be the same type if they have different sign or precision. */
3927 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
3928 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
3931 if (TREE_CODE (t1) == INTEGER_TYPE
3932 && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
3933 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
3936 /* That's all we need to check for float and fixed-point types. */
3937 if (SCALAR_FLOAT_TYPE_P (t1)
3938 || FIXED_POINT_TYPE_P (t1))
3941 /* For other types fall thru to more complex checks. */
3944 /* If the types have been previously registered and found equal they still are. */
3946 leader1 = gimple_lookup_type_leader (t1);
3947 leader2 = gimple_lookup_type_leader (t2);
3950 || (leader1 && leader1 == leader2))
3953 /* If the hash values of t1 and t2 are different the types can't
3954 possibly be the same. This helps keep the type-pair hashtable
3955 small, only tracking comparisons for hash collisions. */
3956 if (gimple_type_hash (t1) != gimple_type_hash (t2))
3959 /* If we've visited this type pair before (in the case of aggregates
3960 with self-referential types), and we made a decision, return it. */
3961 p = lookup_type_pair (t1, t2);
3962 if (p->same_p[GTC_MERGE] == 0 || p->same_p[GTC_MERGE] == 1)
3964 /* We have already decided whether T1 and T2 are the
3965 same, return the cached result. */
3966 return p->same_p[GTC_MERGE] == 1;
3969 /* Now set up the SCC machinery for the comparison. */
3970 gtc_next_dfs_num = 1;
3971 sccstate = pointer_map_create ();
3972 gcc_obstack_init (&sccstate_obstack);
3973 res = gimple_types_compatible_p_1 (t1, t2, p,
3974 &sccstack, sccstate, &sccstate_obstack);
3975 VEC_free (type_pair_t, heap, sccstack);
3976 pointer_map_destroy (sccstate);
3977 obstack_free (&sccstate_obstack, NULL);
3984 iterative_hash_gimple_type (tree, hashval_t, VEC(tree, heap) **,
3985 struct pointer_map_t *, struct obstack *);
3987 /* DFS visit the edge from the caller's type with state *STATE to T.
3988 Update the caller's type hash V with the hash for T if it is not part
3989 of the SCC containing the caller's type, and return it.
3990 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
3993 visit (tree t, struct sccs *state, hashval_t v,
3994 VEC (tree, heap) **sccstack,
3995 struct pointer_map_t *sccstate,
3996 struct obstack *sccstate_obstack)
3998 struct sccs *cstate = NULL;
3999 struct tree_int_map m;
4002 /* If there is a hash value recorded for this type then it can't
4003 possibly be part of our parent SCC. Simply mix in its hash. */
4005 if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
4007 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, v);
4009 if ((slot = pointer_map_contains (sccstate, t)) != NULL)
4010 cstate = (struct sccs *)*slot;
4014 /* Not yet visited. DFS recurse. */
4015 tem = iterative_hash_gimple_type (t, v,
4016 sccstack, sccstate, sccstate_obstack);
4018 cstate = (struct sccs *)* pointer_map_contains (sccstate, t);
4019 state->low = MIN (state->low, cstate->low);
4020 /* If the type is no longer on the SCC stack and thus is not part
4021 of the parent's SCC, mix in its hash value. Otherwise we will
4022 ignore the type for hashing purposes and return the unaltered
4023 hash value. */
4024 if (!cstate->on_sccstack)
4027 if (cstate->dfsnum < state->dfsnum
4028 && cstate->on_sccstack)
4029 state->low = MIN (cstate->dfsnum, state->low);
4031 /* We are part of our parent's SCC, skip this type during hashing
4032 and return the unaltered hash value. */
4036 /* Hash NAME with the previous hash value V and return it. */
4039 iterative_hash_name (tree name, hashval_t v)
4043 if (TREE_CODE (name) == TYPE_DECL)
4044 name = DECL_NAME (name);
4047 gcc_assert (TREE_CODE (name) == IDENTIFIER_NODE);
4048 return iterative_hash_object (IDENTIFIER_HASH_VALUE (name), v);
4051 /* A type, hashvalue pair for sorting SCC members. */
4053 struct type_hash_pair {
4058 /* Compare two type, hashvalue pairs. */
4061 type_hash_pair_compare (const void *p1_, const void *p2_)
4063 const struct type_hash_pair *p1 = (const struct type_hash_pair *) p1_;
4064 const struct type_hash_pair *p2 = (const struct type_hash_pair *) p2_;
4065 if (p1->hash < p2->hash)
4067 else if (p1->hash > p2->hash)
4072 /* Return a hash value for gimple type TYPE combined with VAL.
4073 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.
4075 To hash a type we end up hashing in types that are reachable.
4076 Through pointers we can end up with cycles, which would break the
4077 required property that we compute the same hash value
4078 for structurally equivalent types. To avoid this we have to
4079 hash all types in a cycle (the SCC) in a commutative way. The
4080 easiest way is to not mix in the hashes of the SCC members at
4081 all. To make this work we have to delay setting the hash
4082 values of the SCC until it is complete. */
4085 iterative_hash_gimple_type (tree type, hashval_t val,
4086 VEC(tree, heap) **sccstack,
4087 struct pointer_map_t *sccstate,
4088 struct obstack *sccstate_obstack)
4094 /* Not visited during this DFS walk. */
4095 gcc_checking_assert (!pointer_map_contains (sccstate, type));
4096 state = XOBNEW (sccstate_obstack, struct sccs);
4097 *pointer_map_insert (sccstate, type) = state;
4099 VEC_safe_push (tree, heap, *sccstack, type);
4100 state->dfsnum = next_dfs_num++;
4101 state->low = state->dfsnum;
4102 state->on_sccstack = true;
4104 /* Combine a few common features of types so that types are grouped into
4105 smaller sets; when searching for existing matching types to merge,
4106 only existing types having the same features as the new type will be checked. */
4108 v = iterative_hash_name (TYPE_NAME (type), 0);
4109 v = iterative_hash_hashval_t (TREE_CODE (type), v);
4110 v = iterative_hash_hashval_t (TYPE_QUALS (type), v);
4111 v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);
4113 /* Do not hash the type's size as this will cause differences in
4114 hash values for the complete vs. the incomplete type variant. */
4116 /* Incorporate common features of numerical types. */
4117 if (INTEGRAL_TYPE_P (type)
4118 || SCALAR_FLOAT_TYPE_P (type)
4119 || FIXED_POINT_TYPE_P (type))
4121 v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
4122 v = iterative_hash_hashval_t (TYPE_MODE (type), v);
4123 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
4126 /* For pointer and reference types, fold in information about the type pointed to. */
4128 if (POINTER_TYPE_P (type))
4129 v = visit (TREE_TYPE (type), state, v,
4130 sccstack, sccstate, sccstate_obstack);
4132 /* For integer types hash the type's min/max values and the string flag. */
4133 if (TREE_CODE (type) == INTEGER_TYPE)
4135 /* OMP lowering can introduce error_mark_node in place of
4136 random local decls in types. */
4137 if (TYPE_MIN_VALUE (type) != error_mark_node)
4138 v = iterative_hash_expr (TYPE_MIN_VALUE (type), v);
4139 if (TYPE_MAX_VALUE (type) != error_mark_node)
4140 v = iterative_hash_expr (TYPE_MAX_VALUE (type), v);
4141 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
4144 /* For array types hash their domain and the string flag. */
4145 if (TREE_CODE (type) == ARRAY_TYPE
4146 && TYPE_DOMAIN (type))
4148 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
4149 v = visit (TYPE_DOMAIN (type), state, v,
4150 sccstack, sccstate, sccstate_obstack);
4153 /* Recurse for aggregates with a single element type. */
4154 if (TREE_CODE (type) == ARRAY_TYPE
4155 || TREE_CODE (type) == COMPLEX_TYPE
4156 || TREE_CODE (type) == VECTOR_TYPE)
4157 v = visit (TREE_TYPE (type), state, v,
4158 sccstack, sccstate, sccstate_obstack);
4160 /* Incorporate function return and argument types. */
4161 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
4166 /* For method types also incorporate their parent class. */
4167 if (TREE_CODE (type) == METHOD_TYPE)
4168 v = visit (TYPE_METHOD_BASETYPE (type), state, v,
4169 sccstack, sccstate, sccstate_obstack);
4171 /* Check result and argument types. */
4172 v = visit (TREE_TYPE (type), state, v,
4173 sccstack, sccstate, sccstate_obstack);
4174 for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
4176 v = visit (TREE_VALUE (p), state, v,
4177 sccstack, sccstate, sccstate_obstack);
4181 v = iterative_hash_hashval_t (na, v);
4184 if (TREE_CODE (type) == RECORD_TYPE
4185 || TREE_CODE (type) == UNION_TYPE
4186 || TREE_CODE (type) == QUAL_UNION_TYPE)
4191 for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
4193 v = iterative_hash_name (DECL_NAME (f), v);
4194 v = visit (TREE_TYPE (f), state, v,
4195 sccstack, sccstate, sccstate_obstack);
4199 v = iterative_hash_hashval_t (nf, v);
4202 /* Record hash for us. */
4205 /* See if we found an SCC. */
4206 if (state->low == state->dfsnum)
4209 struct tree_int_map *m;
4211 /* Pop off the SCC and set its hash values. */
4212 x = VEC_pop (tree, *sccstack);
4213 /* Optimize SCC size one. */
4216 state->on_sccstack = false;
4217 m = ggc_alloc_cleared_tree_int_map ();
4220 slot = htab_find_slot (type_hash_cache, m, INSERT);
4221 gcc_assert (!*slot);
4226 struct sccs *cstate;
4227 unsigned first, i, size, j;
4228 struct type_hash_pair *pairs;
4229 /* Pop off the SCC and build an array of type, hash pairs. */
4230 first = VEC_length (tree, *sccstack) - 1;
4231 while (VEC_index (tree, *sccstack, first) != type)
4233 size = VEC_length (tree, *sccstack) - first + 1;
4234 pairs = XALLOCAVEC (struct type_hash_pair, size);
4236 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
4237 cstate->on_sccstack = false;
4239 pairs[i].hash = cstate->u.hash;
4242 x = VEC_pop (tree, *sccstack);
4243 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
4244 cstate->on_sccstack = false;
4247 pairs[i].hash = cstate->u.hash;
4250 gcc_assert (i + 1 == size);
4251 /* Sort the array of type, hash pairs so that when we mix in
4252 all members of the SCC the hash value becomes independent of
4253 the order we visited the SCC. Disregard hashes equal to
4254 the hash of the type we mix into because we cannot guarantee
4255 a stable sort for those across different TUs. */
4256 qsort (pairs, size, sizeof (struct type_hash_pair),
4257 type_hash_pair_compare);
4258 for (i = 0; i < size; ++i)
4261 m = ggc_alloc_cleared_tree_int_map ();
4262 m->base.from = pairs[i].type;
4263 hash = pairs[i].hash;
4264 /* Skip same hashes. */
4265 for (j = i + 1; j < size && pairs[j].hash == pairs[i].hash; ++j)
4267 for (; j < size; ++j)
4268 hash = iterative_hash_hashval_t (pairs[j].hash, hash);
4269 for (j = 0; pairs[j].hash != pairs[i].hash; ++j)
4270 hash = iterative_hash_hashval_t (pairs[j].hash, hash);
4272 if (pairs[i].type == type)
4274 slot = htab_find_slot (type_hash_cache, m, INSERT);
4275 gcc_assert (!*slot);
4281 return iterative_hash_hashval_t (v, val);
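/* An illustrative example (not part of this file): a self-referential type
   such as

     struct list { struct list *next; };

   forms an SCC together with its pointer type.  While the SCC is on the
   stack neither member mixes the other's hash into its own; once the SCC is
   popped, each member's final hash is derived from the members' provisional
   hashes in sorted order, so the result does not depend on the order in
   which the SCC was visited or on which TU the type came from.  */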
4285 /* Returns a hash value for P (assumed to be a type). The hash value
4286 is computed using some distinguishing features of the type. Note
4287 that we cannot use pointer hashing here as we may be dealing with
4288 two distinct instances of the same type.
4290 This function should produce the same hash value for two compatible
4291 types according to gimple_types_compatible_p. */
4294 gimple_type_hash (const void *p)
4296 const_tree t = (const_tree) p;
4297 VEC(tree, heap) *sccstack = NULL;
4298 struct pointer_map_t *sccstate;
4299 struct obstack sccstate_obstack;
4302 struct tree_int_map m;
4304 if (type_hash_cache == NULL)
4305 type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
4306 tree_int_map_eq, NULL);
4308 m.base.from = CONST_CAST_TREE (t);
4309 if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
4311 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, 0);
4313 /* Perform a DFS walk and pre-hash all reachable types. */
4315 sccstate = pointer_map_create ();
4316 gcc_obstack_init (&sccstate_obstack);
4317 val = iterative_hash_gimple_type (CONST_CAST_TREE (t), 0,
4318 &sccstack, sccstate, &sccstate_obstack);
4319 VEC_free (tree, heap, sccstack);
4320 pointer_map_destroy (sccstate);
4321 obstack_free (&sccstate_obstack, NULL);
4326 /* Return a hash value for gimple type TYPE combined with VAL.
4328 The hash value returned is equal for types considered compatible
4329 by gimple_canonical_types_compatible_p. */
4332 iterative_hash_canonical_type (tree type, hashval_t val)
4336 struct tree_int_map *mp, m;
4339 if ((slot = htab_find_slot (canonical_type_hash_cache, &m, INSERT))
4341 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, val);
4343 /* Combine a few common features of types so that types are grouped into
4344 smaller sets; when searching for existing matching types to merge,
4345 only existing types having the same features as the new type will be checked. */
4347 v = iterative_hash_hashval_t (TREE_CODE (type), 0);
4348 v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);
4349 v = iterative_hash_hashval_t (TYPE_ALIGN (type), v);
4350 v = iterative_hash_hashval_t (TYPE_MODE (type), v);
4352 /* Incorporate common features of numerical types. */
4353 if (INTEGRAL_TYPE_P (type)
4354 || SCALAR_FLOAT_TYPE_P (type)
4355 || FIXED_POINT_TYPE_P (type)
4356 || TREE_CODE (type) == VECTOR_TYPE
4357 || TREE_CODE (type) == COMPLEX_TYPE
4358 || TREE_CODE (type) == OFFSET_TYPE
4359 || POINTER_TYPE_P (type))
4361 v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
4362 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
4365 /* For pointer and reference types, fold in information about the type
4366 pointed to but do not recurse to the pointed-to type. */
4367 if (POINTER_TYPE_P (type))
4369 v = iterative_hash_hashval_t (TYPE_REF_CAN_ALIAS_ALL (type), v);
4370 v = iterative_hash_hashval_t (TYPE_ADDR_SPACE (TREE_TYPE (type)), v);
4371 v = iterative_hash_hashval_t (TYPE_RESTRICT (type), v);
4372 v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
4375 /* For integer types hash the type's min/max values and the string flag. */
4376 if (TREE_CODE (type) == INTEGER_TYPE)
4378 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
4379 v = iterative_hash_hashval_t (TYPE_IS_SIZETYPE (type), v);
4382 /* For array types hash their domain and the string flag. */
4383 if (TREE_CODE (type) == ARRAY_TYPE
4384 && TYPE_DOMAIN (type))
4386 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
4387 v = iterative_hash_canonical_type (TYPE_DOMAIN (type), v);
4390 /* Recurse for aggregates with a single element type. */
4391 if (TREE_CODE (type) == ARRAY_TYPE
4392 || TREE_CODE (type) == COMPLEX_TYPE
4393 || TREE_CODE (type) == VECTOR_TYPE)
4394 v = iterative_hash_canonical_type (TREE_TYPE (type), v);
4396 /* Incorporate function return and argument types. */
4397 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
4402 /* For method types also incorporate their parent class. */
4403 if (TREE_CODE (type) == METHOD_TYPE)
4404 v = iterative_hash_canonical_type (TYPE_METHOD_BASETYPE (type), v);
4406 v = iterative_hash_canonical_type (TREE_TYPE (type), v);
4408 for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
4410 v = iterative_hash_canonical_type (TREE_VALUE (p), v);
4414 v = iterative_hash_hashval_t (na, v);
4417 if (TREE_CODE (type) == RECORD_TYPE
4418 || TREE_CODE (type) == UNION_TYPE
4419 || TREE_CODE (type) == QUAL_UNION_TYPE)
4424 for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
4425 if (TREE_CODE (f) == FIELD_DECL)
4427 v = iterative_hash_canonical_type (TREE_TYPE (f), v);
4431 v = iterative_hash_hashval_t (nf, v);
4434 /* Cache the just computed hash value. */
4435 mp = ggc_alloc_cleared_tree_int_map ();
4436 mp->base.from = type;
4438 *slot = (void *) mp;
4440 return iterative_hash_hashval_t (v, val);
4444 gimple_canonical_type_hash (const void *p)
4446 if (canonical_type_hash_cache == NULL)
4447 canonical_type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
4448 tree_int_map_eq, NULL);
4450 return iterative_hash_canonical_type (CONST_CAST_TREE ((const_tree) p), 0);
4454 /* Returns nonzero if P1 and P2 are equal. */
4457 gimple_type_eq (const void *p1, const void *p2)
4459 const_tree t1 = (const_tree) p1;
4460 const_tree t2 = (const_tree) p2;
4461 return gimple_types_compatible_p (CONST_CAST_TREE (t1),
4462 CONST_CAST_TREE (t2));
4466 /* Worker for gimple_register_type.
4467 Register type T in the global type table gimple_types.
4468 When REGISTERING_MV is false first recurse for the main variant of T. */
4471 gimple_register_type_1 (tree t, bool registering_mv)
4474 gimple_type_leader_entry *leader;
4476 /* If we registered this type before return the cached result. */
4477 leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
4478 if (leader->type == t)
4479 return leader->leader;
4481 /* Always register the main variant first. This is important so we
4482 pick up the non-typedef variants as canonical, otherwise we'll end
4483 up taking typedef ids for structure tags during comparison.
4484 It also makes sure that main variants will be merged to main variants.
4485 As we are operating on a possibly partially fixed up type graph
4486 do not bother to recurse more than once, otherwise we may end up walking in circles.
4488 If we are registering a main variant it will either remain its
4489 own main variant or it will be merged to something else in which
4490 case we do not care for the main variant leader. */
4492 && TYPE_MAIN_VARIANT (t) != t)
4493 gimple_register_type_1 (TYPE_MAIN_VARIANT (t), true);
4495 /* See if we already have an equivalent type registered. */
4496 slot = htab_find_slot (gimple_types, t, INSERT);
4498 && *(tree *)slot != t)
4500 tree new_type = (tree) *((tree *) slot);
4502 leader->leader = new_type;
4506 /* If not, insert it into the cache and the hash table. */
4513 /* Register type T in the global type table gimple_types.
4514 If another type T', compatible with T, already existed in
4515 gimple_types then return T', otherwise return T. This is used by
4516 LTO to merge identical types read from different TUs. */
4519 gimple_register_type (tree t)
4521 gcc_assert (TYPE_P (t));
4523 if (!gimple_type_leader)
4524 gimple_type_leader = ggc_alloc_cleared_vec_gimple_type_leader_entry_s
4525 (GIMPLE_TYPE_LEADER_SIZE);
4527 if (gimple_types == NULL)
4528 gimple_types = htab_create_ggc (16381, gimple_type_hash, gimple_type_eq, 0);
4530 return gimple_register_type_1 (t, false);
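/* An illustrative sketch (not part of this file): LTO type merging boils
   down to replacing every streamed-in type with the leader returned here.
   TYPE is a hypothetical freshly read-in type.  */
static tree
example_merge_streamed_type (tree type)
{
  /* Either TYPE itself or a previously registered compatible type is
     returned; all further references should use the returned node.  */
  return gimple_register_type (type);
}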
4533 /* The TYPE_CANONICAL merging machinery. It should closely resemble
4534 the middle-end types_compatible_p function. It needs to avoid
4535 claiming types are different for types that should be treated
4536 the same with respect to TBAA. Canonical types are also used
4537 for IL consistency checks via the useless_type_conversion_p
4538 predicate which does not handle all type kinds itself but falls
4539 back to pointer-comparison of TYPE_CANONICAL for aggregates
4540 for example. */
4542 /* Return true iff T1 and T2 are structurally identical as far as
4543 TBAA is concerned. */
4546 gimple_canonical_types_compatible_p (tree t1, tree t2)
4548 /* Before starting to set up the SCC machinery handle simple cases. */
4550 /* Check first for the obvious case of pointer identity. */
4554 /* Check that we have two types to compare. */
4555 if (t1 == NULL_TREE || t2 == NULL_TREE)
4558 /* If the types have been previously registered and found equal they still are. */
4560 if (TYPE_CANONICAL (t1)
4561 && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2))
4564 /* Can't be the same type if the types don't have the same code. */
4565 if (TREE_CODE (t1) != TREE_CODE (t2))
4568 if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
4571 /* Qualifiers do not matter for canonical type comparison purposes. */
4573 /* Void types and nullptr types are always the same. */
4574 if (TREE_CODE (t1) == VOID_TYPE
4575 || TREE_CODE (t1) == NULLPTR_TYPE)
4578 /* Can't be the same type if they have different alignment, or mode. */
4579 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
4580 || TYPE_MODE (t1) != TYPE_MODE (t2))
4583 /* Non-aggregate types can be handled cheaply. */
4584 if (INTEGRAL_TYPE_P (t1)
4585 || SCALAR_FLOAT_TYPE_P (t1)
4586 || FIXED_POINT_TYPE_P (t1)
4587 || TREE_CODE (t1) == VECTOR_TYPE
4588 || TREE_CODE (t1) == COMPLEX_TYPE
4589 || TREE_CODE (t1) == OFFSET_TYPE
4590 || POINTER_TYPE_P (t1))
4592 /* Can't be the same type if they have different sign or precision. */
4593 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
4594 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
4597 if (TREE_CODE (t1) == INTEGER_TYPE
4598 && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
4599 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
4602 /* For canonical type comparisons we do not want to build SCCs
4603 so we cannot compare pointed-to types. But we can, for now,
4604 require the same pointed-to type kind and match what
4605 useless_type_conversion_p would do. */
4606 if (POINTER_TYPE_P (t1))
4608 /* If the two pointers have different ref-all attributes,
4609 they can't be the same type. */
4610 if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
4613 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
4614 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
4617 if (TYPE_RESTRICT (t1) != TYPE_RESTRICT (t2))
4620 if (TREE_CODE (TREE_TYPE (t1)) != TREE_CODE (TREE_TYPE (t2)))
4624 /* Tail-recurse to components. */
4625 if (TREE_CODE (t1) == VECTOR_TYPE
4626 || TREE_CODE (t1) == COMPLEX_TYPE)
4627 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
4633 /* If their attributes are not the same they can't be the same type. */
4634 if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
4637 /* Do type-specific comparisons. */
4638 switch (TREE_CODE (t1))
4641 /* Array types are the same if the element types are the same and
4642 the number of elements are the same. */
4643 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))
4644 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
4645 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
4649 tree i1 = TYPE_DOMAIN (t1);
4650 tree i2 = TYPE_DOMAIN (t2);
4652 /* For an incomplete external array, the type domain can be
4653 NULL_TREE. Check this condition also. */
4654 if (i1 == NULL_TREE && i2 == NULL_TREE)
4656 else if (i1 == NULL_TREE || i2 == NULL_TREE)
4658 /* If for a complete array type the possibly gimplified sizes
4659 are different the types are different. */
4660 else if (((TYPE_SIZE (i1) != NULL) ^ (TYPE_SIZE (i2) != NULL))
4663 && !operand_equal_p (TYPE_SIZE (i1), TYPE_SIZE (i2), 0)))
4667 tree min1 = TYPE_MIN_VALUE (i1);
4668 tree min2 = TYPE_MIN_VALUE (i2);
4669 tree max1 = TYPE_MAX_VALUE (i1);
4670 tree max2 = TYPE_MAX_VALUE (i2);
4672 /* The minimum/maximum values have to be the same. */
4675 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
4676 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
4677 || operand_equal_p (min1, min2, 0))))
4680 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
4681 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
4682 || operand_equal_p (max1, max2, 0)))))
4690 /* Method types should belong to the same class. */
4691 if (!gimple_canonical_types_compatible_p
4692 (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2)))
4698 /* Function types are the same if the return type and arguments types
4700 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))
4703 if (!comp_type_attributes (t1, t2))
4706 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
4710 tree parms1, parms2;
4712 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
4714 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
4716 if (!gimple_canonical_types_compatible_p
4717 (TREE_VALUE (parms1), TREE_VALUE (parms2)))
4721 if (parms1 || parms2)
4729 case QUAL_UNION_TYPE:
4733 /* For aggregate types, all the fields must be the same. */
4734 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
4736 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
4738 /* Skip non-fields. */
4739 while (f1 && TREE_CODE (f1) != FIELD_DECL)
4740 f1 = TREE_CHAIN (f1);
4741 while (f2 && TREE_CODE (f2) != FIELD_DECL)
4742 f2 = TREE_CHAIN (f2);
4745 /* The fields must have the same name, offset and type. */
4746 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
4747 || !gimple_compare_field_offset (f1, f2)
4748 || !gimple_canonical_types_compatible_p
4749 (TREE_TYPE (f1), TREE_TYPE (f2)))
4753 /* If one aggregate has more fields than the other, they
4754 are not the same. */
/* Returns nonzero if P1 and P2 are equal.  */

static int
gimple_canonical_type_eq (const void *p1, const void *p2)
{
  const_tree t1 = (const_tree) p1;
  const_tree t2 = (const_tree) p2;
  return gimple_canonical_types_compatible_p (CONST_CAST_TREE (t1),
                                              CONST_CAST_TREE (t2));
}

/* Register type T in the global type table gimple_canonical_types.
   If another type T', compatible with T, already existed in
   gimple_canonical_types then return T', otherwise return T.  This is
   used by LTO to merge identical types read from different TUs.

   ??? This merging does not exactly match how the tree.c middle-end
   functions will assign TYPE_CANONICAL when new types are created
   during optimization (which at least happens for pointer and array
   types).  */

tree
gimple_register_canonical_type (tree t)
{
  void **slot;

  gcc_assert (TYPE_P (t));

  if (TYPE_CANONICAL (t))
    return TYPE_CANONICAL (t);

  if (gimple_canonical_types == NULL)
    gimple_canonical_types = htab_create_ggc (16381, gimple_canonical_type_hash,
                                              gimple_canonical_type_eq, 0);

  slot = htab_find_slot (gimple_canonical_types, t, INSERT);
  if (*slot
      && *(tree *) slot != t)
    {
      tree new_type = (tree) *((tree *) slot);

      TYPE_CANONICAL (t) = new_type;
      t = new_type;
    }
  else
    {
      TYPE_CANONICAL (t) = t;
      *slot = (void *) t;
    }

  return t;
}

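/* Illustrative, hypothetical sketch (not from the GCC sources) of how
   canonical types are meant to be consumed: after registration, pointer
   equality of the returned canonical types is what the TBAA machinery
   ultimately compares for aggregates.  The helper name is made up.  */

static bool
example_same_canonical_type_p (tree t1, tree t2)
{
  return (gimple_register_canonical_type (t1)
          == gimple_register_canonical_type (t2));
}
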
/* Show statistics on references to the global type table gimple_types.  */

void
print_gimple_types_stats (void)
{
  if (gimple_types)
    fprintf (stderr, "GIMPLE type table: size %ld, %ld elements, "
             "%ld searches, %ld collisions (ratio: %f)\n",
             (long) htab_size (gimple_types),
             (long) htab_elements (gimple_types),
             (long) gimple_types->searches,
             (long) gimple_types->collisions,
             htab_collisions (gimple_types));
  else
    fprintf (stderr, "GIMPLE type table is empty\n");
  if (type_hash_cache)
    fprintf (stderr, "GIMPLE type hash table: size %ld, %ld elements, "
             "%ld searches, %ld collisions (ratio: %f)\n",
             (long) htab_size (type_hash_cache),
             (long) htab_elements (type_hash_cache),
             (long) type_hash_cache->searches,
             (long) type_hash_cache->collisions,
             htab_collisions (type_hash_cache));
  else
    fprintf (stderr, "GIMPLE type hash table is empty\n");
  if (gimple_canonical_types)
    fprintf (stderr, "GIMPLE canonical type table: size %ld, %ld elements, "
             "%ld searches, %ld collisions (ratio: %f)\n",
             (long) htab_size (gimple_canonical_types),
             (long) htab_elements (gimple_canonical_types),
             (long) gimple_canonical_types->searches,
             (long) gimple_canonical_types->collisions,
             htab_collisions (gimple_canonical_types));
  else
    fprintf (stderr, "GIMPLE canonical type table is empty\n");
  if (canonical_type_hash_cache)
    fprintf (stderr, "GIMPLE canonical type hash table: size %ld, %ld elements, "
             "%ld searches, %ld collisions (ratio: %f)\n",
             (long) htab_size (canonical_type_hash_cache),
             (long) htab_elements (canonical_type_hash_cache),
             (long) canonical_type_hash_cache->searches,
             (long) canonical_type_hash_cache->collisions,
             htab_collisions (canonical_type_hash_cache));
  else
    fprintf (stderr, "GIMPLE canonical type hash table is empty\n");
}

/* Free the gimple type hashtables used for LTO type merging.  */

void
free_gimple_type_tables (void)
{
  /* Last chance to print stats for the tables.  */
  if (flag_lto_report)
    print_gimple_types_stats ();

  if (gimple_types)
    {
      htab_delete (gimple_types);
      gimple_types = NULL;
    }
  if (gimple_canonical_types)
    {
      htab_delete (gimple_canonical_types);
      gimple_canonical_types = NULL;
    }
  if (type_hash_cache)
    {
      htab_delete (type_hash_cache);
      type_hash_cache = NULL;
    }
  if (canonical_type_hash_cache)
    {
      htab_delete (canonical_type_hash_cache);
      canonical_type_hash_cache = NULL;
    }
  if (type_pair_cache)
    {
      free (type_pair_cache);
      type_pair_cache = NULL;
    }
  gimple_type_leader = NULL;
}

/* Return a type the same as TYPE except unsigned or
   signed according to UNSIGNEDP.  */

tree
gimple_signed_or_unsigned_type (bool unsignedp, tree type)
{
  tree type1;

  type1 = TYPE_MAIN_VARIANT (type);
  if (type1 == signed_char_type_node
      || type1 == char_type_node
      || type1 == unsigned_char_type_node)
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (type1 == integer_type_node || type1 == unsigned_type_node)
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (type1 == long_long_integer_type_node
      || type1 == long_long_unsigned_type_node)
    return unsignedp
           ? long_long_unsigned_type_node
           : long_long_integer_type_node;
  if (int128_integer_type_node
      && (type1 == int128_integer_type_node
          || type1 == int128_unsigned_type_node))
    return unsignedp
           ? int128_unsigned_type_node
           : int128_integer_type_node;
#if HOST_BITS_PER_WIDE_INT >= 64
  if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
#define GIMPLE_FIXED_TYPES(NAME) \
  if (type1 == short_ ## NAME ## _type_node \
      || type1 == unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_short_ ## NAME ## _type_node \
                     : short_ ## NAME ## _type_node; \
  if (type1 == NAME ## _type_node \
      || type1 == unsigned_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_ ## NAME ## _type_node \
                     : NAME ## _type_node; \
  if (type1 == long_ ## NAME ## _type_node \
      || type1 == unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_ ## NAME ## _type_node \
                     : long_ ## NAME ## _type_node; \
  if (type1 == long_long_ ## NAME ## _type_node \
      || type1 == unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
                     : long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES(NAME) \
  if (type1 == NAME ## _type_node \
      || type1 == u ## NAME ## _type_node) \
    return unsignedp ? u ## NAME ## _type_node \
                     : NAME ## _type_node;

#define GIMPLE_FIXED_TYPES_SAT(NAME) \
  if (type1 == sat_ ## short_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
                     : sat_ ## short_ ## NAME ## _type_node; \
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
                     : sat_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
                     : sat_ ## long_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
                     : sat_ ## long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES_SAT(NAME) \
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## u ## NAME ## _type_node) \
    return unsignedp ? sat_ ## u ## NAME ## _type_node \
                     : sat_ ## NAME ## _type_node;

  GIMPLE_FIXED_TYPES (fract);
  GIMPLE_FIXED_TYPES_SAT (fract);
  GIMPLE_FIXED_TYPES (accum);
  GIMPLE_FIXED_TYPES_SAT (accum);

  GIMPLE_FIXED_MODE_TYPES (qq);
  GIMPLE_FIXED_MODE_TYPES (hq);
  GIMPLE_FIXED_MODE_TYPES (sq);
  GIMPLE_FIXED_MODE_TYPES (dq);
  GIMPLE_FIXED_MODE_TYPES (tq);
  GIMPLE_FIXED_MODE_TYPES_SAT (qq);
  GIMPLE_FIXED_MODE_TYPES_SAT (hq);
  GIMPLE_FIXED_MODE_TYPES_SAT (sq);
  GIMPLE_FIXED_MODE_TYPES_SAT (dq);
  GIMPLE_FIXED_MODE_TYPES_SAT (tq);
  GIMPLE_FIXED_MODE_TYPES (ha);
  GIMPLE_FIXED_MODE_TYPES (sa);
  GIMPLE_FIXED_MODE_TYPES (da);
  GIMPLE_FIXED_MODE_TYPES (ta);
  GIMPLE_FIXED_MODE_TYPES_SAT (ha);
  GIMPLE_FIXED_MODE_TYPES_SAT (sa);
  GIMPLE_FIXED_MODE_TYPES_SAT (da);
  GIMPLE_FIXED_MODE_TYPES_SAT (ta);

  /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
     the precision; they have precision set to match their range, but
     may use a wider mode to match an ABI.  If we change modes, we may
     wind up with bad conversions.  For INTEGER_TYPEs in C, must check
     the precision as well, so as to yield correct results for
     bit-field types.  C++ does not have these separate bit-field
     types, and producing a signed or unsigned variant of an
     ENUMERAL_TYPE may cause other problems as well.  */
  if (!INTEGRAL_TYPE_P (type)
      || TYPE_UNSIGNED (type) == unsignedp)
    return type;

#define TYPE_OK(node) \
  (TYPE_MODE (type) == TYPE_MODE (node) \
   && TYPE_PRECISION (type) == TYPE_PRECISION (node))
  if (TYPE_OK (signed_char_type_node))
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (TYPE_OK (integer_type_node))
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (TYPE_OK (short_integer_type_node))
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (TYPE_OK (long_integer_type_node))
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (TYPE_OK (long_long_integer_type_node))
    return (unsignedp
            ? long_long_unsigned_type_node
            : long_long_integer_type_node);
  if (int128_integer_type_node && TYPE_OK (int128_integer_type_node))
    return (unsignedp
            ? int128_unsigned_type_node
            : int128_integer_type_node);

#if HOST_BITS_PER_WIDE_INT >= 64
  if (TYPE_OK (intTI_type_node))
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (TYPE_OK (intDI_type_node))
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (TYPE_OK (intSI_type_node))
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (TYPE_OK (intHI_type_node))
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (TYPE_OK (intQI_type_node))
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

#undef GIMPLE_FIXED_TYPES
#undef GIMPLE_FIXED_MODE_TYPES
#undef GIMPLE_FIXED_TYPES_SAT
#undef GIMPLE_FIXED_MODE_TYPES_SAT
#undef TYPE_OK

  return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
}

/* Return an unsigned type the same as TYPE in other respects.  */

tree
gimple_unsigned_type (tree type)
{
  return gimple_signed_or_unsigned_type (true, type);
}


/* Return a signed type the same as TYPE in other respects.  */

tree
gimple_signed_type (tree type)
{
  return gimple_signed_or_unsigned_type (false, type);
}

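/* Illustrative, hypothetical sketch (not from the GCC sources) of the
   common pattern of moving a computation to the unsigned flavour of its
   type so that wrapping overflow is well defined.  The helper name and
   the use of fold_convert are only for illustration.  */

static tree
example_convert_to_unsigned_flavour (tree type, tree val)
{
  tree utype = gimple_unsigned_type (type);
  /* UTYPE has the same precision and mode as TYPE but is unsigned; a
     caller would convert the final result back to TYPE.  */
  return fold_convert (utype, val);
}
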
/* Return the type-based alias set for T, which may be an expression
   or a type.  Return -1 if we don't do anything special.  */

alias_set_type
gimple_get_alias_set (tree t)
{
  tree u;

  /* Permit type-punning when accessing a union, provided the access
     is directly through the union.  For example, this code does not
     permit taking the address of a union member and then storing
     through it.  Even the type-punning allowed here is a GCC
     extension, albeit a common and useful one; the C standard says
     that such accesses have implementation-defined behavior.  */
  for (u = t;
       TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
       u = TREE_OPERAND (u, 0))
    if (TREE_CODE (u) == COMPONENT_REF
        && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
      return 0;

  /* That's all the expressions we handle specially.  */
  if (!TYPE_P (t))
    return -1;

  /* For convenience, follow the C standard when dealing with
     character types.  Any object may be accessed via an lvalue that
     has character type.  */
  if (t == char_type_node
      || t == signed_char_type_node
      || t == unsigned_char_type_node)
    return 0;

  /* Allow aliasing between signed and unsigned variants of the same
     type.  We treat the signed variant as canonical.  */
  if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
    {
      tree t1 = gimple_signed_type (t);

      /* t1 == t can happen for boolean nodes which are always unsigned.  */
      if (t1 != t)
        return get_alias_set (t1);
    }

  return -1;
}

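/* Illustrative, hypothetical sketch (not from the GCC sources) of the
   contract of gimple_get_alias_set: a result of -1 means "nothing
   special, fall back to the caller's default", any other value is the
   alias set to use.  The helper name is made up.  */

static alias_set_type
example_alias_set_or_default (tree t, alias_set_type default_set)
{
  alias_set_type set = gimple_get_alias_set (t);
  return set != -1 ? set : default_set;
}
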
/* Data structure used to count the number of dereferences to PTR
   inside an expression.  */
struct count_ptr_d
{
  tree ptr;
  unsigned num_stores;
  unsigned num_loads;
};

/* Helper for count_uses_and_derefs.  Called by walk_tree to look for
   (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA.  */

static tree
count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info;

  /* Do not walk inside ADDR_EXPR nodes.  In the expression &ptr->fld,
     pointer 'ptr' is *not* dereferenced, it is simply used to compute
     the address of 'fld' as 'ptr + offsetof(fld)'.  */
  if (TREE_CODE (*tp) == ADDR_EXPR)
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (*tp) == MEM_REF && TREE_OPERAND (*tp, 0) == count_p->ptr)
    {
      if (wi_p->is_lhs)
        count_p->num_stores++;
      else
        count_p->num_loads++;
    }

  return NULL_TREE;
}

/* Count the number of direct and indirect uses for pointer PTR in
   statement STMT.  The number of direct uses is stored in
   *NUM_USES_P.  Indirect references are counted separately depending
   on whether they are store or load operations.  The counts are
   stored in *NUM_STORES_P and *NUM_LOADS_P.  */

void
count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p,
                       unsigned *num_loads_p, unsigned *num_stores_p)
{
  ssa_op_iter i;
  tree use;

  *num_uses_p = 0;
  *num_loads_p = 0;
  *num_stores_p = 0;

  /* Find out the total number of uses of PTR in STMT.  */
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
    if (use == ptr)
      (*num_uses_p)++;

  /* Now count the number of indirect references to PTR.  This is
     truly awful, but we don't have much choice.  There are no parent
     pointers inside INDIRECT_REFs, so an expression like
     '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
     find all the indirect and direct uses of x_1 inside.  The only
     shortcut we can take is the fact that GIMPLE only allows
     INDIRECT_REFs inside the expressions below.  */
  if (is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_RETURN
      || gimple_code (stmt) == GIMPLE_ASM
      || is_gimple_call (stmt))
    {
      struct walk_stmt_info wi;
      struct count_ptr_d count;

      count.ptr = ptr;
      count.num_stores = 0;
      count.num_loads = 0;

      memset (&wi, 0, sizeof (wi));
      wi.info = &count;
      walk_gimple_op (stmt, count_ptr_derefs, &wi);

      *num_stores_p = count.num_stores;
      *num_loads_p = count.num_loads;
    }

  gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
}

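/* Illustrative, hypothetical sketch (not from the GCC sources) of a
   caller that uses count_uses_and_derefs to verify that PTR is never
   stored through by STMT.  The helper name is made up.  */

static bool
example_ptr_never_stored_through_p (tree ptr, gimple stmt)
{
  unsigned num_uses, num_loads, num_stores;

  count_uses_and_derefs (ptr, stmt, &num_uses, &num_loads, &num_stores);
  return num_stores == 0;
}
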
/* From a tree operand OP return the base of a load or store operation
   or NULL_TREE if OP is not a load or a store.  */

static tree
get_base_loadstore (tree op)
{
  while (handled_component_p (op))
    op = TREE_OPERAND (op, 0);
  if (DECL_P (op)
      || INDIRECT_REF_P (op)
      || TREE_CODE (op) == MEM_REF
      || TREE_CODE (op) == TARGET_MEM_REF)
    return op;
  return NULL_TREE;
}

/* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
   VISIT_ADDR if non-NULL on loads, stores and address-taken operands
   passing the STMT, the base of the operand and DATA to it.  The base
   will be either a decl, an indirect reference (including TARGET_MEM_REF)
   or the argument of an address expression.
   Returns the results of these callbacks or'ed.  */

bool
walk_stmt_load_store_addr_ops (gimple stmt, void *data,
                               bool (*visit_load)(gimple, tree, void *),
                               bool (*visit_store)(gimple, tree, void *),
                               bool (*visit_addr)(gimple, tree, void *))
{
  bool ret = false;
  unsigned i;
  if (gimple_assign_single_p (stmt))
    {
      tree lhs, rhs;
      if (visit_store)
        {
          lhs = get_base_loadstore (gimple_assign_lhs (stmt));
          if (lhs)
            ret |= visit_store (stmt, lhs, data);
        }
      rhs = gimple_assign_rhs1 (stmt);
      while (handled_component_p (rhs))
        rhs = TREE_OPERAND (rhs, 0);
      if (visit_addr)
        {
          if (TREE_CODE (rhs) == ADDR_EXPR)
            ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
          else if (TREE_CODE (rhs) == TARGET_MEM_REF
                   && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
            ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data);
          else if (TREE_CODE (rhs) == OBJ_TYPE_REF
                   && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
            ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
                                                   0), data);
          else if (TREE_CODE (rhs) == CONSTRUCTOR)
            {
              unsigned int ix;
              tree val;

              FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), ix, val)
                if (TREE_CODE (val) == ADDR_EXPR)
                  ret |= visit_addr (stmt, TREE_OPERAND (val, 0), data);
                else if (TREE_CODE (val) == OBJ_TYPE_REF
                         && TREE_CODE (OBJ_TYPE_REF_OBJECT (val)) == ADDR_EXPR)
                  ret |= visit_addr (stmt,
                                     TREE_OPERAND (OBJ_TYPE_REF_OBJECT (val),
                                                   0), data);
            }
          lhs = gimple_assign_lhs (stmt);
          if (TREE_CODE (lhs) == TARGET_MEM_REF
              && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
            ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), data);
        }
      if (visit_load)
        {
          rhs = get_base_loadstore (rhs);
          if (rhs)
            ret |= visit_load (stmt, rhs, data);
        }
    }
  else if (visit_addr
           && (is_gimple_assign (stmt)
               || gimple_code (stmt) == GIMPLE_COND))
    {
      for (i = 0; i < gimple_num_ops (stmt); ++i)
        if (gimple_op (stmt, i)
            && TREE_CODE (gimple_op (stmt, i)) == ADDR_EXPR)
          ret |= visit_addr (stmt, TREE_OPERAND (gimple_op (stmt, i), 0), data);
    }
  else if (is_gimple_call (stmt))
    {
      if (visit_store)
        {
          tree lhs = gimple_call_lhs (stmt);
          if (lhs)
            {
              lhs = get_base_loadstore (lhs);
              if (lhs)
                ret |= visit_store (stmt, lhs, data);
            }
        }
      if (visit_load || visit_addr)
        for (i = 0; i < gimple_call_num_args (stmt); ++i)
          {
            tree rhs = gimple_call_arg (stmt, i);
            if (visit_addr
                && TREE_CODE (rhs) == ADDR_EXPR)
              ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
            else if (visit_load)
              {
                rhs = get_base_loadstore (rhs);
                if (rhs)
                  ret |= visit_load (stmt, rhs, data);
              }
          }
      if (visit_addr
          && gimple_call_chain (stmt)
          && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
        ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
                           data);
      if (visit_addr
          && gimple_call_return_slot_opt_p (stmt)
          && gimple_call_lhs (stmt) != NULL_TREE
          && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
        ret |= visit_addr (stmt, gimple_call_lhs (stmt), data);
    }
  else if (gimple_code (stmt) == GIMPLE_ASM)
    {
      unsigned noutputs;
      const char *constraint;
      const char **oconstraints;
      bool allows_mem, allows_reg, is_inout;
      noutputs = gimple_asm_noutputs (stmt);
      oconstraints = XALLOCAVEC (const char *, noutputs);
      if (visit_store || visit_addr)
        for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
          {
            tree link = gimple_asm_output_op (stmt, i);
            tree op = get_base_loadstore (TREE_VALUE (link));
            if (op && visit_store)
              ret |= visit_store (stmt, op, data);
            if (visit_addr)
              {
                constraint = TREE_STRING_POINTER
                    (TREE_VALUE (TREE_PURPOSE (link)));
                oconstraints[i] = constraint;
                parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
                                         &allows_reg, &is_inout);
                if (op && !allows_reg && allows_mem)
                  ret |= visit_addr (stmt, op, data);
              }
          }
      if (visit_load || visit_addr)
        for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
          {
            tree link = gimple_asm_input_op (stmt, i);
            tree op = TREE_VALUE (link);
            if (visit_addr
                && TREE_CODE (op) == ADDR_EXPR)
              ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
            else if (visit_load || visit_addr)
              {
                op = get_base_loadstore (op);
                if (op)
                  {
                    if (visit_load)
                      ret |= visit_load (stmt, op, data);
                    if (visit_addr)
                      {
                        constraint = TREE_STRING_POINTER
                            (TREE_VALUE (TREE_PURPOSE (link)));
                        parse_input_constraint (&constraint, 0, 0, noutputs,
                                                0, oconstraints,
                                                &allows_mem, &allows_reg);
                        if (!allows_reg && allows_mem)
                          ret |= visit_addr (stmt, op, data);
                      }
                  }
              }
          }
    }
  else if (gimple_code (stmt) == GIMPLE_RETURN)
    {
      tree op = gimple_return_retval (stmt);
      if (op)
        {
          if (visit_addr
              && TREE_CODE (op) == ADDR_EXPR)
            ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
          else if (visit_load)
            {
              op = get_base_loadstore (op);
              if (op)
                ret |= visit_load (stmt, op, data);
            }
        }
    }
  else if (visit_addr
           && gimple_code (stmt) == GIMPLE_PHI)
    {
      for (i = 0; i < gimple_phi_num_args (stmt); ++i)
        {
          tree op = PHI_ARG_DEF (stmt, i);
          if (TREE_CODE (op) == ADDR_EXPR)
            ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
        }
    }

  return ret;
}

/* Like walk_stmt_load_store_addr_ops but with NULL visit_addr.  IPA-CP
   should make a faster clone for this case.  */

bool
walk_stmt_load_store_ops (gimple stmt, void *data,
                          bool (*visit_load)(gimple, tree, void *),
                          bool (*visit_store)(gimple, tree, void *))
{
  return walk_stmt_load_store_addr_ops (stmt, data,
                                        visit_load, visit_store, NULL);
}

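/* Illustrative, hypothetical sketch (not from the GCC sources) of a
   visit_store callback plus a wrapper that uses walk_stmt_load_store_ops
   to ask whether STMT stores to DECL.  Both helper names are made up;
   DATA carries the decl being searched for.  */

static bool
example_visit_store (gimple stmt ATTRIBUTE_UNUSED, tree base, void *data)
{
  return base == (tree) data;
}

static bool
example_stmt_stores_decl_p (gimple stmt, tree decl)
{
  /* Only the store callback is provided; loads and taken addresses are
     ignored.  */
  return walk_stmt_load_store_ops (stmt, decl, NULL, example_visit_store);
}
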
/* Helper for gimple_ior_addresses_taken: record in the bitmap passed
   via DATA the decl whose address is taken in ADDR.  */

static bool
gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
                              tree addr, void *data)
{
  bitmap addresses_taken = (bitmap) data;
  addr = get_base_address (addr);
  if (addr
      && DECL_P (addr))
    {
      bitmap_set_bit (addresses_taken, DECL_UID (addr));
      return true;
    }
  return false;
}

/* Set the bit for the uid of all decls that have their address taken
   in STMT in the ADDRESSES_TAKEN bitmap.  Returns true if there
   were any in this stmt.  */

bool
gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
{
  return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
                                        gimple_ior_addresses_taken_1);
}

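/* Illustrative, hypothetical sketch (not from the GCC sources) that
   accumulates the address-taken decls of a whole statement sequence into
   ADDRESSES_TAKEN by applying gimple_ior_addresses_taken to each
   statement.  The helper name is made up.  */

static void
example_seq_addresses_taken (gimple_seq seq, bitmap addresses_taken)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    gimple_ior_addresses_taken (addresses_taken, gsi_stmt (gsi));
}
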
/* Return a printable name for symbol DECL.  */

const char *
gimple_decl_printable_name (tree decl, int verbosity)
{
  if (!DECL_NAME (decl))
    return NULL;

  if (DECL_ASSEMBLER_NAME_SET_P (decl))
    {
      const char *str, *mangled_str;
      int dmgl_opts = DMGL_NO_OPTS;

      if (verbosity >= 2)
        {
          dmgl_opts = DMGL_VERBOSE
                      | DMGL_ANSI
                      | DMGL_GNU_V3
                      | DMGL_RET_POSTFIX;
          if (TREE_CODE (decl) == FUNCTION_DECL)
            dmgl_opts |= DMGL_PARAMS;
        }

      mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
      str = cplus_demangle_v3 (mangled_str, dmgl_opts);
      return (str) ? str : mangled_str;
    }

  return IDENTIFIER_POINTER (DECL_NAME (decl));
}

/* Return true when STMT is a call to the builtin function CODE.  */

bool
gimple_call_builtin_p (gimple stmt, enum built_in_function code)
{
  tree fndecl;
  return (is_gimple_call (stmt)
          && (fndecl = gimple_call_fndecl (stmt)) != NULL
          && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
          && DECL_FUNCTION_CODE (fndecl) == code);
}

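/* Illustrative, hypothetical sketch (not from the GCC sources) of the
   usual pattern for recognising a call to one specific builtin, here
   memcpy.  The helper name is made up.  */

static bool
example_is_memcpy_call_p (gimple stmt)
{
  return gimple_call_builtin_p (stmt, BUILT_IN_MEMCPY);
}
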
/* Return true if STMT clobbers memory.  STMT is required to be a
   GIMPLE_ASM.  */

bool
gimple_asm_clobbers_memory_p (const_gimple stmt)
{
  unsigned i;

  for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
    {
      tree op = gimple_asm_clobber_op (stmt, i);
      if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
        return true;
    }

  return false;
}

#include "gt-gimple.h"