/* Gimple IR support functions.

   Copyright 2007, 2008, 2009 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "target.h"
#include "tree.h"
#include "ggc.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "gimple.h"
#include "toplev.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "value-prof.h"
#include "flags.h"
39 /* All the tuples have their operand vector (if present) at the very bottom
40 of the structure. Therefore, the offset required to find the
41 operands vector the size of the structure minus the size of the 1
42 element tree array at the end (see gimple_ops). */
43 #define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
44 (HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
45 EXPORTED_CONST size_t gimple_ops_offset_[] = {
46 #include "gsstruct.def"
50 #define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof(struct STRUCT),
51 static const size_t gsstruct_code_size[] = {
52 #include "gsstruct.def"
56 #define DEFGSCODE(SYM, NAME, GSSCODE) NAME,
57 const char *const gimple_code_name[] = {
62 #define DEFGSCODE(SYM, NAME, GSSCODE) GSSCODE,
63 EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
68 #ifdef GATHER_STATISTICS
71 int gimple_alloc_counts[(int) gimple_alloc_kind_all];
72 int gimple_alloc_sizes[(int) gimple_alloc_kind_all];
74 /* Keep in sync with gimple.h:enum gimple_alloc_kind. */
75 static const char * const gimple_alloc_kind_names[] = {
83 #endif /* GATHER_STATISTICS */
85 /* A cache of gimple_seq objects. Sequences are created and destroyed
86 fairly often during gimplification. */
87 static GTY ((deletable)) struct gimple_seq_d *gimple_seq_cache;
89 /* Private API manipulation functions shared only with some
91 extern void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
92 extern void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);
94 /* Gimple tuple constructors.
95 Note: Any constructor taking a ``gimple_seq'' as a parameter, can
96 be passed a NULL to start with an empty sequence. */
98 /* Set the code for statement G to CODE. */
101 gimple_set_code (gimple g, enum gimple_code code)
103 g->gsbase.code = code;
106 /* Return the number of bytes needed to hold a GIMPLE statement with
110 gimple_size (enum gimple_code code)
112 return gsstruct_code_size[gss_for_code (code)];
115 /* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
118 #define gimple_alloc(c, n) gimple_alloc_stat (c, n MEM_STAT_INFO)
120 gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
125 size = gimple_size (code);
127 size += sizeof (tree) * (num_ops - 1);
129 #ifdef GATHER_STATISTICS
131 enum gimple_alloc_kind kind = gimple_alloc_kind (code);
132 gimple_alloc_counts[(int) kind]++;
133 gimple_alloc_sizes[(int) kind] += size;
137 stmt = (gimple) ggc_alloc_cleared_stat (size PASS_MEM_STAT);
138 gimple_set_code (stmt, code);
139 gimple_set_num_ops (stmt, num_ops);
141 /* Do not call gimple_set_modified here as it has other side
142 effects and this tuple is still not completely built. */
143 stmt->gsbase.modified = 1;
148 /* Set SUBCODE to be the code of the expression computed by statement G. */
151 gimple_set_subcode (gimple g, unsigned subcode)
153 /* We only have 16 bits for the RHS code. Assert that we are not
155 gcc_assert (subcode < (1 << 16));
156 g->gsbase.subcode = subcode;
161 /* Build a tuple with operands. CODE is the statement to build (which
162 must be one of the GIMPLE_WITH_OPS tuples). SUBCODE is the sub-code
163 for the new tuple. NUM_OPS is the number of operands to allocate. */
165 #define gimple_build_with_ops(c, s, n) \
166 gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)
169 gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
170 unsigned num_ops MEM_STAT_DECL)
172 gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
173 gimple_set_subcode (s, subcode);
179 /* Build a GIMPLE_RETURN statement returning RETVAL. */
182 gimple_build_return (tree retval)
184 gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 1);
186 gimple_return_set_retval (s, retval);
190 /* Helper for gimple_build_call, gimple_build_call_vec and
191 gimple_build_call_from_tree. Build the basic components of a
192 GIMPLE_CALL statement to function FN with NARGS arguments. */
195 gimple_build_call_1 (tree fn, unsigned nargs)
197 gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
198 if (TREE_CODE (fn) == FUNCTION_DECL)
199 fn = build_fold_addr_expr (fn);
200 gimple_set_op (s, 1, fn);
205 /* Build a GIMPLE_CALL statement to function FN with the arguments
206 specified in vector ARGS. */
209 gimple_build_call_vec (tree fn, VEC(tree, heap) *args)
212 unsigned nargs = VEC_length (tree, args);
213 gimple call = gimple_build_call_1 (fn, nargs);
215 for (i = 0; i < nargs; i++)
216 gimple_call_set_arg (call, i, VEC_index (tree, args, i));
222 /* Build a GIMPLE_CALL statement to function FN. NARGS is the number of
223 arguments. The ... are the arguments. */
226 gimple_build_call (tree fn, unsigned nargs, ...)
232 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
234 call = gimple_build_call_1 (fn, nargs);
236 va_start (ap, nargs);
237 for (i = 0; i < nargs; i++)
238 gimple_call_set_arg (call, i, va_arg (ap, tree));
245 /* Build a GIMPLE_CALL statement from CALL_EXPR T. Note that T is
246 assumed to be in GIMPLE form already. Minimal checking is done of
250 gimple_build_call_from_tree (tree t)
254 tree fndecl = get_callee_fndecl (t);
256 gcc_assert (TREE_CODE (t) == CALL_EXPR);
258 nargs = call_expr_nargs (t);
259 call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);
261 for (i = 0; i < nargs; i++)
262 gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));
264 gimple_set_block (call, TREE_BLOCK (t));
266 /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL. */
267 gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
268 gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
269 gimple_call_set_cannot_inline (call, CALL_CANNOT_INLINE_P (t));
270 gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
271 gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
272 gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
273 gimple_set_no_warning (call, TREE_NO_WARNING (t));
279 /* Extract the operands and code for expression EXPR into *SUBCODE_P,
280 *OP1_P and *OP2_P respectively. */
283 extract_ops_from_tree (tree expr, enum tree_code *subcode_p, tree *op1_p,
286 enum gimple_rhs_class grhs_class;
288 *subcode_p = TREE_CODE (expr);
289 grhs_class = get_gimple_rhs_class (*subcode_p);
291 if (grhs_class == GIMPLE_BINARY_RHS)
293 *op1_p = TREE_OPERAND (expr, 0);
294 *op2_p = TREE_OPERAND (expr, 1);
296 else if (grhs_class == GIMPLE_UNARY_RHS)
298 *op1_p = TREE_OPERAND (expr, 0);
301 else if (grhs_class == GIMPLE_SINGLE_RHS)
311 /* Build a GIMPLE_ASSIGN statement.
313 LHS of the assignment.
314 RHS of the assignment which can be unary or binary. */
317 gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
319 enum tree_code subcode;
322 extract_ops_from_tree (rhs, &subcode, &op1, &op2);
323 return gimple_build_assign_with_ops_stat (subcode, lhs, op1, op2
328 /* Build a GIMPLE_ASSIGN statement with sub-code SUBCODE and operands
329 OP1 and OP2. If OP2 is NULL then SUBCODE must be of class
330 GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS. */
333 gimple_build_assign_with_ops_stat (enum tree_code subcode, tree lhs, tree op1,
334 tree op2 MEM_STAT_DECL)
339 /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
341 num_ops = get_gimple_rhs_num_ops (subcode) + 1;
343 p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
345 gimple_assign_set_lhs (p, lhs);
346 gimple_assign_set_rhs1 (p, op1);
349 gcc_assert (num_ops > 2);
350 gimple_assign_set_rhs2 (p, op2);
357 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
359 DST/SRC are the destination and source respectively. You can pass
360 ungimplified trees in DST or SRC, in which case they will be
361 converted to a gimple operand if necessary.
363 This function returns the newly created GIMPLE_ASSIGN tuple. */
366 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
368 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
369 gimplify_and_add (t, seq_p);
371 return gimple_seq_last_stmt (*seq_p);
375 /* Build a GIMPLE_COND statement.
377 PRED is the condition used to compare LHS and the RHS.
378 T_LABEL is the label to jump to if the condition is true.
379 F_LABEL is the label to jump to otherwise. */
382 gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
383 tree t_label, tree f_label)
387 gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
388 p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
389 gimple_cond_set_lhs (p, lhs);
390 gimple_cond_set_rhs (p, rhs);
391 gimple_cond_set_true_label (p, t_label);
392 gimple_cond_set_false_label (p, f_label);
397 /* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND. */
400 gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
401 tree *lhs_p, tree *rhs_p)
403 location_t loc = EXPR_LOCATION (cond);
404 gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
405 || TREE_CODE (cond) == TRUTH_NOT_EXPR
406 || is_gimple_min_invariant (cond)
407 || SSA_VAR_P (cond));
409 extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);
411 /* Canonicalize conditionals of the form 'if (!VAL)'. */
412 if (*code_p == TRUTH_NOT_EXPR)
415 gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
416 *rhs_p = fold_convert_loc (loc, TREE_TYPE (*lhs_p), integer_zero_node);
418 /* Canonicalize conditionals of the form 'if (VAL)' */
419 else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
422 gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
423 *rhs_p = fold_convert_loc (loc, TREE_TYPE (*lhs_p), integer_zero_node);
428 /* Build a GIMPLE_COND statement from the conditional expression tree
429 COND. T_LABEL and F_LABEL are as in gimple_build_cond. */
432 gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
437 gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
438 return gimple_build_cond (code, lhs, rhs, t_label, f_label);
441 /* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
442 boolean expression tree COND. */
445 gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
450 gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
451 gimple_cond_set_condition (stmt, code, lhs, rhs);
454 /* Build a GIMPLE_LABEL statement for LABEL. */
457 gimple_build_label (tree label)
459 gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
460 gimple_label_set_label (p, label);
464 /* Build a GIMPLE_GOTO statement to label DEST. */
467 gimple_build_goto (tree dest)
469 gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
470 gimple_goto_set_dest (p, dest);
475 /* Build a GIMPLE_NOP statement. */
478 gimple_build_nop (void)
480 return gimple_alloc (GIMPLE_NOP, 0);
484 /* Build a GIMPLE_BIND statement.
485 VARS are the variables in BODY.
486 BLOCK is the containing block. */
489 gimple_build_bind (tree vars, gimple_seq body, tree block)
491 gimple p = gimple_alloc (GIMPLE_BIND, 0);
492 gimple_bind_set_vars (p, vars);
494 gimple_bind_set_body (p, body);
496 gimple_bind_set_block (p, block);
500 /* Helper function to set the simple fields of a asm stmt.
502 STRING is a pointer to a string that is the asm blocks assembly code.
503 NINPUT is the number of register inputs.
504 NOUTPUT is the number of register outputs.
505 NCLOBBERS is the number of clobbered registers.
509 gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
510 unsigned nclobbers, unsigned nlabels)
513 int size = strlen (string);
515 /* ASMs with labels cannot have outputs. This should have been
516 enforced by the front end. */
517 gcc_assert (nlabels == 0 || noutputs == 0);
519 p = gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
520 ninputs + noutputs + nclobbers + nlabels);
522 p->gimple_asm.ni = ninputs;
523 p->gimple_asm.no = noutputs;
524 p->gimple_asm.nc = nclobbers;
525 p->gimple_asm.nl = nlabels;
526 p->gimple_asm.string = ggc_alloc_string (string, size);
528 #ifdef GATHER_STATISTICS
529 gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;
535 /* Build a GIMPLE_ASM statement.
537 STRING is the assembly code.
538 NINPUT is the number of register inputs.
539 NOUTPUT is the number of register outputs.
540 NCLOBBERS is the number of clobbered registers.
541 INPUTS is a vector of the input register parameters.
542 OUTPUTS is a vector of the output register parameters.
543 CLOBBERS is a vector of the clobbered register parameters.
544 LABELS is a vector of destination labels. */
547 gimple_build_asm_vec (const char *string, VEC(tree,gc)* inputs,
548 VEC(tree,gc)* outputs, VEC(tree,gc)* clobbers,
549 VEC(tree,gc)* labels)
554 p = gimple_build_asm_1 (string,
555 VEC_length (tree, inputs),
556 VEC_length (tree, outputs),
557 VEC_length (tree, clobbers),
558 VEC_length (tree, labels));
560 for (i = 0; i < VEC_length (tree, inputs); i++)
561 gimple_asm_set_input_op (p, i, VEC_index (tree, inputs, i));
563 for (i = 0; i < VEC_length (tree, outputs); i++)
564 gimple_asm_set_output_op (p, i, VEC_index (tree, outputs, i));
566 for (i = 0; i < VEC_length (tree, clobbers); i++)
567 gimple_asm_set_clobber_op (p, i, VEC_index (tree, clobbers, i));
569 for (i = 0; i < VEC_length (tree, labels); i++)
570 gimple_asm_set_label_op (p, i, VEC_index (tree, labels, i));
575 /* Build a GIMPLE_CATCH statement.
577 TYPES are the catch types.
578 HANDLER is the exception handler. */
581 gimple_build_catch (tree types, gimple_seq handler)
583 gimple p = gimple_alloc (GIMPLE_CATCH, 0);
584 gimple_catch_set_types (p, types);
586 gimple_catch_set_handler (p, handler);
591 /* Build a GIMPLE_EH_FILTER statement.
593 TYPES are the filter's types.
594 FAILURE is the filter's failure action. */
597 gimple_build_eh_filter (tree types, gimple_seq failure)
599 gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
600 gimple_eh_filter_set_types (p, types);
602 gimple_eh_filter_set_failure (p, failure);
607 /* Build a GIMPLE_EH_MUST_NOT_THROW statement. */
610 gimple_build_eh_must_not_throw (tree decl)
612 gimple p = gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 1);
614 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
615 gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
616 p->gimple_eh_mnt.fndecl = decl;
621 /* Build a GIMPLE_TRY statement.
623 EVAL is the expression to evaluate.
624 CLEANUP is the cleanup expression.
625 KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
626 whether this is a try/catch or a try/finally respectively. */
629 gimple_build_try (gimple_seq eval, gimple_seq cleanup,
630 enum gimple_try_flags kind)
634 gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
635 p = gimple_alloc (GIMPLE_TRY, 0);
636 gimple_set_subcode (p, kind);
638 gimple_try_set_eval (p, eval);
640 gimple_try_set_cleanup (p, cleanup);
645 /* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.
647 CLEANUP is the cleanup expression. */
650 gimple_build_wce (gimple_seq cleanup)
652 gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
654 gimple_wce_set_cleanup (p, cleanup);
660 /* Build a GIMPLE_RESX statement. */
663 gimple_build_resx (int region)
665 gimple p = gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0);
666 p->gimple_eh_ctrl.region = region;
671 /* The helper for constructing a gimple switch statement.
672 INDEX is the switch's index.
673 NLABELS is the number of labels in the switch excluding the default.
674 DEFAULT_LABEL is the default label for the switch statement. */
677 gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
679 /* nlabels + 1 default label + 1 index. */
680 gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
681 1 + (default_label != NULL) + nlabels);
682 gimple_switch_set_index (p, index);
684 gimple_switch_set_default_label (p, default_label);
689 /* Build a GIMPLE_SWITCH statement.
691 INDEX is the switch's index.
692 NLABELS is the number of labels in the switch excluding the DEFAULT_LABEL.
693 ... are the labels excluding the default. */
696 gimple_build_switch (unsigned nlabels, tree index, tree default_label, ...)
700 gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);
702 /* Store the rest of the labels. */
703 va_start (al, default_label);
704 offset = (default_label != NULL);
705 for (i = 0; i < nlabels; i++)
706 gimple_switch_set_label (p, i + offset, va_arg (al, tree));
713 /* Build a GIMPLE_SWITCH statement.
715 INDEX is the switch's index.
716 DEFAULT_LABEL is the default label
717 ARGS is a vector of labels excluding the default. */
720 gimple_build_switch_vec (tree index, tree default_label, VEC(tree, heap) *args)
722 unsigned i, offset, nlabels = VEC_length (tree, args);
723 gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);
725 /* Copy the labels from the vector to the switch statement. */
726 offset = (default_label != NULL);
727 for (i = 0; i < nlabels; i++)
728 gimple_switch_set_label (p, i + offset, VEC_index (tree, args, i));
733 /* Build a GIMPLE_EH_DISPATCH statement. */
736 gimple_build_eh_dispatch (int region)
738 gimple p = gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0);
739 p->gimple_eh_ctrl.region = region;
743 /* Build a new GIMPLE_DEBUG_BIND statement.
745 VAR is bound to VALUE; block and location are taken from STMT. */
748 gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
750 gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
751 (unsigned)GIMPLE_DEBUG_BIND, 2
754 gimple_debug_bind_set_var (p, var);
755 gimple_debug_bind_set_value (p, value);
758 gimple_set_block (p, gimple_block (stmt));
759 gimple_set_location (p, gimple_location (stmt));
766 /* Build a GIMPLE_OMP_CRITICAL statement.
768 BODY is the sequence of statements for which only one thread can execute.
769 NAME is optional identifier for this critical block. */
772 gimple_build_omp_critical (gimple_seq body, tree name)
774 gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
775 gimple_omp_critical_set_name (p, name);
777 gimple_omp_set_body (p, body);
782 /* Build a GIMPLE_OMP_FOR statement.
784 BODY is sequence of statements inside the for loop.
785 CLAUSES, are any of the OMP loop construct's clauses: private, firstprivate,
786 lastprivate, reductions, ordered, schedule, and nowait.
787 COLLAPSE is the collapse count.
788 PRE_BODY is the sequence of statements that are loop invariant. */
791 gimple_build_omp_for (gimple_seq body, tree clauses, size_t collapse,
794 gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
796 gimple_omp_set_body (p, body);
797 gimple_omp_for_set_clauses (p, clauses);
798 p->gimple_omp_for.collapse = collapse;
799 p->gimple_omp_for.iter = GGC_CNEWVEC (struct gimple_omp_for_iter, collapse);
801 gimple_omp_for_set_pre_body (p, pre_body);
807 /* Build a GIMPLE_OMP_PARALLEL statement.
809 BODY is sequence of statements which are executed in parallel.
810 CLAUSES, are the OMP parallel construct's clauses.
811 CHILD_FN is the function created for the parallel threads to execute.
812 DATA_ARG are the shared data argument(s). */
815 gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
818 gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
820 gimple_omp_set_body (p, body);
821 gimple_omp_parallel_set_clauses (p, clauses);
822 gimple_omp_parallel_set_child_fn (p, child_fn);
823 gimple_omp_parallel_set_data_arg (p, data_arg);
829 /* Build a GIMPLE_OMP_TASK statement.
831 BODY is sequence of statements which are executed by the explicit task.
832 CLAUSES, are the OMP parallel construct's clauses.
833 CHILD_FN is the function created for the parallel threads to execute.
834 DATA_ARG are the shared data argument(s).
835 COPY_FN is the optional function for firstprivate initialization.
836 ARG_SIZE and ARG_ALIGN are size and alignment of the data block. */
839 gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
840 tree data_arg, tree copy_fn, tree arg_size,
843 gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
845 gimple_omp_set_body (p, body);
846 gimple_omp_task_set_clauses (p, clauses);
847 gimple_omp_task_set_child_fn (p, child_fn);
848 gimple_omp_task_set_data_arg (p, data_arg);
849 gimple_omp_task_set_copy_fn (p, copy_fn);
850 gimple_omp_task_set_arg_size (p, arg_size);
851 gimple_omp_task_set_arg_align (p, arg_align);
857 /* Build a GIMPLE_OMP_SECTION statement for a sections statement.
859 BODY is the sequence of statements in the section. */
862 gimple_build_omp_section (gimple_seq body)
864 gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
866 gimple_omp_set_body (p, body);
872 /* Build a GIMPLE_OMP_MASTER statement.
874 BODY is the sequence of statements to be executed by just the master. */
877 gimple_build_omp_master (gimple_seq body)
879 gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
881 gimple_omp_set_body (p, body);
887 /* Build a GIMPLE_OMP_CONTINUE statement.
889 CONTROL_DEF is the definition of the control variable.
890 CONTROL_USE is the use of the control variable. */
893 gimple_build_omp_continue (tree control_def, tree control_use)
895 gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
896 gimple_omp_continue_set_control_def (p, control_def);
897 gimple_omp_continue_set_control_use (p, control_use);
901 /* Build a GIMPLE_OMP_ORDERED statement.
903 BODY is the sequence of statements inside a loop that will executed in
907 gimple_build_omp_ordered (gimple_seq body)
909 gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
911 gimple_omp_set_body (p, body);
917 /* Build a GIMPLE_OMP_RETURN statement.
918 WAIT_P is true if this is a non-waiting return. */
921 gimple_build_omp_return (bool wait_p)
923 gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
925 gimple_omp_return_set_nowait (p);
931 /* Build a GIMPLE_OMP_SECTIONS statement.
933 BODY is a sequence of section statements.
934 CLAUSES are any of the OMP sections contsruct's clauses: private,
935 firstprivate, lastprivate, reduction, and nowait. */
938 gimple_build_omp_sections (gimple_seq body, tree clauses)
940 gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
942 gimple_omp_set_body (p, body);
943 gimple_omp_sections_set_clauses (p, clauses);
949 /* Build a GIMPLE_OMP_SECTIONS_SWITCH. */
952 gimple_build_omp_sections_switch (void)
954 return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
958 /* Build a GIMPLE_OMP_SINGLE statement.
960 BODY is the sequence of statements that will be executed once.
961 CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
962 copyprivate, nowait. */
965 gimple_build_omp_single (gimple_seq body, tree clauses)
967 gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
969 gimple_omp_set_body (p, body);
970 gimple_omp_single_set_clauses (p, clauses);
976 /* Build a GIMPLE_OMP_ATOMIC_LOAD statement. */
979 gimple_build_omp_atomic_load (tree lhs, tree rhs)
981 gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
982 gimple_omp_atomic_load_set_lhs (p, lhs);
983 gimple_omp_atomic_load_set_rhs (p, rhs);
987 /* Build a GIMPLE_OMP_ATOMIC_STORE statement.
989 VAL is the value we are storing. */
992 gimple_build_omp_atomic_store (tree val)
994 gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
995 gimple_omp_atomic_store_set_val (p, val);
999 /* Build a GIMPLE_PREDICT statement. PREDICT is one of the predictors from
1000 predict.def, OUTCOME is NOT_TAKEN or TAKEN. */
1003 gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
1005 gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
1006 /* Ensure all the predictors fit into the lower bits of the subcode. */
1007 gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
1008 gimple_predict_set_predictor (p, predictor);
1009 gimple_predict_set_outcome (p, outcome);
#if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.  */

void
gimple_check_failed (const_gimple gs, const char *file, int line,
		     const char *function, enum gimple_code code,
		     enum tree_code subcode)
{
  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
      		  gimple_code_name[code],
		  tree_code_name[subcode],
		  gimple_code_name[gimple_code (gs)],
		  gs->gsbase.subcode > 0
		    ? tree_code_name[gs->gsbase.subcode]
		    : "",
		  function, trim_filename (file), line);
}
#endif /* ENABLE_GIMPLE_CHECKING */
1033 /* Allocate a new GIMPLE sequence in GC memory and return it. If
1034 there are free sequences in GIMPLE_SEQ_CACHE return one of those
1038 gimple_seq_alloc (void)
1040 gimple_seq seq = gimple_seq_cache;
1043 gimple_seq_cache = gimple_seq_cache->next_free;
1044 gcc_assert (gimple_seq_cache != seq);
1045 memset (seq, 0, sizeof (*seq));
1049 seq = (gimple_seq) ggc_alloc_cleared (sizeof (*seq));
1050 #ifdef GATHER_STATISTICS
1051 gimple_alloc_counts[(int) gimple_alloc_kind_seq]++;
1052 gimple_alloc_sizes[(int) gimple_alloc_kind_seq] += sizeof (*seq);
1059 /* Return SEQ to the free pool of GIMPLE sequences. */
1062 gimple_seq_free (gimple_seq seq)
1067 gcc_assert (gimple_seq_first (seq) == NULL);
1068 gcc_assert (gimple_seq_last (seq) == NULL);
1070 /* If this triggers, it's a sign that the same list is being freed
1072 gcc_assert (seq != gimple_seq_cache || gimple_seq_cache == NULL);
1074 /* Add SEQ to the pool of free sequences. */
1075 seq->next_free = gimple_seq_cache;
1076 gimple_seq_cache = seq;
1080 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
1081 *SEQ_P is NULL, a new sequence is allocated. */
1084 gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
1086 gimple_stmt_iterator si;
1092 *seq_p = gimple_seq_alloc ();
1094 si = gsi_last (*seq_p);
1095 gsi_insert_after (&si, gs, GSI_NEW_STMT);
1099 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
1100 NULL, a new sequence is allocated. */
1103 gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
1105 gimple_stmt_iterator si;
1111 *dst_p = gimple_seq_alloc ();
1113 si = gsi_last (*dst_p);
1114 gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
1118 /* Helper function of empty_body_p. Return true if STMT is an empty
1122 empty_stmt_p (gimple stmt)
1124 if (gimple_code (stmt) == GIMPLE_NOP)
1126 if (gimple_code (stmt) == GIMPLE_BIND)
1127 return empty_body_p (gimple_bind_body (stmt));
1132 /* Return true if BODY contains nothing but empty statements. */
1135 empty_body_p (gimple_seq body)
1137 gimple_stmt_iterator i;
1139 if (gimple_seq_empty_p (body))
1141 for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
1142 if (!empty_stmt_p (gsi_stmt (i))
1143 && !is_gimple_debug (gsi_stmt (i)))
1150 /* Perform a deep copy of sequence SRC and return the result. */
1153 gimple_seq_copy (gimple_seq src)
1155 gimple_stmt_iterator gsi;
1156 gimple_seq new_seq = gimple_seq_alloc ();
1159 for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
1161 stmt = gimple_copy (gsi_stmt (gsi));
1162 gimple_seq_add_stmt (&new_seq, stmt);
1169 /* Walk all the statements in the sequence SEQ calling walk_gimple_stmt
1170 on each one. WI is as in walk_gimple_stmt.
1172 If walk_gimple_stmt returns non-NULL, the walk is stopped, the
1173 value is stored in WI->CALLBACK_RESULT and the statement that
1174 produced the value is returned.
1176 Otherwise, all the statements are walked and NULL returned. */
1179 walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
1180 walk_tree_fn callback_op, struct walk_stmt_info *wi)
1182 gimple_stmt_iterator gsi;
1184 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
1186 tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
1189 /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
1192 wi->callback_result = ret;
1193 return gsi_stmt (gsi);
1198 wi->callback_result = NULL_TREE;
1204 /* Helper function for walk_gimple_stmt. Walk operands of a GIMPLE_ASM. */
1207 walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
1208 struct walk_stmt_info *wi)
1212 const char **oconstraints;
1214 const char *constraint;
1215 bool allows_mem, allows_reg, is_inout;
1217 noutputs = gimple_asm_noutputs (stmt);
1218 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
1223 for (i = 0; i < noutputs; i++)
1225 op = gimple_asm_output_op (stmt, i);
1226 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
1227 oconstraints[i] = constraint;
1228 parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg,
1231 wi->val_only = (allows_reg || !allows_mem);
1232 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
1237 n = gimple_asm_ninputs (stmt);
1238 for (i = 0; i < n; i++)
1240 op = gimple_asm_input_op (stmt, i);
1241 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
1242 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1243 oconstraints, &allows_mem, &allows_reg);
1246 wi->val_only = (allows_reg || !allows_mem);
1247 /* Although input "m" is not really a LHS, we need a lvalue. */
1248 wi->is_lhs = !wi->val_only;
1250 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
1258 wi->val_only = true;
1261 n = gimple_asm_nlabels (stmt);
1262 for (i = 0; i < n; i++)
1264 op = gimple_asm_label_op (stmt, i);
1265 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
1274 /* Helper function of WALK_GIMPLE_STMT. Walk every tree operand in
1275 STMT. CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.
1277 CALLBACK_OP is called on each operand of STMT via walk_tree.
1278 Additional parameters to walk_tree must be stored in WI. For each operand
1279 OP, walk_tree is called as:
1281 walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)
1283 If CALLBACK_OP returns non-NULL for an operand, the remaining
1284 operands are not scanned.
1286 The return value is that returned by the last call to walk_tree, or
1287 NULL_TREE if no CALLBACK_OP is specified. */
/* Walk every tree operand of STMT with CALLBACK_OP via walk_tree,
   dispatching on the statement code so that statement-specific operand
   accessors (assign, call, OpenMP tuples, ...) are used.  WI carries
   traversal state; WI->PSET (when WI is non-NULL) suppresses revisits.
   Stops at the first non-NULL callback result and returns it.
   NOTE(review): several interior lines (braces, case labels, early
   returns) are missing from this excerpt — confirm against the full
   file before relying on exact control flow.  */
1290 walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
1291 struct walk_stmt_info *wi)
1293 struct pointer_set_t *pset = (wi) ? wi->pset : NULL;
1295 tree ret = NULL_TREE;
1297 switch (gimple_code (stmt))
1300 /* Walk the RHS operands.  A formal temporary LHS may use a
1301 COMPONENT_REF RHS.  */
/* val_only tells the callback whether only rvalues are acceptable
   here; a register LHS or a multi-operand RHS forces value-only.  */
1303 wi->val_only = !is_gimple_reg (gimple_assign_lhs (stmt))
1304 || !gimple_assign_single_p (stmt);
/* Operand 0 is the LHS; RHS operands start at index 1.  */
1306 for (i = 1; i < gimple_num_ops (stmt); i++)
1308 ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
1314 /* Walk the LHS.  If the RHS is appropriate for a memory, we
1315 may use a COMPONENT_REF on the LHS.  */
1318 /* If the RHS has more than 1 operand, it is not appropriate
1320 wi->val_only = !is_gimple_mem_rhs (gimple_assign_rhs1 (stmt))
1321 || !gimple_assign_single_p (stmt);
1325 ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
1331 wi->val_only = true;
/* GIMPLE_CALL: walk static chain, callee expression, arguments,
   then the LHS (if any).  */
1340 ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
1344 ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
1348 for (i = 0; i < gimple_call_num_args (stmt); i++)
1350 ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
1359 ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
1368 ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
1374 case GIMPLE_EH_FILTER:
1375 ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
/* ASM statements have their own walker for inputs/outputs/clobbers.  */
1382 ret = walk_gimple_asm (stmt, callback_op, wi);
1387 case GIMPLE_OMP_CONTINUE:
1388 ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
1389 callback_op, wi, pset);
1393 ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
1394 callback_op, wi, pset);
1399 case GIMPLE_OMP_CRITICAL:
1400 ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
1406 case GIMPLE_OMP_FOR:
1407 ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
/* One index/initial/final/increment quadruple per collapsed loop.  */
1411 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1413 ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
1417 ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
1421 ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
1425 ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
1432 case GIMPLE_OMP_PARALLEL:
1433 ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
1437 ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
1441 ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
1447 case GIMPLE_OMP_TASK:
1448 ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
1452 ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
1456 ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
1460 ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
1464 ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
1468 ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
1474 case GIMPLE_OMP_SECTIONS:
1475 ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
1480 ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
1487 case GIMPLE_OMP_SINGLE:
1488 ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
1494 case GIMPLE_OMP_ATOMIC_LOAD:
1495 ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
1500 ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
1506 case GIMPLE_OMP_ATOMIC_STORE:
1507 ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
1513 /* Tuples that do not have operands.  */
1516 case GIMPLE_OMP_RETURN:
1517 case GIMPLE_PREDICT:
/* Default: generically walk all operands of statements that carry
   an operand vector (GSS_WITH_OPS / GSS_WITH_MEM_OPS layouts).  */
1522 enum gimple_statement_structure_enum gss;
1523 gss = gimple_statement_structure (stmt);
1524 if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
1525 for (i = 0; i < gimple_num_ops (stmt); i++)
1527 ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
1539 /* Walk the current statement in GSI (optionally using traversal state
1540 stored in WI). If WI is NULL, no state is kept during traversal.
1541 The callback CALLBACK_STMT is called. If CALLBACK_STMT indicates
1542 that it has handled all the operands of the statement, its return
1543 value is returned. Otherwise, the return value from CALLBACK_STMT
1544 is discarded and its operands are scanned.
1546 If CALLBACK_STMT is NULL or it didn't handle the operands,
1547 CALLBACK_OP is called on each operand of the statement via
1548 walk_gimple_op. If walk_gimple_op returns non-NULL for any
1549 operand, the remaining operands are not scanned. In this case, the
1550 return value from CALLBACK_OP is returned.
1552 In any other case, NULL_TREE is returned. */
/* Walk the statement at *GSI: invoke CALLBACK_STMT first (it may claim
   to have handled all operands), then CALLBACK_OP on each operand via
   walk_gimple_op, then recurse into any sub-sequences (GIMPLE_BIND,
   GIMPLE_TRY, OpenMP bodies, ...) with walk_gimple_seq.
   NOTE(review): some interior lines (declarations, braces, returns)
   are missing from this excerpt.  */
1555 walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
1556 walk_tree_fn callback_op, struct walk_stmt_info *wi)
1560 gimple stmt = gsi_stmt (*gsi);
/* Optionally keep input_location in sync with the statement being
   walked, for diagnostics emitted from the callbacks.  */
1565 if (wi && wi->want_locations && gimple_has_location (stmt))
1566 input_location = gimple_location (stmt);
1570 /* Invoke the statement callback.  Return if the callback handled
1571 all of STMT operands by itself.  */
1574 bool handled_ops = false;
1575 tree_ret = callback_stmt (gsi, &handled_ops, wi);
1579 /* If CALLBACK_STMT did not handle operands, it should not have
1580 a value to return.  */
1581 gcc_assert (tree_ret == NULL);
1583 /* Re-read stmt in case the callback changed it.  */
1584 stmt = gsi_stmt (*gsi);
1587 /* If CALLBACK_OP is defined, invoke it on every operand of STMT.  */
1590 tree_ret = walk_gimple_op (stmt, callback_op, wi);
1595 /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them.  */
1596 switch (gimple_code (stmt))
1599 ret = walk_gimple_seq (gimple_bind_body (stmt), callback_stmt,
/* A non-NULL result from the sub-walk is propagated through
   WI->callback_result (same pattern for every case below).  */
1602 return wi->callback_result;
1606 ret = walk_gimple_seq (gimple_catch_handler (stmt), callback_stmt,
1609 return wi->callback_result;
1612 case GIMPLE_EH_FILTER:
1613 ret = walk_gimple_seq (gimple_eh_filter_failure (stmt), callback_stmt,
1616 return wi->callback_result;
1620 ret = walk_gimple_seq (gimple_try_eval (stmt), callback_stmt, callback_op,
1623 return wi->callback_result;
1625 ret = walk_gimple_seq (gimple_try_cleanup (stmt), callback_stmt,
1628 return wi->callback_result;
1631 case GIMPLE_OMP_FOR:
1632 ret = walk_gimple_seq (gimple_omp_for_pre_body (stmt), callback_stmt,
1635 return wi->callback_result;
1638 case GIMPLE_OMP_CRITICAL:
1639 case GIMPLE_OMP_MASTER:
1640 case GIMPLE_OMP_ORDERED:
1641 case GIMPLE_OMP_SECTION:
1642 case GIMPLE_OMP_PARALLEL:
1643 case GIMPLE_OMP_TASK:
1644 case GIMPLE_OMP_SECTIONS:
1645 case GIMPLE_OMP_SINGLE:
1646 ret = walk_gimple_seq (gimple_omp_body (stmt), callback_stmt, callback_op,
1649 return wi->callback_result;
1652 case GIMPLE_WITH_CLEANUP_EXPR:
1653 ret = walk_gimple_seq (gimple_wce_cleanup (stmt), callback_stmt,
1656 return wi->callback_result;
/* Anything else must not contain nested statement sequences.  */
1660 gcc_assert (!gimple_has_substatements (stmt));
1668 /* Set sequence SEQ to be the GIMPLE body for function FNDECL. */
/* Store SEQ as the GIMPLE body of FNDECL, via its struct function.
   If FNDECL has no struct function yet, SEQ must be NULL.  */
1671 gimple_set_body (tree fndecl, gimple_seq seq)
1673 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
1676 /* If FNDECL still does not have a function structure associated
1677 with it, then it does not make sense for it to receive a
1679 gcc_assert (seq == NULL);
1682 fn->gimple_body = seq;
1686 /* Return the body of GIMPLE statements for function FNDECL. */
/* Return FNDECL's GIMPLE body, or NULL if it has no struct function.  */
1689 gimple_body (tree fndecl)
1691 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
1692 return fn ? fn->gimple_body : NULL;
1695 /* Return true when FNDECL has Gimple body either in unlowered
/* True when FNDECL has a body: either an unlowered GIMPLE sequence
   or an already-built CFG.  */
1698 gimple_has_body_p (tree fndecl)
1700 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
1701 return (gimple_body (fndecl) || (fn && fn->cfg));
1704 /* Detect flags from a GIMPLE_CALL. This is just like
1705 call_expr_flags, but for gimple tuples. */
/* Compute the ECF_* flag set for call statement STMT: from the callee
   decl when it is known, otherwise from the (pointer) type of the
   callee expression.  */
1708 gimple_call_flags (const_gimple stmt)
1711 tree decl = gimple_call_fndecl (stmt);
1715 flags = flags_from_decl_or_type (decl);
/* Indirect call: derive flags from the pointed-to function type.  */
1718 t = TREE_TYPE (gimple_call_fn (stmt));
1719 if (t && TREE_CODE (t) == POINTER_TYPE)
1720 flags = flags_from_decl_or_type (TREE_TYPE (t));
1729 /* Return true if GS is a copy assignment. */
/* True iff GS is an assignment whose RHS is a single GIMPLE value
   (i.e. a plain copy with no operator applied).  */
1732 gimple_assign_copy_p (gimple gs)
1734 return gimple_code (gs) == GIMPLE_ASSIGN
1735 && get_gimple_rhs_class (gimple_assign_rhs_code (gs))
1736 == GIMPLE_SINGLE_RHS
1737 && is_gimple_val (gimple_op (gs, 1));
1741 /* Return true if GS is a SSA_NAME copy assignment. */
/* True iff GS is a copy assignment between two SSA names.  */
1744 gimple_assign_ssa_name_copy_p (gimple gs)
1746 return (gimple_code (gs) == GIMPLE_ASSIGN
1747 && (get_gimple_rhs_class (gimple_assign_rhs_code (gs))
1748 == GIMPLE_SINGLE_RHS)
1749 && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
1750 && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
1754 /* Return true if GS is an assignment with a singleton RHS, i.e.,
1755 there is no operator associated with the assignment itself.
1756 Unlike gimple_assign_copy_p, this predicate returns true for
1757 any RHS operand, including those that perform an operation
1758 and do not have the semantics of a copy, such as COND_EXPR. */
/* True iff GS is an assignment with a singleton RHS (no operator on
   the RHS itself); unlike gimple_assign_copy_p, the single operand
   need not be a GIMPLE value.  */
1761 gimple_assign_single_p (gimple gs)
1763 return (gimple_code (gs) == GIMPLE_ASSIGN
1764 && get_gimple_rhs_class (gimple_assign_rhs_code (gs))
1765 == GIMPLE_SINGLE_RHS);
1768 /* Return true if GS is an assignment with a unary RHS, but the
1769 operator has no effect on the assigned value. The logic is adapted
1770 from STRIP_NOPS. This predicate is intended to be used in tuplifying
1771 instances in which STRIP_NOPS was previously applied to the RHS of
1774 NOTE: In the use cases that led to the creation of this function
1775 and of gimple_assign_single_p, it is typical to test for either
1776 condition and to proceed in the same manner. In each case, the
1777 assigned value is represented by the single RHS operand of the
1778 assignment. I suspect there may be cases where gimple_assign_copy_p,
1779 gimple_assign_single_p, or equivalent logic is used where a similar
1780 treatment of unary NOPs is appropriate. */
/* True iff GS is an assignment whose unary RHS operator (a conversion
   or NON_LVALUE_EXPR) does not change the value: LHS and RHS1 must
   share the same machine mode (STRIP_NOPS logic).  */
1783 gimple_assign_unary_nop_p (gimple gs)
1785 return (gimple_code (gs) == GIMPLE_ASSIGN
1786 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
1787 || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
1788 && gimple_assign_rhs1 (gs) != error_mark_node
1789 && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
1790 == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
1793 /* Set BB to be the basic block holding G. */
/* Record BB as the basic block containing STMT.  For labels, also
   update the label-to-block map used to speed up edge creation.
   NOTE(review): some interior lines (braces, an `if' guarding the
   UID assignment) are missing from this excerpt.  */
1796 gimple_set_bb (gimple stmt, basic_block bb)
1798 stmt->gsbase.bb = bb;
1800 /* If the statement is a label, add the label to block-to-labels map
1801 so that we can speed up edge creation for GIMPLE_GOTOs.  */
1802 if (cfun->cfg && gimple_code (stmt) == GIMPLE_LABEL)
1807 t = gimple_label_label (stmt);
1808 uid = LABEL_DECL_UID (t);
1811 unsigned old_len = VEC_length (basic_block, label_to_block_map);
/* Assign a fresh UID and grow the map geometrically (3/2) when the
   UID falls beyond the current vector length.  */
1812 LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
1813 if (old_len <= (unsigned) uid)
1815 unsigned new_len = 3 * uid / 2 + 1;
1817 VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
1822 VEC_replace (basic_block, label_to_block_map, uid, bb);
1827 /* Fold the expression computed by STMT. If the expression can be
1828 folded, return the folded result, otherwise return NULL. STMT is
/* Try to fold the expression computed by STMT (cond, assign or
   switch); return the folded tree or NULL if nothing folded.
   NOTE(review): default/return paths are missing from this excerpt.  */
1832 gimple_fold (const_gimple stmt)
1834 location_t loc = gimple_location (stmt);
1835 switch (gimple_code (stmt))
/* GIMPLE_COND: fold the comparison itself.  */
1838 return fold_binary_loc (loc, gimple_cond_code (stmt),
1840 gimple_cond_lhs (stmt),
1841 gimple_cond_rhs (stmt));
/* GIMPLE_ASSIGN: dispatch on the RHS class.  */
1844 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
1846 case GIMPLE_UNARY_RHS:
1847 return fold_unary_loc (loc, gimple_assign_rhs_code (stmt),
1848 TREE_TYPE (gimple_assign_lhs (stmt)),
1849 gimple_assign_rhs1 (stmt));
1850 case GIMPLE_BINARY_RHS:
1851 return fold_binary_loc (loc, gimple_assign_rhs_code (stmt),
1852 TREE_TYPE (gimple_assign_lhs (stmt)),
1853 gimple_assign_rhs1 (stmt),
1854 gimple_assign_rhs2 (stmt));
1855 case GIMPLE_SINGLE_RHS:
1856 return fold (gimple_assign_rhs1 (stmt));
/* GIMPLE_SWITCH: the index expression is the foldable part.  */
1862 return gimple_switch_index (stmt);
1875 /* Modify the RHS of the assignment pointed-to by GSI using the
1876 operands in the expression tree EXPR.
1878 NOTE: The statement pointed-to by GSI may be reallocated if it
1879 did not have enough operand slots.
1881 This function is useful to convert an existing tree expression into
1882 the flat representation used for the RHS of a GIMPLE assignment.
1883 It will reallocate memory as needed to expand or shrink the number
1884 of operand slots needed to represent EXPR.
1886 NOTE: If you find yourself building a tree and then calling this
1887 function, you are most certainly doing it the slow way. It is much
1888 better to build a new assignment or to use the function
1889 gimple_assign_set_rhs_with_ops, which does not require an
1890 expression tree to be built. */
/* Flatten tree expression EXPR into (subcode, op1, op2) and install it
   as the RHS of the assignment at *GSI; the statement may be
   reallocated if it lacks operand slots.  */
1893 gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
1895 enum tree_code subcode;
1898 extract_ops_from_tree (expr, &subcode, &op1, &op2);
1899 gimple_assign_set_rhs_with_ops (gsi, subcode, op1, op2);
1903 /* Set the RHS of assignment statement pointed-to by GSI to CODE with
1904 operands OP1 and OP2.
1906 NOTE: The statement pointed-to by GSI may be reallocated if it
1907 did not have enough operand slots. */
/* Replace the RHS of the assignment at *GSI with CODE applied to OP1
   (and OP2 when CODE is binary).  If the existing tuple has too few
   operand slots, a larger statement is allocated and swapped in via
   gsi_replace.
   NOTE(review): the excerpt omits the `stmt = new_stmt;' reassignment
   between gsi_replace and the LHS reset — confirm against the full
   file; without it the LHS would be set on the dead statement.  */
1910 gimple_assign_set_rhs_with_ops (gimple_stmt_iterator *gsi, enum tree_code code,
1913 unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
1914 gimple stmt = gsi_stmt (*gsi);
1916 /* If the new CODE needs more operands, allocate a new statement.  */
1917 if (gimple_num_ops (stmt) < new_rhs_ops + 1)
1919 tree lhs = gimple_assign_lhs (stmt);
1920 gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
1921 memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
1922 gsi_replace (gsi, new_stmt, true);
1925 /* The LHS needs to be reset as this also changes the SSA name
1927 gimple_assign_set_lhs (stmt, lhs);
1930 gimple_set_num_ops (stmt, new_rhs_ops + 1);
1931 gimple_set_subcode (stmt, code);
1932 gimple_assign_set_rhs1 (stmt, op1);
1933 if (new_rhs_ops > 1)
1934 gimple_assign_set_rhs2 (stmt, op2);
1938 /* Return the LHS of a statement that performs an assignment,
1939 either a GIMPLE_ASSIGN or a GIMPLE_CALL. Returns NULL_TREE
1940 for a call to a function that returns no value, or for a
1941 statement other than an assignment or a call. */
/* Return the LHS of an assignment or call statement; NULL_TREE for a
   valueless call or any other statement kind.  */
1944 gimple_get_lhs (const_gimple stmt)
1946 enum gimple_code code = gimple_code (stmt);
1948 if (code == GIMPLE_ASSIGN)
1949 return gimple_assign_lhs (stmt);
1950 else if (code == GIMPLE_CALL)
1951 return gimple_call_lhs (stmt);
1957 /* Set the LHS of a statement that performs an assignment,
1958 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
/* Set LHS as the left-hand side of STMT, which must be a GIMPLE_ASSIGN
   or a GIMPLE_CALL.  */
1961 gimple_set_lhs (gimple stmt, tree lhs)
1963 enum gimple_code code = gimple_code (stmt);
1965 if (code == GIMPLE_ASSIGN)
1966 gimple_assign_set_lhs (stmt, lhs);
1967 else if (code == GIMPLE_CALL)
1968 gimple_call_set_lhs (stmt, lhs);
1974 /* Return a deep copy of statement STMT. All the operands from STMT
1975 are reallocated and copied using unshare_expr. The DEF, USE, VDEF
1976 and VUSE operand arrays are set to empty in the new copy. */
/* Produce a deep copy of STMT: shallow-copy the tuple, deep-copy any
   nested statement sequences, unshare every tree operand, clear the
   DEF/USE SSA operand vectors, keep the virtual VDEF/VUSE, and mark
   the copy modified so SSA operands get rebuilt.
   NOTE(review): braces and a few case labels are missing from this
   excerpt.  */
1979 gimple_copy (gimple stmt)
1981 enum gimple_code code = gimple_code (stmt);
1982 unsigned num_ops = gimple_num_ops (stmt);
1983 gimple copy = gimple_alloc (code, num_ops);
1986 /* Shallow copy all the fields from STMT.  */
1987 memcpy (copy, stmt, gimple_size (code));
1989 /* If STMT has sub-statements, deep-copy them as well.  */
1990 if (gimple_has_substatements (stmt))
1995 switch (gimple_code (stmt))
1998 new_seq = gimple_seq_copy (gimple_bind_body (stmt));
1999 gimple_bind_set_body (copy, new_seq);
2000 gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
2001 gimple_bind_set_block (copy, gimple_bind_block (stmt));
2005 new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
2006 gimple_catch_set_handler (copy, new_seq);
2007 t = unshare_expr (gimple_catch_types (stmt));
2008 gimple_catch_set_types (copy, t);
2011 case GIMPLE_EH_FILTER:
2012 new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
2013 gimple_eh_filter_set_failure (copy, new_seq);
2014 t = unshare_expr (gimple_eh_filter_types (stmt));
2015 gimple_eh_filter_set_types (copy, t);
2019 new_seq = gimple_seq_copy (gimple_try_eval (stmt));
2020 gimple_try_set_eval (copy, new_seq);
2021 new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
2022 gimple_try_set_cleanup (copy, new_seq);
2025 case GIMPLE_OMP_FOR:
2026 new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
2027 gimple_omp_for_set_pre_body (copy, new_seq);
2028 t = unshare_expr (gimple_omp_for_clauses (stmt));
2029 gimple_omp_for_set_clauses (copy, t);
/* The iteration array is shared by the memcpy above; allocate a
   fresh one so the copy owns its own per-loop records.  */
2030 copy->gimple_omp_for.iter
2031 = GGC_NEWVEC (struct gimple_omp_for_iter,
2032 gimple_omp_for_collapse (stmt));
2033 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2035 gimple_omp_for_set_cond (copy, i,
2036 gimple_omp_for_cond (stmt, i));
2037 gimple_omp_for_set_index (copy, i,
2038 gimple_omp_for_index (stmt, i));
2039 t = unshare_expr (gimple_omp_for_initial (stmt, i));
2040 gimple_omp_for_set_initial (copy, i, t);
2041 t = unshare_expr (gimple_omp_for_final (stmt, i));
2042 gimple_omp_for_set_final (copy, i, t);
2043 t = unshare_expr (gimple_omp_for_incr (stmt, i));
2044 gimple_omp_for_set_incr (copy, i, t);
2048 case GIMPLE_OMP_PARALLEL:
2049 t = unshare_expr (gimple_omp_parallel_clauses (stmt));
2050 gimple_omp_parallel_set_clauses (copy, t);
2051 t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
2052 gimple_omp_parallel_set_child_fn (copy, t);
2053 t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
2054 gimple_omp_parallel_set_data_arg (copy, t);
2057 case GIMPLE_OMP_TASK:
2058 t = unshare_expr (gimple_omp_task_clauses (stmt));
2059 gimple_omp_task_set_clauses (copy, t);
2060 t = unshare_expr (gimple_omp_task_child_fn (stmt));
2061 gimple_omp_task_set_child_fn (copy, t);
2062 t = unshare_expr (gimple_omp_task_data_arg (stmt));
2063 gimple_omp_task_set_data_arg (copy, t);
2064 t = unshare_expr (gimple_omp_task_copy_fn (stmt));
2065 gimple_omp_task_set_copy_fn (copy, t);
2066 t = unshare_expr (gimple_omp_task_arg_size (stmt));
2067 gimple_omp_task_set_arg_size (copy, t);
2068 t = unshare_expr (gimple_omp_task_arg_align (stmt));
2069 gimple_omp_task_set_arg_align (copy, t);
2072 case GIMPLE_OMP_CRITICAL:
2073 t = unshare_expr (gimple_omp_critical_name (stmt));
2074 gimple_omp_critical_set_name (copy, t);
2077 case GIMPLE_OMP_SECTIONS:
2078 t = unshare_expr (gimple_omp_sections_clauses (stmt));
2079 gimple_omp_sections_set_clauses (copy, t);
2080 t = unshare_expr (gimple_omp_sections_control (stmt));
2081 gimple_omp_sections_set_control (copy, t);
2084 case GIMPLE_OMP_SINGLE:
2085 case GIMPLE_OMP_SECTION:
2086 case GIMPLE_OMP_MASTER:
2087 case GIMPLE_OMP_ORDERED:
2089 new_seq = gimple_seq_copy (gimple_omp_body (stmt));
2090 gimple_omp_set_body (copy, new_seq);
2093 case GIMPLE_WITH_CLEANUP_EXPR:
2094 new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
2095 gimple_wce_set_cleanup (copy, new_seq);
2103 /* Make copy of operands.  */
2106 for (i = 0; i < num_ops; i++)
2107 gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));
2109 /* Clear out SSA operand vectors on COPY.  */
2110 if (gimple_has_ops (stmt))
2112 gimple_set_def_ops (copy, NULL);
2113 gimple_set_use_ops (copy, NULL);
2116 if (gimple_has_mem_ops (stmt))
2118 gimple_set_vdef (copy, gimple_vdef (stmt));
2119 gimple_set_vuse (copy, gimple_vuse (stmt));
2122 /* SSA operands need to be updated.  */
2123 gimple_set_modified (copy, true);
2130 /* Set the MODIFIED flag to MODIFIEDP, iff the gimple statement G has
2131 a MODIFIED field. */
/* Set the MODIFIED flag of S to MODIFIEDP when S carries operands.
   NOTE(review): the excerpt omits the full condition guarding the
   noreturn-call bookkeeping below — presumably it also tests
   MODIFIEDP and cfun; confirm against the full file.  */
2134 gimple_set_modified (gimple s, bool modifiedp)
2136 if (gimple_has_ops (s))
2138 s->gsbase.modified = (unsigned) modifiedp;
/* Modified noreturn calls are queued so the CFG can be fixed up.  */
2142 && is_gimple_call (s)
2143 && gimple_call_noreturn_p (s))
2144 VEC_safe_push (gimple, gc, MODIFIED_NORETURN_CALLS (cfun), s);
2149 /* Return true if statement S has side-effects. We consider a
2150 statement to have side effects if:
2152 - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
2153 - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS. */
/* True if statement S has side effects: a call not marked
   ECF_CONST/ECF_PURE, a looping const/pure call, volatile operands,
   or any operand with TREE_SIDE_EFFECTS set.  Debug statements never
   have side effects.
   NOTE(review): `return' statements after the checks are missing from
   this excerpt.  */
2156 gimple_has_side_effects (const_gimple s)
2160 if (is_gimple_debug (s))
2163 /* We don't have to scan the arguments to check for
2164 volatile arguments, though, at present, we still
2165 do a scan to check for TREE_SIDE_EFFECTS.  */
2166 if (gimple_has_volatile_ops (s))
2169 if (is_gimple_call (s))
2171 unsigned nargs = gimple_call_num_args (s);
2173 if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
2175 else if (gimple_call_flags (s) & ECF_LOOPING_CONST_OR_PURE)
2176 /* An infinite loop is considered a side effect.  */
/* The asserts below check the invariant that any operand with
   TREE_SIDE_EFFECTS also made the statement volatile.  */
2179 if (gimple_call_lhs (s)
2180 && TREE_SIDE_EFFECTS (gimple_call_lhs (s)))
2182 gcc_assert (gimple_has_volatile_ops (s));
2186 if (TREE_SIDE_EFFECTS (gimple_call_fn (s)))
2189 for (i = 0; i < nargs; i++)
2190 if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i)))
2192 gcc_assert (gimple_has_volatile_ops (s));
/* Non-call statements: scan every operand.  */
2200 for (i = 0; i < gimple_num_ops (s); i++)
2201 if (TREE_SIDE_EFFECTS (gimple_op (s, i)))
2203 gcc_assert (gimple_has_volatile_ops (s));
2211 /* Return true if the RHS of statement S has side effects.
2212 We may use it to determine if it is admissible to replace
2213 an assignment or call with a copy of a previously-computed
2214 value. In such cases, side-effects due to the LHS are
/* True if the right-hand side of S has side effects, ignoring the
   LHS (so a volatile store target alone does not count).
   NOTE(review): `return' statements after the checks are missing from
   this excerpt.  */
2218 gimple_rhs_has_side_effects (const_gimple s)
2222 if (is_gimple_call (s))
2224 unsigned nargs = gimple_call_num_args (s);
2226 if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
2229 /* We cannot use gimple_has_volatile_ops here,
2230 because we must ignore a volatile LHS.  */
2231 if (TREE_SIDE_EFFECTS (gimple_call_fn (s))
2232 || TREE_THIS_VOLATILE (gimple_call_fn (s)))
2234 gcc_assert (gimple_has_volatile_ops (s));
2238 for (i = 0; i < nargs; i++)
2239 if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i))
2240 || TREE_THIS_VOLATILE (gimple_call_arg (s, i)))
2245 else if (is_gimple_assign (s))
2247 /* Skip the first operand, the LHS.  */
2248 for (i = 1; i < gimple_num_ops (s); i++)
2249 if (TREE_SIDE_EFFECTS (gimple_op (s, i))
2250 || TREE_THIS_VOLATILE (gimple_op (s, i)))
2252 gcc_assert (gimple_has_volatile_ops (s));
/* Debug statements are side-effect free by construction.  */
2256 else if (is_gimple_debug (s))
2260 /* For statements without an LHS, examine all arguments.  */
2261 for (i = 0; i < gimple_num_ops (s); i++)
2262 if (TREE_SIDE_EFFECTS (gimple_op (s, i))
2263 || TREE_THIS_VOLATILE (gimple_op (s, i)))
2265 gcc_assert (gimple_has_volatile_ops (s));
2274 /* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
2275 Return true if S can trap. If INCLUDE_LHS is true and S is a
2276 GIMPLE_ASSIGN, the LHS of the assignment is also checked.
2277 Otherwise, only the RHS of the assignment is checked. */
/* Shared worker for gimple_could_trap_p / gimple_assign_rhs_could_trap_p.
   Returns true if S can trap: any (relevant) tree operand could trap,
   a volatile asm, a call to an unknown or weak function, or an
   arithmetic RHS operation that operation_could_trap_p flags (float,
   trapping-overflow integer, or possibly-zero divisor).
   When INCLUDE_LHS is false, operand 0 of an assignment is skipped.  */
2280 gimple_could_trap_p_1 (gimple s, bool include_lhs)
2283 tree t, div = NULL_TREE;
2286 start = (is_gimple_assign (s) && !include_lhs) ? 1 : 0;
2288 for (i = start; i < gimple_num_ops (s); i++)
2289 if (tree_could_trap_p (gimple_op (s, i)))
2292 switch (gimple_code (s))
2295 return gimple_asm_volatile_p (s);
2298 t = gimple_call_fndecl (s);
2299 /* Assume that calls to weak functions may trap.  */
2300 if (!t || !DECL_P (t) || DECL_WEAK (t))
2305 t = gimple_expr_type (s);
2306 op = gimple_assign_rhs_code (s);
2307 if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
2308 div = gimple_assign_rhs2 (s);
2309 return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
2310 (INTEGRAL_TYPE_P (t)
2311 && TYPE_OVERFLOW_TRAPS (t)),
2323 /* Return true if statement S can trap. */
/* True if statement S (including its LHS) can trap.  */
2326 gimple_could_trap_p (gimple s)
2328 return gimple_could_trap_p_1 (s, true);
2332 /* Return true if RHS of a GIMPLE_ASSIGN S can trap. */
/* True if the RHS of GIMPLE_ASSIGN S can trap (the LHS is ignored).  */
2335 gimple_assign_rhs_could_trap_p (gimple s)
2337 gcc_assert (is_gimple_assign (s));
2338 return gimple_could_trap_p_1 (s, false);
2342 /* Print debugging information for gimple stmts generated. */
/* Dump per-kind GIMPLE tuple allocation counts and byte totals to
   stderr; prints a stub message when GATHER_STATISTICS is off.  */
2345 dump_gimple_statistics (void)
2347 #ifdef GATHER_STATISTICS
2348 int i, total_tuples = 0, total_bytes = 0;
2350 fprintf (stderr, "\nGIMPLE statements\n");
2351 fprintf (stderr, "Kind                   Stmts      Bytes\n");
2352 fprintf (stderr, "---------------------------------------\n");
2353 for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
2355 fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
2356 gimple_alloc_counts[i], gimple_alloc_sizes[i]);
2357 total_tuples += gimple_alloc_counts[i];
2358 total_bytes += gimple_alloc_sizes[i];
2360 fprintf (stderr, "---------------------------------------\n");
2361 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
2362 fprintf (stderr, "---------------------------------------\n");
2364 fprintf (stderr, "No gimple statistics\n");
2369 /* Return the number of operands needed on the RHS of a GIMPLE
2370 assignment for an expression with tree code CODE. */
/* Return the number of RHS operand slots needed for tree code CODE:
   1 for unary/single RHS, 2 for binary RHS.
   NOTE(review): the `return 1;'/`return 2;' lines and the fallthrough
   for invalid classes are missing from this excerpt.  */
2373 get_gimple_rhs_num_ops (enum tree_code code)
2375 enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
2377 if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
2379 else if (rhs_class == GIMPLE_BINARY_RHS)
/* Table mapping every tree code to its gimple_rhs_class, generated by
   expanding DEFTREECODE over all-tree.def: unary/binary/comparison
   classes map directly; constants, declarations, references, and the
   explicitly listed expression codes are GIMPLE_SINGLE_RHS; TRUTH_*
   operators are treated as binary (or unary for TRUTH_NOT_EXPR);
   everything else is GIMPLE_INVALID_RHS.  */
2385 #define DEFTREECODE(SYM, STRING, TYPE, NARGS) \
2387 ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS \
2388 : ((TYPE) == tcc_binary \
2389 || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS \
2390 : ((TYPE) == tcc_constant \
2391 || (TYPE) == tcc_declaration \
2392 || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS \
2393 : ((SYM) == TRUTH_AND_EXPR \
2394 || (SYM) == TRUTH_OR_EXPR \
2395 || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS \
2396 : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS \
2397 : ((SYM) == COND_EXPR \
2398 || (SYM) == CONSTRUCTOR \
2399 || (SYM) == OBJ_TYPE_REF \
2400 || (SYM) == ASSERT_EXPR \
2401 || (SYM) == ADDR_EXPR \
2402 || (SYM) == WITH_SIZE_EXPR \
2403 || (SYM) == SSA_NAME \
2404 || (SYM) == POLYNOMIAL_CHREC \
2405 || (SYM) == DOT_PROD_EXPR \
2406 || (SYM) == VEC_COND_EXPR \
2407 || (SYM) == REALIGN_LOAD_EXPR) ? GIMPLE_SINGLE_RHS \
2408 : GIMPLE_INVALID_RHS),
2409 #define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,
2411 const unsigned char gimple_rhs_class_table[] = {
2412 #include "all-tree.def"
2416 #undef END_OF_BASE_TREE_CODES
2418 /* For the definitive definition of GIMPLE, see doc/tree-ssa.texi. */
2420 /* Validation of GIMPLE expressions. */
2422 /* Return true if OP is an acceptable tree node to be used as a GIMPLE
/* True iff OP is non-NULL and its tree code classifies as a
   single-RHS GIMPLE operand.  */
2426 is_gimple_operand (const_tree op)
2428 return op && get_gimple_rhs_class (TREE_CODE (op)) == GIMPLE_SINGLE_RHS;
2431 /* Returns true iff T is a valid RHS for an assignment to a renamed
2432 user -- or front-end generated artificial -- variable. */
/* True iff T's tree code is any valid GIMPLE RHS class (i.e. not
   GIMPLE_INVALID_RHS).  */
2435 is_gimple_reg_rhs (tree t)
2437 return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
2440 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
2441 LHS, or for a call argument. */
/* True iff T is a valid RHS for an assignment to an un-renamed LHS or
   a call argument: register types must be GIMPLE values; other types
   may also be lvalues.  */
2444 is_gimple_mem_rhs (tree t)
2446 /* If we're dealing with a renamable type, either source or dest must be
2447 a renamed variable.  */
2448 if (is_gimple_reg_type (TREE_TYPE (t)))
2449 return is_gimple_val (t);
2451 return is_gimple_val (t) || is_gimple_lvalue (t);
2454 /* Return true if T is a valid LHS for a GIMPLE assignment expression. */
/* True iff T can appear as the LHS of a GIMPLE assignment: anything
   addressable, plus WITH_SIZE_EXPR and BIT_FIELD_REF.  */
2457 is_gimple_lvalue (tree t)
2459 return (is_gimple_addressable (t)
2460 || TREE_CODE (t) == WITH_SIZE_EXPR
2461 /* These are complex lvalues, but don't have addresses, so they
2463 || TREE_CODE (t) == BIT_FIELD_REF);
2466 /* Return true if T is a GIMPLE condition. */
/* True iff T may be used as a GIMPLE condition: a GIMPLE value, or a
   non-trapping comparison whose two operands are GIMPLE values.  */
2469 is_gimple_condexpr (tree t)
2471 return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
2472 && !tree_could_trap_p (t)
2473 && is_gimple_val (TREE_OPERAND (t, 0))
2474 && is_gimple_val (TREE_OPERAND (t, 1))));
2477 /* Return true if T is something whose address can be taken. */
/* True iff T is something whose address can be taken: an identifier,
   a handled component reference, or an indirect reference.  */
2480 is_gimple_addressable (tree t)
2482 return (is_gimple_id (t) || handled_component_p (t) || INDIRECT_REF_P (t));
2485 /* Return true if T is a valid gimple constant. */
/* True iff T is a valid GIMPLE constant.
   NOTE(review): the case labels for the scalar constant codes and the
   default return are missing from this excerpt.  */
2488 is_gimple_constant (const_tree t)
2490 switch (TREE_CODE (t))
2500 /* Vector constant constructors are gimple invariant.  */
2502 if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2503 return TREE_CONSTANT (t);
2512 /* Return true if T is a gimple address. */
/* True iff T is a GIMPLE address: an ADDR_EXPR whose operand, after
   stripping handled components (with GIMPLE-value array indices),
   is a constant, an indirect ref, or one of the accepted decl codes.
   NOTE(review): the case labels/returns of the final switch are
   missing from this excerpt.  */
2515 is_gimple_address (const_tree t)
2519 if (TREE_CODE (t) != ADDR_EXPR)
2522 op = TREE_OPERAND (t, 0);
2523 while (handled_component_p (op))
2525 if ((TREE_CODE (op) == ARRAY_REF
2526 || TREE_CODE (op) == ARRAY_RANGE_REF)
2527 && !is_gimple_val (TREE_OPERAND (op, 1)))
2530 op = TREE_OPERAND (op, 0);
2533 if (CONSTANT_CLASS_P (op) || INDIRECT_REF_P (op))
2536 switch (TREE_CODE (op))
2551 /* Strip out all handled components that produce invariant
/* Strip handled components from OP as long as each step yields an
   invariant reference: array refs need a constant index and no
   variable element size/offset operands; component refs must not
   have a variable offset.  Returns the stripped tree (NULL paths
   exist for the failing cases, omitted in this excerpt).  */
2555 strip_invariant_refs (const_tree op)
2557 while (handled_component_p (op))
2559 switch (TREE_CODE (op))
2562 case ARRAY_RANGE_REF:
2563 if (!is_gimple_constant (TREE_OPERAND (op, 1))
2564 || TREE_OPERAND (op, 2) != NULL_TREE
2565 || TREE_OPERAND (op, 3) != NULL_TREE)
/* COMPONENT_REF: a non-NULL operand 2 means a variable offset.  */
2570 if (TREE_OPERAND (op, 2) != NULL_TREE)
2576 op = TREE_OPERAND (op, 0);
2582 /* Return true if T is a gimple invariant address. */
/* True iff T is an ADDR_EXPR whose stripped base is a constant or a
   decl with an invariant address.  */
2585 is_gimple_invariant_address (const_tree t)
2589 if (TREE_CODE (t) != ADDR_EXPR)
2592 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2594 return op && (CONSTANT_CLASS_P (op) || decl_address_invariant_p (op));
2597 /* Return true if T is a gimple invariant address at IPA level
2598 (so addresses of variables on stack are not allowed). */
/* Like is_gimple_invariant_address, but invariant across the whole
   program (IPA level) — stack-variable addresses do not qualify.  */
2601 is_gimple_ip_invariant_address (const_tree t)
2605 if (TREE_CODE (t) != ADDR_EXPR)
2608 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2610 return op && (CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op));
2613 /* Return true if T is a GIMPLE minimal invariant. It's a restricted
2614 form of function invariant. */
/* True iff T is a GIMPLE minimal invariant: an invariant address or a
   GIMPLE constant.  */
2617 is_gimple_min_invariant (const_tree t)
2619 if (TREE_CODE (t) == ADDR_EXPR)
2620 return is_gimple_invariant_address (t);
2622 return is_gimple_constant (t);
2625 /* Return true if T is a GIMPLE interprocedural invariant. It's a restricted
2626 form of gimple minimal invariant. */
/* True iff T is invariant at the interprocedural level: an IPA
   invariant address or a GIMPLE constant.  */
2629 is_gimple_ip_invariant (const_tree t)
2631 if (TREE_CODE (t) == ADDR_EXPR)
2632 return is_gimple_ip_invariant_address (t);
2634 return is_gimple_constant (t);
2637 /* Return true if T looks like a valid GIMPLE statement. */
/* True iff tree T looks like a valid GIMPLE statement.
   NOTE(review): most case labels and the `return true/false' lines
   are missing from this excerpt.  */
2640 is_gimple_stmt (tree t)
2642 const enum tree_code code = TREE_CODE (t);
2647 /* The only valid NOP_EXPR is the empty statement.  */
2648 return IS_EMPTY_STMT (t);
2652 /* These are only valid if they're void.  */
2653 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
2659 case CASE_LABEL_EXPR:
2660 case TRY_CATCH_EXPR:
2661 case TRY_FINALLY_EXPR:
2662 case EH_FILTER_EXPR:
2665 case STATEMENT_LIST:
2675 /* These are always void.  */
2681 /* These are valid regardless of their type.  */
2689 /* Return true if T is a variable. */
/* True iff T is a variable: VAR_DECL, PARM_DECL, RESULT_DECL, or an
   SSA_NAME.  */
2692 is_gimple_variable (tree t)
2694 return (TREE_CODE (t) == VAR_DECL
2695 || TREE_CODE (t) == PARM_DECL
2696 || TREE_CODE (t) == RESULT_DECL
2697 || TREE_CODE (t) == SSA_NAME);
2700 /* Return true if T is a GIMPLE identifier (something with an address). */
/* True iff T is a GIMPLE identifier — anything with an address:
   a variable, a function or label decl, a CONST_DECL, or a string
   constant.  */
2703 is_gimple_id (tree t)
2705 return (is_gimple_variable (t)
2706 || TREE_CODE (t) == FUNCTION_DECL
2707 || TREE_CODE (t) == LABEL_DECL
2708 || TREE_CODE (t) == CONST_DECL
2709 /* Allow string constants, since they are addressable.  */
2710 || TREE_CODE (t) == STRING_CST);
2713 /* Return true if TYPE is a suitable type for a scalar register variable. */
/* True iff TYPE can live in a scalar register: any non-aggregate.  */
2716 is_gimple_reg_type (tree type)
2718 return !AGGREGATE_TYPE_P (type);
2721 /* Return true if T is a non-aggregate register variable. */
/* True iff T is a non-aggregate register variable: a renamable,
   non-volatile variable of register type that need not live in
   memory, excluding hard-register VAR_DECLs; complex/vector decls
   additionally require DECL_GIMPLE_REG_P.
   NOTE(review): intermediate `return false;' lines are missing from
   this excerpt.  */
2724 is_gimple_reg (tree t)
2726 if (TREE_CODE (t) == SSA_NAME)
2727 t = SSA_NAME_VAR (t);
2729 if (!is_gimple_variable (t))
2732 if (!is_gimple_reg_type (TREE_TYPE (t)))
2735 /* A volatile decl is not acceptable because we can't reuse it as
2736 needed.  We need to copy it into a temp first.  */
2737 if (TREE_THIS_VOLATILE (t))
2740 /* We define "registers" as things that can be renamed as needed,
2741 which with our infrastructure does not apply to memory.  */
2742 if (needs_to_live_in_memory (t))
2745 /* Hard register variables are an interesting case.  For those that
2746 are call-clobbered, we don't know where all the calls are, since
2747 we don't (want to) take into account which operations will turn
2748 into libcalls at the rtl level.  For those that are call-saved,
2749 we don't currently model the fact that calls may in fact change
2750 global hard registers, nor do we examine ASM_CLOBBERS at the tree
2751 level, and so miss variable changes that might imply.  All around,
2752 it seems safest to not do too much optimization with these at the
2753 tree level at all.  We'll have to rely on the rtl optimizers to
2754 clean this up, as there we've got all the appropriate bits exposed.  */
2755 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2758 /* Complex and vector values must have been put into SSA-like form.
2759 That is, no assignments to the individual components.  */
2760 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
2761 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2762 return DECL_GIMPLE_REG_P (t);
2768 /* Return true if T is a GIMPLE variable whose address is not needed. */
/* True iff T is a GIMPLE variable whose address is never needed
   (it does not have to live in memory).  */
2771 is_gimple_non_addressable (tree t)
2773 if (TREE_CODE (t) == SSA_NAME)
2774 t = SSA_NAME_VAR (t);
2776 return (is_gimple_variable (t) && ! needs_to_live_in_memory (t));
2779 /* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant. */
/* True iff T is a GIMPLE rvalue: an identifier or a minimal
   invariant.  Loads from volatiles/memory variables are rejected so
   they stay explicit statements.  */
2782 is_gimple_val (tree t)
2784 /* Make loads from volatiles and memory vars explicit.  */
2785 if (is_gimple_variable (t)
2786 && is_gimple_reg_type (TREE_TYPE (t))
2787 && !is_gimple_reg (t))
2790 return (is_gimple_variable (t) || is_gimple_min_invariant (t));
2793 /* Similarly, but accept hard registers as inputs to asm statements. */
2796 is_gimple_asm_val (tree t)
/* Hard register variables are valid asm operands even though they are
   not ordinary GIMPLE values.  */
2798 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2801 return is_gimple_val (t);
2804 /* Return true if T is a GIMPLE minimal lvalue. */
2807 is_gimple_min_lval (tree t)
/* Strip invariant reference wrappers first; a NULL result means T
   is not an lvalue candidate at all.  */
2809 if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
/* A minimal lvalue is a bare identifier or a single-level
   pointer dereference.  */
2811 return (is_gimple_id (t) || TREE_CODE (t) == INDIRECT_REF);
2814 /* Return true if T is a typecast operation. */
2817 is_gimple_cast (tree t)
/* Covers NOP/CONVERT-style conversions plus float-to-integer
   truncation.  */
2819 return (CONVERT_EXPR_P (t)
2820 || TREE_CODE (t) == FIX_TRUNC_EXPR);
2823 /* Return true if T is a valid function operand of a CALL_EXPR. */
2826 is_gimple_call_addr (tree t)
/* Either a virtual-call descriptor (OBJ_TYPE_REF) or any GIMPLE value
   (function address, function pointer variable, ...).  */
2828 return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
2831 /* If T makes a function call, return the corresponding CALL_EXPR operand.
2832 Otherwise, return NULL_TREE. */
2835 get_call_expr_in (tree t)
/* Look through an assignment: the call, if any, is on the RHS.  */
2837 if (TREE_CODE (t) == MODIFY_EXPR)
2838 t = TREE_OPERAND (t, 1)
/* Look through a WITH_SIZE_EXPR wrapper around the call.  */
2839 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2840 t = TREE_OPERAND (t, 0);
2841 if (TREE_CODE (t) == CALL_EXPR)
2847 /* Given a memory reference expression T, return its base address.
2848 The base address of a memory reference expression is the main
2849 object being referenced. For instance, the base address for
2850 'array[i].fld[j]' is 'array'. You can think of this as stripping
2851 away the offset part from a memory address.
2853 This function calls handled_component_p to strip away all the inner
2854 parts of the memory reference until it reaches the base object. */
2857 get_base_address (tree t)
/* Peel off component refs (array indexing, field access, ...) until
   only the outermost base object remains.  */
2859 while (handled_component_p (t))
2860 t = TREE_OPERAND (t, 0);
/* The stripped base is only returned for these forms; presumably other
   forms yield NULL_TREE in the elided tail -- TODO confirm.  */
2863 || TREE_CODE (t) == STRING_CST
2864 || TREE_CODE (t) == CONSTRUCTOR
2865 || INDIRECT_REF_P (t))
/* Recompute the TREE_SIDE_EFFECTS bit of T from its own volatility and
   the side-effect bits of its immediate operands (one level only; the
   operands' bits are assumed up to date).  */
2872 recalculate_side_effects (tree t)
2874 enum tree_code code = TREE_CODE (t);
2875 int len = TREE_OPERAND_LENGTH (t);
2878 switch (TREE_CODE_CLASS (code))
2880 case tcc_expression:
/* Increment/decrement expressions modify their operand, so they have
   side effects regardless of the operand bits.  */
2886 case PREDECREMENT_EXPR:
2887 case PREINCREMENT_EXPR:
2888 case POSTDECREMENT_EXPR:
2889 case POSTINCREMENT_EXPR:
2890 /* All of these have side-effects, no matter what their
2899 case tcc_comparison: /* a comparison expression */
2900 case tcc_unary: /* a unary arithmetic expression */
2901 case tcc_binary: /* a binary arithmetic expression */
2902 case tcc_reference: /* a reference */
2903 case tcc_vl_exp: /* a function call */
/* Start from T's own volatility, then OR in each operand's bit.  */
2904 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2905 for (i = 0; i < len; ++i)
2907 tree op = TREE_OPERAND (t, i);
2908 if (op && TREE_SIDE_EFFECTS (op))
2909 TREE_SIDE_EFFECTS (t) = 1;
2914 /* No side-effects. */
2922 /* Canonicalize a tree T for use in a COND_EXPR as conditional. Returns
2923 a canonicalized tree that is valid for a COND_EXPR or NULL_TREE, if
2924 we failed to create one. */
2927 canonicalize_cond_expr_cond (tree t)
2929 /* For (bool)x use x != 0. */
2930 if (TREE_CODE (t) == NOP_EXPR
2931 && TREE_TYPE (t) == boolean_type_node)
2933 tree top0 = TREE_OPERAND (t, 0);
2934 t = build2 (NE_EXPR, TREE_TYPE (t),
2935 top0, build_int_cst (TREE_TYPE (top0), 0));
2937 /* For !x use x == 0. */
2938 else if (TREE_CODE (t) == TRUTH_NOT_EXPR)
2940 tree top0 = TREE_OPERAND (t, 0);
2941 t = build2 (EQ_EXPR, TREE_TYPE (t),
2942 top0, build_int_cst (TREE_TYPE (top0), 0));
2944 /* For cmp ? 1 : 0 use cmp. */
2945 else if (TREE_CODE (t) == COND_EXPR
2946 && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
2947 && integer_onep (TREE_OPERAND (t, 1))
2948 && integer_zerop (TREE_OPERAND (t, 2)))
2950 tree top0 = TREE_OPERAND (t, 0);
/* Rebuild the comparison with the COND_EXPR's (boolean) type.  */
2951 t = build2 (TREE_CODE (top0), TREE_TYPE (t),
2952 TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
/* Only hand back T if it is actually a valid GIMPLE condition;
   otherwise the caller gets NULL_TREE (in the elided tail).  */
2955 if (is_gimple_condexpr (t))
2961 /* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
2962 the positions marked by the set ARGS_TO_SKIP. */
2965 gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
2968 tree fn = gimple_call_fn (stmt);
2969 int nargs = gimple_call_num_args (stmt);
/* Pre-size the vector to NARGS; quick_push below never reallocates.  */
2970 VEC(tree, heap) *vargs = VEC_alloc (tree, heap, nargs);
/* Keep only the arguments whose position is NOT in ARGS_TO_SKIP.  */
2973 for (i = 0; i < nargs; i++)
2974 if (!bitmap_bit_p (args_to_skip, i))
2975 VEC_quick_push (tree, vargs, gimple_call_arg (stmt, i));
2977 new_stmt = gimple_build_call_vec (fn, vargs);
2978 VEC_free (tree, heap, vargs);
2979 if (gimple_call_lhs (stmt))
2980 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
/* Copy the virtual SSA operands so alias information is preserved.
   NOTE(review): in-tree this copy is guarded on the statement having
   memory operands -- the guard is not visible here, confirm.  */
2982 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
2983 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
2985 gimple_set_block (new_stmt, gimple_block (stmt));
2986 if (gimple_has_location (stmt))
2987 gimple_set_location (new_stmt, gimple_location (stmt));
2989 /* Carry all the flags to the new GIMPLE_CALL. */
2990 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
2991 gimple_call_set_tail (new_stmt, gimple_call_tail_p (stmt));
2992 gimple_call_set_cannot_inline (new_stmt, gimple_call_cannot_inline_p (stmt));
2993 gimple_call_set_return_slot_opt (new_stmt, gimple_call_return_slot_opt_p (stmt));
2994 gimple_call_set_from_thunk (new_stmt, gimple_call_from_thunk_p (stmt));
2995 gimple_call_set_va_arg_pack (new_stmt, gimple_call_va_arg_pack_p (stmt));
/* Mark the copy modified so operand caches get recomputed.  */
2997 gimple_set_modified (new_stmt, true);
3003 /* Data structure used to count the number of dereferences to PTR
3004 inside an expression. */
/* Number of times PTR is dereferenced on the store (LHS) side.  */
3008 unsigned num_stores;
3012 /* Helper for count_uses_and_derefs. Called by walk_tree to look for
3013 (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA. */
3016 count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
3018 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
3019 struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info;
3021 /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
3022 pointer 'ptr' is *not* dereferenced, it is simply used to compute
3023 the address of 'fld' as 'ptr + offsetof(fld)'. */
3024 if (TREE_CODE (*tp) == ADDR_EXPR)
/* A dereference of exactly the tracked pointer: classify it as a store
   or a load (presumably using wi_p->is_lhs -- the test is elided here).  */
3030 if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr)
3033 count_p->num_stores++;
3035 count_p->num_loads++;
3041 /* Count the number of direct and indirect uses for pointer PTR in
3042 statement STMT. The number of direct uses is stored in
3043 *NUM_USES_P. Indirect references are counted separately depending
3044 on whether they are store or load operations. The counts are
3045 stored in *NUM_STORES_P and *NUM_LOADS_P. */
3048 count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p,
3049 unsigned *num_loads_p, unsigned *num_stores_p)
3058 /* Find out the total number of uses of PTR in STMT. */
3059 FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
3063 /* Now count the number of indirect references to PTR. This is
3064 truly awful, but we don't have much choice. There are no parent
3065 pointers inside INDIRECT_REFs, so an expression like
3066 '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
3067 find all the indirect and direct uses of x_1 inside. The only
3068 shortcut we can take is the fact that GIMPLE only allows
3069 INDIRECT_REFs inside the expressions below. */
3070 if (is_gimple_assign (stmt)
3071 || gimple_code (stmt) == GIMPLE_RETURN
3072 || gimple_code (stmt) == GIMPLE_ASM
3073 || is_gimple_call (stmt))
3075 struct walk_stmt_info wi;
3076 struct count_ptr_d count;
3079 count.num_stores = 0;
3080 count.num_loads = 0;
/* Zero WI so walk_gimple_op starts from a clean walker state, then
   walk the statement's operands with the counting callback.  */
3082 memset (&wi, 0, sizeof (wi));
3084 walk_gimple_op (stmt, count_ptr_derefs, &wi);
3086 *num_stores_p = count.num_stores;
3087 *num_loads_p = count.num_loads;
/* Every indirect reference is also a direct use of the pointer, so the
   totals must be consistent.  */
3090 gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
3093 /* From a tree operand OP return the base of a load or store operation
3094 or NULL_TREE if OP is not a load or a store. */
3097 get_base_loadstore (tree op)
/* Strip component references down to the base object.  */
3099 while (handled_component_p (op))
3100 op = TREE_OPERAND (op, 0);
/* The base qualifies when it is (among the visible alternatives) an
   indirect reference or a TARGET_MEM_REF; other accepted forms are in
   the elided part of the condition.  */
3102 || INDIRECT_REF_P (op)
3103 || TREE_CODE (op) == TARGET_MEM_REF)
3108 /* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
3109 VISIT_ADDR if non-NULL on loads, store and address-taken operands
3110 passing the STMT, the base of the operand and DATA to it. The base
3111 will be either a decl, an indirect reference (including TARGET_MEM_REF)
3112 or the argument of an address expression.
3113 Returns the results of these callbacks or'ed. */
3116 walk_stmt_load_store_addr_ops (gimple stmt, void *data,
3117 bool (*visit_load)(gimple, tree, void *),
3118 bool (*visit_store)(gimple, tree, void *),
3119 bool (*visit_addr)(gimple, tree, void *))
/* Case 1: a single assignment.  The LHS is a store; the RHS may be a
   load, a taken address, or a TARGET_MEM_REF/OBJ_TYPE_REF hiding one.  */
3123 if (gimple_assign_single_p (stmt))
3128 lhs = get_base_loadstore (gimple_assign_lhs (stmt));
3130 ret |= visit_store (stmt, lhs, data);
3132 rhs = gimple_assign_rhs1 (stmt);
3133 while (handled_component_p (rhs))
3134 rhs = TREE_OPERAND (rhs, 0);
3137 if (TREE_CODE (rhs) == ADDR_EXPR)
3138 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
3139 else if (TREE_CODE (rhs) == TARGET_MEM_REF
3140 && TMR_BASE (rhs) != NULL_TREE
3141 && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
3142 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data);
3143 else if (TREE_CODE (rhs) == OBJ_TYPE_REF
3144 && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
3145 ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
/* The LHS may itself take an address through a TARGET_MEM_REF base.  */
3147 lhs = gimple_assign_lhs (stmt);
3148 if (TREE_CODE (lhs) == TARGET_MEM_REF
3149 && TMR_BASE (lhs) != NULL_TREE
3150 && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
3151 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), data);
3155 rhs = get_base_loadstore (rhs);
3157 ret |= visit_load (stmt, rhs, data);
/* Case 2: non-single assignments and conditions -- scan all operands
   for taken addresses.  */
3161 && (is_gimple_assign (stmt)
3162 || gimple_code (stmt) == GIMPLE_COND))
3164 for (i = 0; i < gimple_num_ops (stmt); ++i)
3165 if (gimple_op (stmt, i)
3166 && TREE_CODE (gimple_op (stmt, i)) == ADDR_EXPR)
3167 ret |= visit_addr (stmt, TREE_OPERAND (gimple_op (stmt, i), 0), data);
/* Case 3: calls -- LHS is a store; each argument is an address or a
   load; the static chain and return-slot LHS may also be addresses.  */
3169 else if (is_gimple_call (stmt))
3173 tree lhs = gimple_call_lhs (stmt);
3176 lhs = get_base_loadstore (lhs);
3178 ret |= visit_store (stmt, lhs, data);
3181 if (visit_load || visit_addr)
3182 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3184 tree rhs = gimple_call_arg (stmt, i);
3186 && TREE_CODE (rhs) == ADDR_EXPR)
3187 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
3188 else if (visit_load)
3190 rhs = get_base_loadstore (rhs);
3192 ret |= visit_load (stmt, rhs, data);
3196 && gimple_call_chain (stmt)
3197 && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
3198 ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
/* With the return-slot optimization the LHS address is passed to the
   callee, so it counts as address-taken.  */
3201 && gimple_call_return_slot_opt_p (stmt)
3202 && gimple_call_lhs (stmt) != NULL_TREE
3203 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
3204 ret |= visit_addr (stmt, gimple_call_lhs (stmt), data);
/* Case 4: asm statements -- outputs are stores (addresses when the
   constraint forces memory), inputs are loads/addresses likewise.  */
3206 else if (gimple_code (stmt) == GIMPLE_ASM)
3209 const char *constraint;
3210 const char **oconstraints;
3211 bool allows_mem, allows_reg, is_inout;
3212 noutputs = gimple_asm_noutputs (stmt);
3213 oconstraints = XALLOCAVEC (const char *, noutputs);
3214 if (visit_store || visit_addr)
3215 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
3217 tree link = gimple_asm_output_op (stmt, i);
3218 tree op = get_base_loadstore (TREE_VALUE (link));
3219 if (op && visit_store)
3220 ret |= visit_store (stmt, op, data);
3223 constraint = TREE_STRING_POINTER
3224 (TREE_VALUE (TREE_PURPOSE (link)));
3225 oconstraints[i] = constraint;
3226 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
3227 &allows_reg, &is_inout);
/* A memory-only output operand has its address taken implicitly.  */
3228 if (op && !allows_reg && allows_mem)
3229 ret |= visit_addr (stmt, op, data);
3232 if (visit_load || visit_addr)
3233 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
3235 tree link = gimple_asm_input_op (stmt, i);
3236 tree op = TREE_VALUE (link);
3238 && TREE_CODE (op) == ADDR_EXPR)
3239 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
3240 else if (visit_load || visit_addr)
3242 op = get_base_loadstore (op);
3246 ret |= visit_load (stmt, op, data);
3249 constraint = TREE_STRING_POINTER
3250 (TREE_VALUE (TREE_PURPOSE (link)));
3251 parse_input_constraint (&constraint, 0, 0, noutputs,
3253 &allows_mem, &allows_reg);
3254 if (!allows_reg && allows_mem)
3255 ret |= visit_addr (stmt, op, data);
/* Case 5: returns -- the return value is an address or a load.  */
3261 else if (gimple_code (stmt) == GIMPLE_RETURN)
3263 tree op = gimple_return_retval (stmt);
3267 && TREE_CODE (op) == ADDR_EXPR)
3268 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
3269 else if (visit_load)
3271 op = get_base_loadstore (op);
3273 ret |= visit_load (stmt, op, data);
/* Case 6: PHI nodes -- only taken addresses can appear as arguments.  */
3278 && gimple_code (stmt) == GIMPLE_PHI)
3280 for (i = 0; i < gimple_phi_num_args (stmt); ++i)
3282 tree op = PHI_ARG_DEF (stmt, i);
3283 if (TREE_CODE (op) == ADDR_EXPR)
3284 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
3291 /* Like walk_stmt_load_store_addr_ops but with NULL visit_addr. IPA-CP
3292 should make a faster clone for this case. */
3295 walk_stmt_load_store_ops (gimple stmt, void *data,
3296 bool (*visit_load)(gimple, tree, void *),
3297 bool (*visit_store)(gimple, tree, void *))
/* Thin forwarding wrapper: address-taken operands are simply ignored.  */
3299 return walk_stmt_load_store_addr_ops (stmt, data,
3300 visit_load, visit_store, NULL);
3303 /* Helper for gimple_ior_addresses_taken_1. */
3306 gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
3307 tree addr, void *data)
3309 bitmap addresses_taken = (bitmap)data;
/* Strip component refs so we record the base decl, not a sub-object.  */
3310 while (handled_component_p (addr))
3311 addr = TREE_OPERAND (addr, 0);
/* Record the decl's UID in the caller's bitmap (presumably only when
   ADDR is a DECL_P -- the guard is elided here, confirm).  */
3314 bitmap_set_bit (addresses_taken, DECL_UID (addr));
3320 /* Set the bit for the uid of all decls that have their address taken
3321 in STMT in the ADDRESSES_TAKEN bitmap. Returns true if there
3322 were any in this stmt. */
3325 gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
/* Walk only the address-taken operands; loads and stores are ignored.  */
3327 return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
3328 gimple_ior_addresses_taken_1);
3332 /* Return a printable name for symbol DECL. */
3335 gimple_decl_printable_name (tree decl, int verbosity)
3337 gcc_assert (decl && DECL_NAME (decl));
/* Prefer the assembler name, demangled according to VERBOSITY.  */
3339 if (DECL_ASSEMBLER_NAME_SET_P (decl))
3341 const char *str, *mangled_str;
3342 int dmgl_opts = DMGL_NO_OPTS;
/* Higher verbosity (threshold elided here) enables verbose demangling.  */
3346 dmgl_opts = DMGL_VERBOSE
/* Include parameter types for functions.  */
3351 if (TREE_CODE (decl) == FUNCTION_DECL)
3352 dmgl_opts |= DMGL_PARAMS;
3355 mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
3356 str = cplus_demangle_v3 (mangled_str, dmgl_opts);
/* Fall back to the raw mangled name when demangling fails.  */
3357 return (str) ? str : mangled_str;
3360 return IDENTIFIER_POINTER (DECL_NAME (decl));
3364 /* Fold a OBJ_TYPE_REF expression to the address of a function.
3365 KNOWN_TYPE carries the true type of OBJ_TYPE_REF_OBJECT(REF). Adapted
3366 from cp_fold_obj_type_ref, but it tolerates types with no binfo
3370 gimple_fold_obj_type_ref (tree ref, tree known_type)
3372 HOST_WIDE_INT index;
/* Without binfo information we cannot resolve the vtable slot.  */
3377 if (TYPE_BINFO (known_type) == NULL_TREE)
3380 v = BINFO_VIRTUALS (TYPE_BINFO (known_type));
/* The token selects the vtable slot; walk V that many entries
   (the walk loop itself is elided here).  */
3381 index = tree_low_cst (OBJ_TYPE_REF_TOKEN (ref), 1);
/* Descriptor-based vtables use several slots per method.  */
3385 i += (TARGET_VTABLE_USES_DESCRIPTORS
3386 ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
3390 fndecl = TREE_VALUE (v);
3392 #ifdef ENABLE_CHECKING
3393 gcc_assert (tree_int_cst_equal (OBJ_TYPE_REF_TOKEN (ref),
3394 DECL_VINDEX (fndecl)));
/* Let the cgraph know the function is reachable through a vtable.  */
3397 cgraph_node (fndecl)->local.vtable_method = true;
3399 return build_fold_addr_expr (fndecl);
3402 #include "gt-gimple.h"