+2009-11-25 H.J. Lu <hongjiu.lu@intel.com>
+
+ * alias.c: Remove trailing white spaces.
+ * alloc-pool.c: Likewise.
+ * alloc-pool.h: Likewise.
+ * attribs.c: Likewise.
+ * auto-inc-dec.c: Likewise.
+ * basic-block.h: Likewise.
+ * bb-reorder.c: Likewise.
+ * bt-load.c: Likewise.
+ * builtins.c: Likewise.
+ * builtins.def: Likewise.
+ * c-common.c: Likewise.
+ * c-common.h: Likewise.
+ * c-cppbuiltin.c: Likewise.
+ * c-decl.c: Likewise.
+ * c-format.c: Likewise.
+ * c-lex.c: Likewise.
+ * c-omp.c: Likewise.
+ * c-opts.c: Likewise.
+ * c-parser.c: Likewise.
+ * c-pretty-print.c: Likewise.
+ * c-tree.h: Likewise.
+ * c-typeck.c: Likewise.
+ * caller-save.c: Likewise.
+ * calls.c: Likewise.
+ * cfg.c: Likewise.
+ * cfganal.c: Likewise.
+ * cfgexpand.c: Likewise.
+ * cfghooks.c: Likewise.
+ * cfghooks.h: Likewise.
+ * cfglayout.c: Likewise.
+ * cfgloop.c: Likewise.
+ * cfgloop.h: Likewise.
+ * cfgloopmanip.c: Likewise.
+ * cfgrtl.c: Likewise.
+ * cgraph.c: Likewise.
+ * cgraph.h: Likewise.
+ * cgraphbuild.c: Likewise.
+ * cgraphunit.c: Likewise.
+ * cif-code.def: Likewise.
+ * collect2.c: Likewise.
+ * combine.c: Likewise.
+ * convert.c: Likewise.
+ * coverage.c: Likewise.
+ * crtstuff.c: Likewise.
+ * cse.c: Likewise.
+ * cselib.c: Likewise.
+ * dbgcnt.c: Likewise.
+ * dbgcnt.def: Likewise.
+ * dbgcnt.h: Likewise.
+ * dbxout.c: Likewise.
+ * dce.c: Likewise.
+ * ddg.c: Likewise.
+ * ddg.h: Likewise.
+ * defaults.h: Likewise.
+ * df-byte-scan.c: Likewise.
+ * df-core.c: Likewise.
+ * df-problems.c: Likewise.
+ * df-scan.c: Likewise.
+ * df.h: Likewise.
+ * dfp.c: Likewise.
+ * diagnostic.c: Likewise.
+ * diagnostic.h: Likewise.
+ * dominance.c: Likewise.
+ * domwalk.c: Likewise.
+ * double-int.c: Likewise.
+ * double-int.h: Likewise.
+ * dse.c: Likewise.
+ * dwarf2asm.c: Likewise.
+ * dwarf2asm.h: Likewise.
+ * dwarf2out.c: Likewise.
+ * ebitmap.c: Likewise.
+ * ebitmap.h: Likewise.
+ * emit-rtl.c: Likewise.
+ * et-forest.c: Likewise.
+ * except.c: Likewise.
+ * except.h: Likewise.
+ * expmed.c: Likewise.
+ * expr.c: Likewise.
+ * expr.h: Likewise.
+ * final.c: Likewise.
+ * flags.h: Likewise.
+ * fold-const.c: Likewise.
+ * function.c: Likewise.
+ * function.h: Likewise.
+ * fwprop.c: Likewise.
+ * gcc.c: Likewise.
+ * gcov-dump.c: Likewise.
+ * gcov-io.c: Likewise.
+ * gcov-io.h: Likewise.
+ * gcov.c: Likewise.
+ * gcse.c: Likewise.
+ * genattr.c: Likewise.
+ * genattrtab.c: Likewise.
+ * genautomata.c: Likewise.
+ * genchecksum.c: Likewise.
+ * genconfig.c: Likewise.
+ * genflags.c: Likewise.
+ * gengtype-parse.c: Likewise.
+ * gengtype.c: Likewise.
+ * gengtype.h: Likewise.
+ * genmddeps.c: Likewise.
+ * genmodes.c: Likewise.
+ * genopinit.c: Likewise.
+ * genpreds.c: Likewise.
+ * gensupport.c: Likewise.
+ * ggc-common.c: Likewise.
+ * ggc-page.c: Likewise.
+ * ggc-zone.c: Likewise.
+ * ggc.h: Likewise.
+ * gimple-iterator.c: Likewise.
+ * gimple-low.c: Likewise.
+ * gimple-pretty-print.c: Likewise.
+ * gimple.c: Likewise.
+ * gimple.def: Likewise.
+ * gimple.h: Likewise.
+ * gimplify.c: Likewise.
+ * graphds.c: Likewise.
+ * graphite-clast-to-gimple.c: Likewise.
+ * gthr-nks.h: Likewise.
+ * gthr-posix.c: Likewise.
+ * gthr-posix.h: Likewise.
+ * gthr-posix95.h: Likewise.
+ * gthr-single.h: Likewise.
+ * gthr-tpf.h: Likewise.
+ * gthr-vxworks.h: Likewise.
+ * gthr.h: Likewise.
+ * haifa-sched.c: Likewise.
+ * hard-reg-set.h: Likewise.
+ * hooks.c: Likewise.
+ * hooks.h: Likewise.
+ * hosthooks.h: Likewise.
+ * hwint.h: Likewise.
+ * ifcvt.c: Likewise.
+ * incpath.c: Likewise.
+ * init-regs.c: Likewise.
+ * integrate.c: Likewise.
+ * ipa-cp.c: Likewise.
+ * ipa-inline.c: Likewise.
+ * ipa-prop.c: Likewise.
+ * ipa-pure-const.c: Likewise.
+ * ipa-reference.c: Likewise.
+ * ipa-struct-reorg.c: Likewise.
+ * ipa-struct-reorg.h: Likewise.
+ * ipa-type-escape.c: Likewise.
+ * ipa-type-escape.h: Likewise.
+ * ipa-utils.c: Likewise.
+ * ipa-utils.h: Likewise.
+ * ipa.c: Likewise.
+ * ira-build.c: Likewise.
+ * ira-color.c: Likewise.
+ * ira-conflicts.c: Likewise.
+ * ira-costs.c: Likewise.
+ * ira-emit.c: Likewise.
+ * ira-int.h: Likewise.
+ * ira-lives.c: Likewise.
+ * ira.c: Likewise.
+ * jump.c: Likewise.
+ * lambda-code.c: Likewise.
+ * lambda-mat.c: Likewise.
+ * lambda-trans.c: Likewise.
+ * lambda.h: Likewise.
+ * langhooks.c: Likewise.
+ * lcm.c: Likewise.
+ * libgcov.c: Likewise.
+ * lists.c: Likewise.
+ * loop-doloop.c: Likewise.
+ * loop-init.c: Likewise.
+ * loop-invariant.c: Likewise.
+ * loop-iv.c: Likewise.
+ * loop-unroll.c: Likewise.
+ * lower-subreg.c: Likewise.
+ * lto-cgraph.c: Likewise.
+ * lto-compress.c: Likewise.
+ * lto-opts.c: Likewise.
+ * lto-section-in.c: Likewise.
+ * lto-section-out.c: Likewise.
+ * lto-streamer-in.c: Likewise.
+ * lto-streamer-out.c: Likewise.
+ * lto-streamer.c: Likewise.
+ * lto-streamer.h: Likewise.
+ * lto-symtab.c: Likewise.
+ * lto-wpa-fixup.c: Likewise.
+ * matrix-reorg.c: Likewise.
+ * mcf.c: Likewise.
+ * mode-switching.c: Likewise.
+ * modulo-sched.c: Likewise.
+ * omega.c: Likewise.
+ * omega.h: Likewise.
+ * omp-low.c: Likewise.
+ * optabs.c: Likewise.
+ * optabs.h: Likewise.
+ * opts-common.c: Likewise.
+ * opts.c: Likewise.
+ * params.def: Likewise.
+ * params.h: Likewise.
+ * passes.c: Likewise.
+ * plugin.c: Likewise.
+ * postreload-gcse.c: Likewise.
+ * postreload.c: Likewise.
+ * predict.c: Likewise.
+ * predict.def: Likewise.
+ * pretty-print.c: Likewise.
+ * pretty-print.h: Likewise.
+ * print-rtl.c: Likewise.
+ * print-tree.c: Likewise.
+ * profile.c: Likewise.
+ * read-rtl.c: Likewise.
+ * real.c: Likewise.
+ * recog.c: Likewise.
+ * reg-stack.c: Likewise.
+ * regcprop.c: Likewise.
+ * reginfo.c: Likewise.
+ * regmove.c: Likewise.
+ * regrename.c: Likewise.
+ * regs.h: Likewise.
+ * regstat.c: Likewise.
+ * reload.c: Likewise.
+ * reload1.c: Likewise.
+ * resource.c: Likewise.
+ * rtl.c: Likewise.
+ * rtl.def: Likewise.
+ * rtl.h: Likewise.
+ * rtlanal.c: Likewise.
+ * sbitmap.c: Likewise.
+ * sched-deps.c: Likewise.
+ * sched-ebb.c: Likewise.
+ * sched-int.h: Likewise.
+ * sched-rgn.c: Likewise.
+ * sched-vis.c: Likewise.
+ * sdbout.c: Likewise.
+ * sel-sched-dump.c: Likewise.
+ * sel-sched-dump.h: Likewise.
+ * sel-sched-ir.c: Likewise.
+ * sel-sched-ir.h: Likewise.
+ * sel-sched.c: Likewise.
+ * sel-sched.h: Likewise.
+ * sese.c: Likewise.
+ * sese.h: Likewise.
+ * simplify-rtx.c: Likewise.
+ * stack-ptr-mod.c: Likewise.
+ * stmt.c: Likewise.
+ * stor-layout.c: Likewise.
+ * store-motion.c: Likewise.
+ * stringpool.c: Likewise.
+ * stub-objc.c: Likewise.
+ * sync-builtins.def: Likewise.
+ * target-def.h: Likewise.
+ * target.h: Likewise.
+ * targhooks.c: Likewise.
+ * targhooks.h: Likewise.
+ * timevar.c: Likewise.
+ * tlink.c: Likewise.
+ * toplev.c: Likewise.
+ * toplev.h: Likewise.
+ * tracer.c: Likewise.
+ * tree-affine.c: Likewise.
+ * tree-affine.h: Likewise.
+ * tree-browser.def: Likewise.
+ * tree-call-cdce.c: Likewise.
+ * tree-cfg.c: Likewise.
+ * tree-cfgcleanup.c: Likewise.
+ * tree-chrec.c: Likewise.
+ * tree-chrec.h: Likewise.
+ * tree-complex.c: Likewise.
+ * tree-data-ref.c: Likewise.
+ * tree-data-ref.h: Likewise.
+ * tree-dfa.c: Likewise.
+ * tree-dump.c: Likewise.
+ * tree-dump.h: Likewise.
+ * tree-eh.c: Likewise.
+ * tree-flow-inline.h: Likewise.
+ * tree-flow.h: Likewise.
+ * tree-if-conv.c: Likewise.
+ * tree-inline.c: Likewise.
+ * tree-into-ssa.c: Likewise.
+ * tree-loop-distribution.c: Likewise.
+ * tree-loop-linear.c: Likewise.
+ * tree-mudflap.c: Likewise.
+ * tree-nested.c: Likewise.
+ * tree-nomudflap.c: Likewise.
+ * tree-nrv.c: Likewise.
+ * tree-object-size.c: Likewise.
+ * tree-optimize.c: Likewise.
+ * tree-outof-ssa.c: Likewise.
+ * tree-parloops.c: Likewise.
+ * tree-pass.h: Likewise.
+ * tree-phinodes.c: Likewise.
+ * tree-predcom.c: Likewise.
+ * tree-pretty-print.c: Likewise.
+ * tree-profile.c: Likewise.
+ * tree-scalar-evolution.c: Likewise.
+ * tree-ssa-address.c: Likewise.
+ * tree-ssa-alias.c: Likewise.
+ * tree-ssa-ccp.c: Likewise.
+ * tree-ssa-coalesce.c: Likewise.
+ * tree-ssa-copy.c: Likewise.
+ * tree-ssa-copyrename.c: Likewise.
+ * tree-ssa-dce.c: Likewise.
+ * tree-ssa-dom.c: Likewise.
+ * tree-ssa-dse.c: Likewise.
+ * tree-ssa-forwprop.c: Likewise.
+ * tree-ssa-ifcombine.c: Likewise.
+ * tree-ssa-live.c: Likewise.
+ * tree-ssa-live.h: Likewise.
+ * tree-ssa-loop-ch.c: Likewise.
+ * tree-ssa-loop-im.c: Likewise.
+ * tree-ssa-loop-ivcanon.c: Likewise.
+ * tree-ssa-loop-ivopts.c: Likewise.
+ * tree-ssa-loop-manip.c: Likewise.
+ * tree-ssa-loop-niter.c: Likewise.
+ * tree-ssa-loop-prefetch.c: Likewise.
+ * tree-ssa-loop-unswitch.c: Likewise.
+ * tree-ssa-loop.c: Likewise.
+ * tree-ssa-math-opts.c: Likewise.
+ * tree-ssa-operands.c: Likewise.
+ * tree-ssa-operands.h: Likewise.
+ * tree-ssa-phiopt.c: Likewise.
+ * tree-ssa-phiprop.c: Likewise.
+ * tree-ssa-pre.c: Likewise.
+ * tree-ssa-propagate.c: Likewise.
+ * tree-ssa-reassoc.c: Likewise.
+ * tree-ssa-sccvn.c: Likewise.
+ * tree-ssa-sink.c: Likewise.
+ * tree-ssa-structalias.c: Likewise.
+ * tree-ssa-ter.c: Likewise.
+ * tree-ssa-threadedge.c: Likewise.
+ * tree-ssa-threadupdate.c: Likewise.
+ * tree-ssa-uncprop.c: Likewise.
+ * tree-ssa.c: Likewise.
+ * tree-ssanames.c: Likewise.
+ * tree-switch-conversion.c: Likewise.
+ * tree-tailcall.c: Likewise.
+ * tree-vect-data-refs.c: Likewise.
+ * tree-vect-generic.c: Likewise.
+ * tree-vect-loop-manip.c: Likewise.
+ * tree-vect-loop.c: Likewise.
+ * tree-vect-patterns.c: Likewise.
+ * tree-vect-slp.c: Likewise.
+ * tree-vect-stmts.c: Likewise.
+ * tree-vectorizer.c: Likewise.
+ * tree-vectorizer.h: Likewise.
+ * tree-vrp.c: Likewise.
+ * tree.c: Likewise.
+ * tree.def: Likewise.
+ * tree.h: Likewise.
+ * treestruct.def: Likewise.
+ * unwind-compat.c: Likewise.
+ * unwind-dw2-fde-glibc.c: Likewise.
+ * unwind-dw2.c: Likewise.
+ * value-prof.c: Likewise.
+ * value-prof.h: Likewise.
+ * var-tracking.c: Likewise.
+ * varasm.c: Likewise.
+ * varpool.c: Likewise.
+ * vec.c: Likewise.
+ * vec.h: Likewise.
+ * vmsdbgout.c: Likewise.
+ * web.c: Likewise.
+ * xcoffout.c: Likewise.
+
2009-11-24 John David Anglin <dave.anglin@nrc-cnrc.gc.ca>

 * pa.c (output_call): Only use sr4 for long interspace calls if
{
if (alias_sets_conflict_p (MEM_ALIAS_SET(*x), MEM_ALIAS_SET(mem)))
return 1;
-
- return -1;
+
+ return -1;
}
return 0;
}
if (for_each_rtx (pat, (rtx_function) walk_mems_2, *x))
/* Indicate that dependence was determined and stop traversal. */
return 1;
-
+
return -1;
}
return 0;
block = XNEWVEC (char, pool->block_size);
block_header = (alloc_pool_list) block;
block += align_eight (sizeof (struct alloc_pool_list_def));
-
+
/* Throw it on the block list. */
block_header->next = pool->block_list;
pool->block_list = block_header;
pool->blocks_allocated += 1;
}
-
+
/* We now know that we can take the first elt off the virgin list and
put it on the returned list. */
block = pool->virgin_free_list;
if (d->allocated)
{
fprintf (stderr, "%-22s %6d %10lu %10lu(%10lu) %10lu(%10lu) %10lu(%10lu)\n", d->name,
- d->elt_size, d->created, d->allocated, d->allocated / d->elt_size,
- d->peak, d->peak / d->elt_size,
+ d->elt_size, d->created, d->allocated, d->allocated / d->elt_size,
+ d->peak, d->peak / d->elt_size,
d->current, d->current / d->elt_size);
i->total_allocated += d->allocated;
i->total_created += d->created;
char* virgin_free_list;
/* The number of elements in the virgin_free_list that can be
- allocated before needing another block. */
+ allocated before needing another block. */
size_t virgin_elts_remaining;
size_t elts_allocated;
/* Insert a single ATTR into the attribute table. */
void
-register_attribute (const struct attribute_spec *attr)
+register_attribute (const struct attribute_spec *attr)
{
struct substring str;
void **slot;
/* Discovery of auto-inc and auto-dec instructions.
Copyright (C) 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
-
+
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
...
b <- a + c
- (For this case to be true, b must not be assigned or used between
+ (For this case to be true, b must not be assigned or used between
the *a and the assignment to b. B must also be a Pmode reg.)
becomes
by the pointer. This is useful for machines that have
HAVE_PRE_MODIFY_DISP, HAVE_POST_MODIFY_DISP defined.
- 3) c is a register. This is useful for machines that have
- HAVE_PRE_MODIFY_REG, HAVE_POST_MODIFY_REG
-
+ 3) c is a register. This is useful for machines that have
+ HAVE_PRE_MODIFY_REG, HAVE_POST_MODIFY_REG
+
 There is one special case: if a already had an offset equal to it +-
its width and that offset is equal to -c when the increment was
before the ref or +c if the increment was after the ref, then if we
ANY is used for constants that are not +-size or 0. REG is used if
the forms are reg1 + reg2. */
-enum inc_state
+enum inc_state
{
INC_ZERO, /* == 0 */
INC_NEG_SIZE, /* == +size */
/* Parsed fields of an inc insn of the form "reg_res = reg0+reg1" or
"reg_res = reg0+c". */
-static struct inc_insn
+static struct inc_insn
{
rtx insn; /* The insn being parsed. */
rtx pat; /* The pattern of the insn. */
/* Dump the parsed inc insn to FILE. */
-static void
+static void
dump_inc_insn (FILE *file)
{
- const char *f = ((inc_insn.form == FORM_PRE_ADD)
+ const char *f = ((inc_insn.form == FORM_PRE_ADD)
|| (inc_insn.form == FORM_PRE_INC)) ? "pre" : "post";
dump_insn_slim (file, inc_insn.insn);
case FORM_PRE_ADD:
case FORM_POST_ADD:
if (inc_insn.reg1_is_const)
- fprintf (file, "found %s add(%d) r[%d]=r[%d]+%d\n",
- f, INSN_UID (inc_insn.insn),
- REGNO (inc_insn.reg_res),
+ fprintf (file, "found %s add(%d) r[%d]=r[%d]+%d\n",
+ f, INSN_UID (inc_insn.insn),
+ REGNO (inc_insn.reg_res),
REGNO (inc_insn.reg0), (int) inc_insn.reg1_val);
else
- fprintf (file, "found %s add(%d) r[%d]=r[%d]+r[%d]\n",
- f, INSN_UID (inc_insn.insn),
- REGNO (inc_insn.reg_res),
+ fprintf (file, "found %s add(%d) r[%d]=r[%d]+r[%d]\n",
+ f, INSN_UID (inc_insn.insn),
+ REGNO (inc_insn.reg_res),
REGNO (inc_insn.reg0), REGNO (inc_insn.reg1));
break;
-
+
case FORM_PRE_INC:
case FORM_POST_INC:
if (inc_insn.reg1_is_const)
- fprintf (file, "found %s inc(%d) r[%d]+=%d\n",
- f, INSN_UID (inc_insn.insn),
+ fprintf (file, "found %s inc(%d) r[%d]+=%d\n",
+ f, INSN_UID (inc_insn.insn),
REGNO (inc_insn.reg_res), (int) inc_insn.reg1_val);
else
- fprintf (file, "found %s inc(%d) r[%d]+=r[%d]\n",
- f, INSN_UID (inc_insn.insn),
+ fprintf (file, "found %s inc(%d) r[%d]+=r[%d]\n",
+ f, INSN_UID (inc_insn.insn),
REGNO (inc_insn.reg_res), REGNO (inc_insn.reg1));
break;
/* Dump the parsed mem insn to FILE. */
-static void
+static void
dump_mem_insn (FILE *file)
{
dump_insn_slim (file, mem_insn.insn);
if (mem_insn.reg1_is_const)
- fprintf (file, "found mem(%d) *(r[%d]+%d)\n",
- INSN_UID (mem_insn.insn),
+ fprintf (file, "found mem(%d) *(r[%d]+%d)\n",
+ INSN_UID (mem_insn.insn),
REGNO (mem_insn.reg0), (int) mem_insn.reg1_val);
else
- fprintf (file, "found mem(%d) *(r[%d]+r[%d])\n",
- INSN_UID (mem_insn.insn),
+ fprintf (file, "found mem(%d) *(r[%d]+r[%d])\n",
+ INSN_UID (mem_insn.insn),
REGNO (mem_insn.reg0), REGNO (mem_insn.reg1));
}
insn. Moving the REG_EQUAL and REG_EQUIV is clearly wrong and it
does not appear that there are any other kinds of relevant notes. */
-static void
+static void
move_dead_notes (rtx to_insn, rtx from_insn, rtx pattern)
{
- rtx note;
+ rtx note;
rtx next_note;
rtx prev_note = NULL;
for (note = REG_NOTES (from_insn); note; note = next_note)
{
next_note = XEXP (note, 1);
-
+
if ((REG_NOTE_KIND (note) == REG_DEAD)
&& pattern == XEXP (note, 0))
{
return insns;
}
-
+
 /* Change mem_insn.mem_loc so that it uses NEW_ADDR which has an
increment of INC_REG. To have reached this point, the change is a
legitimate one from a dataflow point of view. The only questions
if (! validate_change (mem_insn.insn, mem_insn.mem_loc, new_mem, 0))
{
if (dump_file)
- fprintf (dump_file, "validation failure\n");
+ fprintf (dump_file, "validation failure\n");
return false;
}
/* Replace the addition with a move. Do it at the location of
the addition since the operand of the addition may change
before the memory reference. */
- mov_insn = insert_move_insn_before (inc_insn.insn,
+ mov_insn = insert_move_insn_before (inc_insn.insn,
inc_insn.reg_res, inc_insn.reg0);
move_dead_notes (mov_insn, inc_insn.insn, inc_insn.reg0);
break;
case FORM_POST_ADD:
- mov_insn = insert_move_insn_before (mem_insn.insn,
+ mov_insn = insert_move_insn_before (mem_insn.insn,
inc_insn.reg_res, inc_insn.reg0);
move_dead_notes (mov_insn, inc_insn.insn, inc_insn.reg0);
Assuming the form is ok, a prototype new address is built which is
passed to ATTEMPT_CHANGE for final processing. */
-static bool
+static bool
try_merge (void)
{
enum gen_form gen_form;
return false;
}
- mem_insn.reg1_state = (mem_insn.reg1_is_const)
+ mem_insn.reg1_state = (mem_insn.reg1_is_const)
? set_inc_state (mem_insn.reg1_val, size) : INC_REG;
inc_insn.reg1_state = (inc_insn.reg1_is_const)
? set_inc_state (inc_insn.reg1_val, size) : INC_REG;
/* Now get the form that we are generating. */
- gen_form = decision_table
+ gen_form = decision_table
[inc_insn.reg1_state][mem_insn.reg1_state][inc_insn.form];
if (dbg_cnt (auto_inc_dec) == false)
fprintf (dump_file, "trying SIMPLE_PRE_INC\n");
return attempt_change (gen_rtx_PRE_INC (reg_mode, inc_reg), inc_reg);
break;
-
+
case SIMPLE_POST_INC: /* size++ */
if (dump_file)
fprintf (dump_file, "trying SIMPLE_POST_INC\n");
return attempt_change (gen_rtx_POST_INC (reg_mode, inc_reg), inc_reg);
break;
-
+
case SIMPLE_PRE_DEC: /* --size */
if (dump_file)
fprintf (dump_file, "trying SIMPLE_PRE_DEC\n");
return attempt_change (gen_rtx_PRE_DEC (reg_mode, inc_reg), inc_reg);
break;
-
+
case SIMPLE_POST_DEC: /* size-- */
if (dump_file)
fprintf (dump_file, "trying SIMPLE_POST_DEC\n");
return attempt_change (gen_rtx_POST_DEC (reg_mode, inc_reg), inc_reg);
break;
-
+
case DISP_PRE: /* ++con */
if (dump_file)
fprintf (dump_file, "trying DISP_PRE\n");
inc_insn.reg1)),
inc_reg);
break;
-
+
case DISP_POST: /* con++ */
if (dump_file)
fprintf (dump_file, "trying POST_DISP\n");
inc_insn.reg1)),
inc_reg);
break;
-
+
case REG_PRE: /* ++reg */
if (dump_file)
fprintf (dump_file, "trying PRE_REG\n");
inc_insn.reg1)),
inc_reg);
break;
-
+
case REG_POST: /* reg++ */
if (dump_file)
fprintf (dump_file, "trying POST_REG\n");
/* Reverse the operands in a mem insn. */
-static void
+static void
reverse_mem (void)
{
- rtx tmp = mem_insn.reg1;
+ rtx tmp = mem_insn.reg1;
mem_insn.reg1 = mem_insn.reg0;
mem_insn.reg0 = tmp;
}
 /* Reverse the operands in an inc insn. */
-static void
+static void
reverse_inc (void)
{
- rtx tmp = inc_insn.reg1;
+ rtx tmp = inc_insn.reg1;
inc_insn.reg1 = inc_insn.reg0;
inc_insn.reg0 = tmp;
}
/* Return true if INSN is of a form "a = b op c" where a and b are
regs. op is + if c is a reg and +|- if c is a const. Fill in
- INC_INSN with what is found.
-
+ INC_INSN with what is found.
+
This function is called in two contexts, if BEFORE_MEM is true,
this is called for each insn in the basic block. If BEFORE_MEM is
false, it is called for the instruction in the block that uses the
inc_insn.reg0 = XEXP (SET_SRC (pat), 0);
if (rtx_equal_p (inc_insn.reg_res, inc_insn.reg0))
inc_insn.form = before_mem ? FORM_PRE_INC : FORM_POST_INC;
- else
+ else
inc_insn.form = before_mem ? FORM_PRE_ADD : FORM_POST_ADD;
if (CONST_INT_P (XEXP (SET_SRC (pat), 1)))
/* Process a = b + c where c is a reg. */
inc_insn.reg1 = XEXP (SET_SRC (pat), 1);
inc_insn.reg1_is_const = false;
-
- if (inc_insn.form == FORM_PRE_INC
+
+ if (inc_insn.form == FORM_PRE_INC
|| inc_insn.form == FORM_POST_INC)
return true;
else if (rtx_equal_p (inc_insn.reg_res, inc_insn.reg1))
inc_insn.form = before_mem ? FORM_PRE_INC : FORM_POST_INC;
return true;
}
- else
+ else
return true;
}
ADDRESS_OF_X to see if any single one of them is compatible with
what has been found in inc_insn.
- -1 is returned for success. 0 is returned if nothing was found and
+ -1 is returned for success. 0 is returned if nothing was found and
1 is returned for failure. */
static int
{
/* Match with *(reg0 + reg1) where reg1 is a const. */
HOST_WIDE_INT val = INTVAL (b);
- if (inc_insn.reg1_is_const
+ if (inc_insn.reg1_is_const
&& (inc_insn.reg1_val == val || inc_insn.reg1_val == -val))
{
mem_insn.reg1_val = val;
return -1;
}
}
- else if (!inc_insn.reg1_is_const
- && rtx_equal_p (inc_insn.reg1, b))
+ else if (!inc_insn.reg1_is_const
+ && rtx_equal_p (inc_insn.reg1, b))
/* Match with *(reg0 + reg1). */
return -1;
}
add of the second register. The FIRST_TRY parameter is used to
only allow the parameters to be reversed once. */
-static bool
+static bool
find_inc (bool first_try)
{
rtx insn;
if (count_occurrences (PATTERN (mem_insn.insn), mem_insn.reg0, 1) != 1)
{
if (dump_file)
- fprintf (dump_file, "mem count failure\n");
+ fprintf (dump_file, "mem count failure\n");
return false;
}
dump_mem_insn (dump_file);
/* Find the next use that is an inc. */
- insn = get_next_ref (REGNO (mem_insn.reg0),
- BASIC_BLOCK (BLOCK_NUM (mem_insn.insn)),
+ insn = get_next_ref (REGNO (mem_insn.reg0),
+ BASIC_BLOCK (BLOCK_NUM (mem_insn.insn)),
reg_next_inc_use);
if (!insn)
return false;
{
/* Next use was not an add. Look for one extra case. It could be
that we have:
-
+
*(a + b)
...= a;
...= b + a
-
+
if we reverse the operands in the mem ref we would
find this. Only try it once though. */
if (first_try && !mem_insn.reg1_is_const)
return false;
}
- /* Need to assure that none of the operands of the inc instruction are
+ /* Need to assure that none of the operands of the inc instruction are
assigned to by the mem insn. */
for (def_rec = DF_INSN_DEFS (mem_insn.insn); *def_rec; def_rec++)
{
df_ref def = *def_rec;
unsigned int regno = DF_REF_REGNO (def);
- if ((regno == REGNO (inc_insn.reg0))
+ if ((regno == REGNO (inc_insn.reg0))
|| (regno == REGNO (inc_insn.reg_res)))
{
if (dump_file)
{
/* Make sure that there is no insn that assigns to inc_insn.res
between the mem_insn and the inc_insn. */
- rtx other_insn = get_next_ref (REGNO (inc_insn.reg_res),
- BASIC_BLOCK (BLOCK_NUM (mem_insn.insn)),
+ rtx other_insn = get_next_ref (REGNO (inc_insn.reg_res),
+ BASIC_BLOCK (BLOCK_NUM (mem_insn.insn)),
reg_next_def);
if (other_insn != inc_insn.insn)
{
if (dump_file)
- fprintf (dump_file,
+ fprintf (dump_file,
"result of add is assigned to between mem and inc insns.\n");
return false;
}
- other_insn = get_next_ref (REGNO (inc_insn.reg_res),
- BASIC_BLOCK (BLOCK_NUM (mem_insn.insn)),
+ other_insn = get_next_ref (REGNO (inc_insn.reg_res),
+ BASIC_BLOCK (BLOCK_NUM (mem_insn.insn)),
reg_next_use);
- if (other_insn
+ if (other_insn
&& (other_insn != inc_insn.insn)
&& (DF_INSN_LUID (inc_insn.insn) > DF_INSN_LUID (other_insn)))
{
if (dump_file)
- fprintf (dump_file,
+ fprintf (dump_file,
"result of add is used between mem and inc insns.\n");
return false;
}
int luid = DF_INSN_LUID (inc_insn.insn);
if (inc_insn.form == FORM_POST_ADD)
{
- /* The trick is that we are not going to increment r0,
+ /* The trick is that we are not going to increment r0,
we are going to increment the result of the add insn.
For this trick to be correct, the result reg of
the inc must be a valid addressing reg. */
/* We also need to make sure that the next use of
inc result is after the inc. */
- other_insn
+ other_insn
= get_next_ref (REGNO (inc_insn.reg1), bb, reg_next_use);
if (other_insn && luid > DF_INSN_LUID (other_insn))
return false;
if (!rtx_equal_p (mem_insn.reg0, inc_insn.reg0))
- reverse_inc ();
+ reverse_inc ();
}
- other_insn
+ other_insn
= get_next_ref (REGNO (inc_insn.reg1), bb, reg_next_def);
if (other_insn && luid > DF_INSN_LUID (other_insn))
return false;
need to treat it as if it was *(b + a). It may also be that
the add is of the form a + c where c does not match b and
then we just abandon this. */
-
+
int luid = DF_INSN_LUID (inc_insn.insn);
rtx other_insn;
-
+
/* Make sure this reg appears only once in this insn. */
if (count_occurrences (PATTERN (mem_insn.insn), mem_insn.reg1, 1) != 1)
return false;
-
+
if (inc_insn.form == FORM_POST_ADD)
{
/* For this trick to be correct, the result reg of the inc
/* Need to check that there are no assignments to b
before the add insn. */
- other_insn
+ other_insn
= get_next_ref (REGNO (inc_insn.reg1), bb, reg_next_def);
if (other_insn && luid > DF_INSN_LUID (other_insn))
return false;
}
 /* To have gotten here, we know that
*(b + a)
-
+
... = (b + a)
-
+
We also know that the lhs of the inc is not b or a. We
need to make sure that there are no assignments to b
- between the mem ref and the inc. */
-
- other_insn
+ between the mem ref and the inc. */
+
+ other_insn
= get_next_ref (REGNO (inc_insn.reg0), bb, reg_next_def);
if (other_insn && luid > DF_INSN_LUID (other_insn))
return false;
/* Need to check that the next use of the add result is later than
add insn since this will be the reg incremented. */
- other_insn
+ other_insn
= get_next_ref (REGNO (inc_insn.reg_res), bb, reg_next_use);
if (other_insn && luid > DF_INSN_LUID (other_insn))
return false;
}
else /* FORM_POST_INC. There is less to check here because we
- know that operands must line up. */
+ know that operands must line up. */
{
if (!rtx_equal_p (mem_insn.reg1, inc_insn.reg1))
/* See comment above on find_inc (false) call. */
reverse_mem ();
return find_inc (false);
}
- else
+ else
return false;
}
-
+
 /* To have gotten here, we know that
*(a + b)
-
+
... = (a + b)
-
+
We also know that the lhs of the inc is not b. We need to make
sure that there are no assignments to b between the mem ref and
the inc. */
- other_insn
+ other_insn
= get_next_ref (REGNO (inc_insn.reg1), bb, reg_next_def);
if (other_insn && luid > DF_INSN_LUID (other_insn))
return false;
if (inc_insn.form == FORM_POST_INC)
{
- other_insn
+ other_insn
= get_next_ref (REGNO (inc_insn.reg0), bb, reg_next_use);
/* When we found inc_insn, we were looking for the
next add or inc, not the next insn that used the
bool insn_is_add_or_inc = true;
if (!NONDEBUG_INSN_P (insn))
- continue;
+ continue;
/* This continue is deliberate. We do not want the uses of the
- jump put into reg_next_use because it is not considered safe to
+ jump put into reg_next_use because it is not considered safe to
combine a preincrement with a jump. */
if (JUMP_P (insn))
continue;
clear of c because the inc insn is going to move
into the mem_insn.insn. */
int luid = DF_INSN_LUID (mem_insn.insn);
- rtx other_insn
+ rtx other_insn
= get_next_ref (REGNO (inc_insn.reg1), bb, reg_next_use);
-
+
if (other_insn && luid > DF_INSN_LUID (other_insn))
ok = false;
-
- other_insn
+
+ other_insn
= get_next_ref (REGNO (inc_insn.reg1), bb, reg_next_def);
-
+
if (other_insn && luid > DF_INSN_LUID (other_insn))
ok = false;
}
-
+
if (dump_file)
dump_inc_insn (dump_file);
-
+
if (ok && find_address (&PATTERN (mem_insn.insn)) == -1)
{
if (dump_file)
if (find_mem (&PATTERN (insn)))
success_in_block++;
}
-
+
 /* If the inc insn was merged with a mem, the inc insn is gone
 and there is nothing to update. */
if (DF_INSN_UID_GET (uid))
reg_next_inc_use[DF_REF_REGNO (def)] = NULL;
reg_next_def[DF_REF_REGNO (def)] = insn;
}
-
+
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
df_ref use = *use_rec;
reg_next_inc_use[DF_REF_REGNO (use)] = insn;
else
reg_next_inc_use[DF_REF_REGNO (use)] = NULL;
- }
+ }
}
else if (dump_file)
fprintf (dump_file, "skipping update of deleted insn %d\n", uid);
#endif
-static unsigned int
+static unsigned int
rest_of_handle_auto_inc_dec (void)
{
#ifdef AUTO_INC_DEC
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func |
+ TODO_dump_func |
TODO_df_finish, /* todo_flags_finish */
}
};
for ((INSN) = BB_HEAD (BB), (CURR) = (INSN) ? NEXT_INSN ((INSN)): NULL; \
(INSN) && (INSN) != NEXT_INSN (BB_END (BB)); \
(INSN) = (CURR), (CURR) = (INSN) ? NEXT_INSN ((INSN)) : NULL)
-
+
#define FOR_BB_INSNS_REVERSE(BB, INSN) \
for ((INSN) = BB_END (BB); \
(INSN) && (INSN) != PREV_INSN (BB_HEAD (BB)); \
edge_iterator ei;
/* Find EDGE_CAN_FALLTHRU edge. */
- FOR_EACH_EDGE (e, ei, cur_bb->succs)
+ FOR_EACH_EDGE (e, ei, cur_bb->succs)
if (e->flags & EDGE_CAN_FALLTHRU)
{
fall_thru = e;
/* This is the case where both edges out of the basic
block are crossing edges. Here we will fix up the
fall through edge. The jump edge will be taken care
- of later. The EDGE_CROSSING flag of fall_thru edge
+ of later. The EDGE_CROSSING flag of fall_thru edge
is unset before the call to force_nonfallthru
function because if a new basic-block is created
this edge remains in the current section boundary
CLEAR_HARD_REG_SET (all_btrs);
for (first_btr = -1, reg = 0; reg < FIRST_PSEUDO_REGISTER; reg++)
if (TEST_HARD_REG_BIT (reg_class_contents[(int) btr_class], reg)
- && (allow_callee_save || call_used_regs[reg]
+ && (allow_callee_save || call_used_regs[reg]
|| df_regs_ever_live_p (reg)))
{
SET_HARD_REG_BIT (all_btrs, reg);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
-static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
+static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
enum machine_mode, int);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
{
HOST_WIDE_INT bitsize, bitpos;
tree offset;
- enum machine_mode mode;
+ enum machine_mode mode;
int unsignedp, volatilep;
exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
rtx fp, lab, stack, insn, last;
enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
- /* DRAP is needed for stack realign if longjmp is expanded to current
+ /* DRAP is needed for stack realign if longjmp is expanded to current
function */
if (SUPPORTS_STACK_ALIGNMENT)
crtl->need_drap = true;
fn = built_in_decls[BUILT_IN_SINCOSL];
else
gcc_unreachable ();
-
+
op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
/* Make sure not to fold the cexp call again. */
call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
- return expand_expr (build_call_nary (ctype, call, 1, narg),
+ return expand_expr (build_call_nary (ctype, call, 1, narg),
target, VOIDmode, EXPAND_NORMAL);
}
if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
gcc_unreachable ();
-
+
arg = CALL_EXPR_ARG (exp, 0);
switch (DECL_FUNCTION_CODE (fndecl))
return target;
}
-/* Expand expression EXP which is a call to the strlen builtin. Return
+/* Expand expression EXP which is a call to the strlen builtin. Return
NULL_RTX if we failed the caller should emit a normal call, otherwise
try to get the result in TARGET, if convenient. */
operation in-line. */
if (src_align == 0)
return NULL_RTX;
-
+
if (currently_expanding_gimple_stmt)
stringop_block_profile (currently_expanding_gimple_stmt,
&expected_align, &expected_size);
return target;
}
-/* Expand expression EXP, which is a call to the strcpy builtin. Return
- NULL_RTX if we failed the caller should emit a normal call, otherwise
+/* Expand expression EXP, which is a call to the strcpy builtin. Return
+ NULL_RTX if we failed the caller should emit a normal call, otherwise
try to get the result in TARGET, if convenient (and in mode MODE if that's
convenient). */
return c_readstr (str + offset, mode);
}
-/* Expand expression EXP, which is a call to the strncpy builtin. Return
+/* Expand expression EXP, which is a call to the strncpy builtin. Return
NULL_RTX if we failed the caller should emit a normal call. */
static rtx
return force_reg (mode, target);
}
-/* Expand expression EXP, which is a call to the memset builtin. Return
- NULL_RTX if we failed the caller should emit a normal call, otherwise
+/* Expand expression EXP, which is a call to the memset builtin. Return
+ NULL_RTX if we failed the caller should emit a normal call, otherwise
try to get the result in TARGET, if convenient (and in mode MODE if that's
convenient). */
dest_align, expected_align,
expected_size))
goto do_libcall;
-
+
dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
dest_mem = convert_memory_address (ptr_mode, dest_mem);
return dest_mem;
dest_align, expected_align,
expected_size))
goto do_libcall;
-
+
dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
dest_mem = convert_memory_address (ptr_mode, dest_mem);
return dest_mem;
return expand_call (fn, target, target == const0_rtx);
}
-/* Expand expression EXP, which is a call to the bzero builtin. Return
+/* Expand expression EXP, which is a call to the bzero builtin. Return
NULL_RTX if we failed the caller should emit a normal call. */
static rtx
return NULL_RTX;
}
-/* Expand expression EXP, which is a call to the strncmp builtin. Return
+/* Expand expression EXP, which is a call to the strncmp builtin. Return
NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
the result in TARGET, if convenient. */
rtx result;
/* Emit normal call if marked not-inlineable. */
- if (CALL_CANNOT_INLINE_P (exp))
+ if (CALL_CANNOT_INLINE_P (exp))
return NULL_RTX;
if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
return convert_to_mode (target_mode, target, 0);
}
-/* Expand a call to __builtin_expect. We just return our argument
+/* Expand a call to __builtin_expect. We just return our argument
as the builtin_expect semantic should've been already executed by
tree branch prediction pass. */
call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
return expand_call (call, target, ignore);
}
-
+
\f
/* Reconstitute a mode for a __sync intrinsic operation. Since the type of
/* Expand the __sync_lock_test_and_set intrinsic. Note that the most
general form is actually an atomic exchange, and some targets only
support a reduced form with the second argument being a constant 1.
- EXP is the CALL_EXPR; TARGET is an optional place for us to store
+ EXP is the CALL_EXPR; TARGET is an optional place for us to store
the results. */
static rtx
if (! more_const_call_expr_args_p (&iter))
return END_BUILTINS;
-
+
arg = next_const_call_expr_arg (&iter);
argtype = TREE_TYPE (arg);
&& (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
type, mpfr_hypot)))
return res;
-
+
if (TREE_CODE (arg) == COMPLEX_EXPR)
{
tree real = TREE_OPERAND (arg, 0);
tree imag = TREE_OPERAND (arg, 1);
-
+
/* If either part is zero, cabs is fabs of the other. */
if (real_zerop (real))
return fold_build1_loc (loc, ABS_EXPR, type, imag);
/* Calculate the result when the argument is a constant. */
if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
return res;
-
+
/* Optimize sqrt(expN(x)) = expN(x*0.5). */
fcode = builtin_mathfn_code (arg);
if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
}
/* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
- if (fcode == BUILT_IN_POW
+ if (fcode == BUILT_IN_POW
|| fcode == BUILT_IN_POWF
|| fcode == BUILT_IN_POWL)
{
/* Calculate the result when the argument is a constant. */
if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
return res;
-
+
/* Optimize cos(-x) into cos (x). */
if ((narg = fold_strip_sign_ops (arg)))
return build_call_expr_loc (loc, fndecl, 1, narg);
/* Calculate the result when the argument is a constant. */
if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
return res;
-
+
/* Optimize cosh(-x) into cosh (x). */
if ((narg = fold_strip_sign_ops (arg)))
return build_call_expr_loc (loc, fndecl, 1, narg);
}
-
+
return NULL_TREE;
}
if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
return tmp;
#endif
-
+
/* Optimize fn(-x) into fn(x). */
if ((tmp = fold_strip_sign_ops (arg)))
return build_call_expr_loc (loc, fndecl, 1, tmp);
/* Calculate the result when the argument is a constant. */
if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
return res;
-
+
/* Optimize tan(atan(x)) = x. */
fcode = builtin_mathfn_code (arg);
if (flag_unsafe_math_optimizations
if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
return res;
#endif
-
+
rtype = TREE_TYPE (TREE_TYPE (arg0));
/* In case we can figure out the real part of arg0 and it is constant zero
{
CASE_FLT_FN (BUILT_IN_EXP):
/* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
- x = build_real (type, real_value_truncate (TYPE_MODE (type),
+ x = build_real (type, real_value_truncate (TYPE_MODE (type),
dconst_e ()));
exponent = CALL_EXPR_ARG (arg, 0);
break;
/* Calculate the result when the argument is a constant. */
if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
return res;
-
+
/* If either argument to hypot has a negate or abs, strip that off.
E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
narg0 = fold_strip_sign_ops (arg0);
narg1 = fold_strip_sign_ops (arg1);
if (narg0 || narg1)
{
- return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
+ return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
narg1 ? narg1 : arg1);
}
-
+
/* If either argument is zero, hypot is fabs of the other. */
if (real_zerop (arg0))
return fold_build1_loc (loc, ABS_EXPR, type, arg1);
else if (real_zerop (arg1))
return fold_build1_loc (loc, ABS_EXPR, type, arg0);
-
+
/* hypot(x,x) -> fabs(x)*sqrt(2). */
if (flag_unsafe_math_optimizations
&& operand_equal_p (arg0, arg1, OEP_PURE_SAME))
{
tree type = TREE_TYPE (TREE_TYPE (fndecl));
tree res;
-
+
/* Calculate the result when the argument is a constant. */
if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
return res;
src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
- /* Both DEST and SRC must be pointer types.
+ /* Both DEST and SRC must be pointer types.
??? This is what old code did. Is the testing for pointer types
really mandatory?
{
if (!validate_arg (arg, INTEGER_TYPE))
return NULL_TREE;
-
+
/* Transform toascii(c) -> (c & 0x7f). */
return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
build_int_cst (NULL_TREE, 0x7f));
/* Transform fmin/fmax(x,x) -> x. */
if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
return omit_one_operand_loc (loc, type, arg0, arg1);
-
+
/* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
functions to return the numeric arg if the other one is NaN.
These tree codes don't honor that, so only transform if
&& TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
{
tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
-
+
if (atan2_fn)
{
tree new_arg = builtin_save_expr (arg);
return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
}
}
-
+
return NULL_TREE;
}
{
if (! validate_arg (arg, REAL_TYPE))
return NULL_TREE;
-
+
STRIP_NOPS (arg);
-
+
if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
{
const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
-
+
switch (value->cl)
{
case rvc_nan:
break;
}
}
-
+
return NULL_TREE;
}
{
if (! validate_arg (arg, REAL_TYPE))
return NULL_TREE;
-
+
STRIP_NOPS (arg);
-
+
if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
{
const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
-
+
switch (value->cl)
{
case rvc_zero:
break;
}
}
-
+
return NULL_TREE;
}
{
if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
return NULL_TREE;
-
+
STRIP_NOPS (arg0);
-
+
if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
return NULL_TREE;
-
+
arg1 = build_fold_indirect_ref_loc (loc, arg1);
/* Proceed if a valid pointer type was passed in. */
{
const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
tree frac, exp;
-
+
switch (value->cl)
{
case rvc_zero:
default:
gcc_unreachable ();
}
-
+
/* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
TREE_SIDE_EFFECTS (arg1) = 1;
|| (TREE_CODE (arg0) == REAL_CST
&& !real_isfinite (&TREE_REAL_CST (arg0))))
return omit_one_operand_loc (loc, type, arg0, arg1);
-
+
/* If both arguments are constant, then try to evaluate it. */
if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
&& TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
/* Bound the maximum adjustment to twice the range of the
mode's valid exponents. Use abs to ensure the range is
positive as a sanity check. */
- const long max_exp_adj = 2 *
+ const long max_exp_adj = 2 *
labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
- REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
/* Get the user-requested adjustment. */
const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
-
+
/* The requested adjustment must be inside this range. This
is a preliminary cap to avoid things like overflow, we
may still fail to compute the result for other reasons. */
if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
{
REAL_VALUE_TYPE initial_result;
-
+
real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
/* Ensure we didn't overflow. */
{
const REAL_VALUE_TYPE trunc_result
= real_value_truncate (TYPE_MODE (type), initial_result);
-
+
/* Only proceed if the target mode can hold the
resulting value. */
if (REAL_VALUES_EQUAL (initial_result, trunc_result))
{
if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
return NULL_TREE;
-
+
STRIP_NOPS (arg0);
-
+
if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
return NULL_TREE;
-
+
arg1 = build_fold_indirect_ref_loc (loc, arg1);
/* Proceed if a valid pointer type was passed in. */
frac.sign = value->sign;
break;
}
-
+
/* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
build_real (rettype, trunc));
return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
build_real (rettype, frac));
}
-
+
return NULL_TREE;
}
signbit_call, integer_zero_node);
isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
isinf_call, integer_zero_node);
-
+
tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
integer_minus_one_node, integer_one_node);
tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
enum machine_mode mode;
REAL_VALUE_TYPE r;
char buf[128];
-
+
/* Verify the required arguments in the original call. */
if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
INTEGER_TYPE, INTEGER_TYPE,
INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
return NULL_TREE;
-
+
fp_nan = CALL_EXPR_ARG (exp, 0);
fp_infinite = CALL_EXPR_ARG (exp, 1);
fp_normal = CALL_EXPR_ARG (exp, 2);
mode = TYPE_MODE (type);
arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
- /* fpclassify(x) ->
+ /* fpclassify(x) ->
isnan(x) ? FP_NAN :
(fabs(x) == Inf ? FP_INFINITE :
(fabs(x) >= DBL_MIN ? FP_NORMAL :
(x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
-
+
tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
build_real (type, dconst0));
res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
arg, build_real (type, r));
res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
-
+
if (HONOR_INFINITIES (mode))
{
real_inf (&r);
tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
}
-
+
return res;
}
CASE_FLT_FN (BUILT_IN_CONJ):
if (validate_arg (arg0, COMPLEX_TYPE)
- && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
+ && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
break;
CASE_FLT_FN (BUILT_IN_CREAL):
if (validate_arg (arg0, COMPLEX_TYPE)
- && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
+ && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
break;
CASE_FLT_FN (BUILT_IN_CCOS):
return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
-
+
CASE_FLT_FN (BUILT_IN_CCOSH):
return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
-
+
#ifdef HAVE_mpc
CASE_FLT_FN (BUILT_IN_CSIN):
if (validate_arg (arg0, COMPLEX_TYPE)
- && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
+ && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
return do_mpc_arg1 (arg0, type, mpc_sin);
break;
-
+
CASE_FLT_FN (BUILT_IN_CSINH):
if (validate_arg (arg0, COMPLEX_TYPE)
- && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
+ && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
return do_mpc_arg1 (arg0, type, mpc_sinh);
break;
-
+
CASE_FLT_FN (BUILT_IN_CTAN):
if (validate_arg (arg0, COMPLEX_TYPE)
- && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
+ && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
return do_mpc_arg1 (arg0, type, mpc_tan);
break;
-
+
CASE_FLT_FN (BUILT_IN_CTANH):
if (validate_arg (arg0, COMPLEX_TYPE)
- && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
+ && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
return do_mpc_arg1 (arg0, type, mpc_tanh);
break;
-
+
CASE_FLT_FN (BUILT_IN_CLOG):
if (validate_arg (arg0, COMPLEX_TYPE)
- && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
+ && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
return do_mpc_arg1 (arg0, type, mpc_log);
break;
-
+
CASE_FLT_FN (BUILT_IN_CSQRT):
if (validate_arg (arg0, COMPLEX_TYPE)
- && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
+ && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
return do_mpc_arg1 (arg0, type, mpc_sqrt);
break;
-
+
#ifdef HAVE_mpc_arc
CASE_FLT_FN (BUILT_IN_CASIN):
if (validate_arg (arg0, COMPLEX_TYPE)
- && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
+ && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
return do_mpc_arg1 (arg0, type, mpc_asin);
break;
-
+
CASE_FLT_FN (BUILT_IN_CACOS):
if (validate_arg (arg0, COMPLEX_TYPE)
- && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
+ && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
return do_mpc_arg1 (arg0, type, mpc_acos);
break;
-
+
CASE_FLT_FN (BUILT_IN_CATAN):
if (validate_arg (arg0, COMPLEX_TYPE)
- && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
+ && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
return do_mpc_arg1 (arg0, type, mpc_atan);
break;
-
+
CASE_FLT_FN (BUILT_IN_CASINH):
if (validate_arg (arg0, COMPLEX_TYPE)
- && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
+ && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
return do_mpc_arg1 (arg0, type, mpc_asinh);
break;
-
+
CASE_FLT_FN (BUILT_IN_CACOSH):
if (validate_arg (arg0, COMPLEX_TYPE)
- && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
+ && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
return do_mpc_arg1 (arg0, type, mpc_acosh);
break;
-
+
CASE_FLT_FN (BUILT_IN_CATANH):
if (validate_arg (arg0, COMPLEX_TYPE)
- && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
+ && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
return do_mpc_arg1 (arg0, type, mpc_atanh);
break;
#endif /* HAVE_mpc_arc */
#endif /* HAVE_mpc */
-
+
CASE_FLT_FN (BUILT_IN_CABS):
return fold_builtin_cabs (loc, arg0, type, fndecl);
if (validate_arg (arg0, REAL_TYPE))
return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
break;
-
+
CASE_FLT_FN (BUILT_IN_EXP):
return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
if (validate_arg (arg0, REAL_TYPE))
return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
break;
-
+
CASE_FLT_FN (BUILT_IN_LOG):
return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
if (validate_arg (arg0, COMPLEX_TYPE)
&& TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
&& validate_arg (arg1, COMPLEX_TYPE)
- && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
+ && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
break;
#endif
ARG2, and ARG3. IGNORE is true if the result of the function call is
ignored. This function returns NULL_TREE if no simplification was
possible. */
-
+
static tree
fold_builtin_4 (location_t loc, tree fndecl,
tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
fixed argument patterns. Foldings that do varargs-to-varargs
transformations, or that match calls with more than 4 arguments,
need to be handled with fold_builtin_varargs instead. */
-
+
#define MAX_ARGS_TO_FOLD_BUILTIN 4
-
+
static tree
fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
{
}
return NULL_TREE;
}
-
+
/* Conveniently construct a function call expression. FNDECL names the
function to be called and ARGLIST is a TREE_LIST of arguments. */
-
+
tree
build_function_call_expr (location_t loc, tree fndecl, tree arglist)
{
/* Conveniently construct a function call expression. FNDECL names the
function to be called, N is the number of arguments, and the "..."
parameters are the argument expressions. */
-
+
tree
build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
{
for (j = skip; j < oldnargs; j++, i++)
buffer[i] = CALL_EXPR_ARG (exp, j);
}
- else
+ else
buffer = CALL_EXPR_ARGP (exp) + skip;
return fold (build_call_array_loc (loc, TREE_TYPE (exp), fn, nargs, buffer));
/* Validate a single argument ARG against a tree code CODE representing
a type. */
-
+
static bool
validate_arg (const_tree arg, enum tree_code code)
{
/* If we don't have a movstr we don't want to emit an strcpy
call. We have to do that if the length of the source string
isn't computable (in that case we can use memcpy probably
- later expanding to a sequence of mov instructions). If we
+ later expanding to a sequence of mov instructions). If we
have movstr instructions we can emit strcpy calls. */
if (!HAVE_movstr)
{
arg = SSA_NAME_VAR (arg);
/* We destructively modify the call to be __builtin_va_start (ap, 0)
- or __builtin_next_arg (0) the first time we see it, after checking
+ or __builtin_next_arg (0) the first time we see it, after checking
the arguments and if needed issuing a warning. */
if (!integer_zerop (arg))
{
int nargs = call_expr_nargs (exp);
/* Verify the required arguments in the original call. */
-
+
if (nargs < 4)
return;
size = CALL_EXPR_ARG (exp, 2);
bool inclusive)
{
tree result = NULL_TREE;
-
+
STRIP_NOPS (arg);
/* To proceed, MPFR must exactly represent the target floating point
mpfr_clear (m);
}
}
-
+
return result;
}
int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
tree result = NULL_TREE;
-
+
STRIP_NOPS (arg1);
STRIP_NOPS (arg2);
mpfr_clears (m1, m2, NULL);
}
}
-
+
return result;
}
int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
tree result = NULL_TREE;
-
+
STRIP_NOPS (arg1);
STRIP_NOPS (arg2);
STRIP_NOPS (arg3);
mpfr_clears (m1, m2, m3, NULL);
}
}
-
+
return result;
}
{
tree const type = TREE_TYPE (arg);
tree result = NULL_TREE;
-
+
STRIP_NOPS (arg);
-
+
/* To proceed, MPFR must exactly represent the target floating point
format, which only happens when the target base equals two. */
if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
mpfr_clear (m);
}
}
-
+
return result;
}
{
tree const type = TREE_TYPE (arg0);
tree result = NULL_TREE;
-
+
STRIP_NOPS (arg0);
STRIP_NOPS (arg1);
-
+
/* To proceed, MPFR must exactly represent the target floating point
format, which only happens when the target base equals two. */
if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
tree result = NULL_TREE;
STRIP_NOPS (arg);
-
+
/* To proceed, MPFR must exactly represent the target floating point
format, which only happens when the target base equals two. Also
verify ARG is a constant and that ARG_SG is an int pointer. */
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
tree result = NULL_TREE;
-
+
STRIP_NOPS (arg);
/* To proceed, MPFR must exactly represent the target floating point
const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
int inexact;
mpc_t m;
-
+
mpc_init2 (m, prec);
mpfr_from_real (mpc_realref(m), re, rnd);
mpfr_from_real (mpc_imagref(m), im, rnd);
int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
tree result = NULL_TREE;
-
+
STRIP_NOPS (arg0);
STRIP_NOPS (arg1);
const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
int inexact;
mpc_t m0, m1;
-
+
mpc_init2 (m0, prec);
mpc_init2 (m1, prec);
mpfr_from_real (mpc_realref(m0), re0, rnd);
DEF_EXT_LIB_BUILTIN (BUILT_IN_FFSLL, "ffsll", BT_FN_INT_LONGLONG, ATTR_CONST_NOTHROW_LIST)
DEF_EXT_LIB_BUILTIN (BUILT_IN_FORK, "fork", BT_FN_PID, ATTR_NOTHROW_LIST)
DEF_GCC_BUILTIN (BUILT_IN_FRAME_ADDRESS, "frame_address", BT_FN_PTR_UINT, ATTR_NULL)
-DEF_LIB_BUILTIN (BUILT_IN_FREE, "free", BT_FN_VOID_PTR, ATTR_NOTHROW_LIST)
+DEF_LIB_BUILTIN (BUILT_IN_FREE, "free", BT_FN_VOID_PTR, ATTR_NOTHROW_LIST)
DEF_GCC_BUILTIN (BUILT_IN_FROB_RETURN_ADDR, "frob_return_addr", BT_FN_PTR_PTR, ATTR_NULL)
DEF_EXT_LIB_BUILTIN (BUILT_IN_GETTEXT, "gettext", BT_FN_STRING_CONST_STRING, ATTR_FORMAT_ARG_1)
DEF_C99_BUILTIN (BUILT_IN_IMAXABS, "imaxabs", BT_FN_INTMAX_INTMAX, ATTR_CONST_NOTHROW_LIST)
void
constant_expression_warning (tree value)
{
- if (warn_overflow && pedantic
+ if (warn_overflow && pedantic
&& (TREE_CODE (value) == INTEGER_CST || TREE_CODE (value) == REAL_CST
|| TREE_CODE (value) == FIXED_CST
|| TREE_CODE (value) == VECTOR_CST
case INTEGER_CST:
warning_at (loc, OPT_Woverflow, "integer overflow in expression");
break;
-
+
case REAL_CST:
warning_at (loc, OPT_Woverflow,
"floating point overflow in expression");
break;
-
+
case FIXED_CST:
warning_at (loc, OPT_Woverflow, "fixed-point overflow in expression");
break;
case VECTOR_CST:
warning_at (loc, OPT_Woverflow, "vector overflow in expression");
break;
-
+
case COMPLEX_CST:
if (TREE_CODE (TREE_REALPART (value)) == INTEGER_CST)
warning_at (loc, OPT_Woverflow,
had CODE_LEFT and CODE_RIGHT, into an expression of type TYPE. */
void
warn_logical_operator (location_t location, enum tree_code code, tree type,
- enum tree_code code_left, tree op_left,
+ enum tree_code code_left, tree op_left,
enum tree_code ARG_UNUSED (code_right), tree op_right)
{
int or_op = (code == TRUTH_ORIF_EXPR || code == TRUTH_OR_EXPR);
if (rhs && TREE_CODE (rhs) == C_MAYBE_CONST_EXPR)
rhs = C_MAYBE_CONST_EXPR_EXPR (rhs);
-
+
/* If this is an OR operation, invert both sides; we will invert
again at the end. */
if (or_op)
in0_p = !in0_p, in1_p = !in1_p;
-
+
/* If both expressions are the same, if we can merge the ranges, and we
can build the range test, return it or it inverted. */
if (lhs && rhs && operand_equal_p (lhs, rhs, 0)
}
else
{
- /* warn_strict_aliasing >= 3. This includes the default (3).
+ /* warn_strict_aliasing >= 3. This includes the default (3).
Only warn if the cast is dereferenced immediately. */
alias_set_type set1 =
get_alias_set (TREE_TYPE (TREE_OPERAND (expr, 0)));
{
case 1:
if (TYPE_MAIN_VARIANT (type) != integer_type_node)
- pedwarn (input_location, OPT_Wmain, "first argument of %q+D should be %<int%>",
+ pedwarn (input_location, OPT_Wmain, "first argument of %q+D should be %<int%>",
decl);
break;
both args are zero-extended or both are sign-extended.
Otherwise, we might change the result.
Eg, (short)-1 | (unsigned short)-1 is (int)-1
- but calculated in (unsigned short) it would be (unsigned short)-1.
+ but calculated in (unsigned short) it would be (unsigned short)-1.
*/
tree shorten_binary_op (tree result_type, tree op0, tree op1, bool bitwise)
{
from signed char and that RESULT_TYPE is long long int.
If we explicitly cast OP0 to RESULT_TYPE, OP0 would look
like
-
+
(long long int) (unsigned int) signed_char
which get_narrower would narrow down to
-
+
(unsigned int) signed char
-
+
If we do not cast OP0 first, get_narrower would return
signed_char, which is inconsistent with the case of the
explicit cast. */
/* Handle the case that OP0 (or OP1) does not *contain* a conversion
but it *requires* conversion to FINAL_TYPE. */
-
+
if ((TYPE_PRECISION (TREE_TYPE (op0))
== TYPE_PRECISION (TREE_TYPE (arg0)))
&& TREE_TYPE (op0) != result_type)
== TYPE_PRECISION (TREE_TYPE (arg1)))
&& TREE_TYPE (op1) != result_type)
unsigned1 = TYPE_UNSIGNED (TREE_TYPE (op1));
-
+
/* Now UNSIGNED0 is 1 if ARG0 zero-extends to FINAL_TYPE. */
-
+
/* For bitwise operations, signedness of nominal type
does not matter. Consider only how operands were extended. */
if (bitwise)
uns = unsigned0;
-
+
/* Note that in all three cases below we refrain from optimizing
an unsigned operation on sign-extended args.
That would not be valid. */
-
+
/* Both args variable: if both extended in same way
from same width, do it in that width.
Do it unsigned if args were zero-extended. */
/* Conversion from boolean to a signed:1 bit-field (which only
can hold the values 0 and -1) doesn't lose information - but
it does change the value. */
- if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
+ if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
warning (OPT_Wconversion,
"conversion to %qT from boolean expression", type);
return;
&& TREE_CODE (type) == INTEGER_TYPE
&& !int_fits_type_p (expr, type))
{
- if (TYPE_UNSIGNED (type) && !TYPE_UNSIGNED (expr_type)
+ if (TYPE_UNSIGNED (type) && !TYPE_UNSIGNED (expr_type)
&& tree_int_cst_sgn (expr) < 0)
warning (OPT_Wsign_conversion,
"negative integer implicitly converted to unsigned type");
tree op1 = TREE_OPERAND (expr, 1);
tree op2 = TREE_OPERAND (expr, 2);
- if ((TREE_CODE (op1) == REAL_CST || TREE_CODE (op1) == INTEGER_CST
+ if ((TREE_CODE (op1) == REAL_CST || TREE_CODE (op1) == INTEGER_CST
|| TREE_CODE (op1) == COND_EXPR)
&& (TREE_CODE (op2) == REAL_CST || TREE_CODE (op2) == INTEGER_CST
|| TREE_CODE (op2) == COND_EXPR))
expr_type = TREE_TYPE (expr);
/* Don't warn for short y; short x = ((int)y & 0xff); */
- if (TREE_CODE (expr) == BIT_AND_EXPR
- || TREE_CODE (expr) == BIT_IOR_EXPR
+ if (TREE_CODE (expr) == BIT_AND_EXPR
+ || TREE_CODE (expr) == BIT_IOR_EXPR
|| TREE_CODE (expr) == BIT_XOR_EXPR)
{
/* If both args were extended from a shortest type,
use that type if that is safe. */
- expr_type = shorten_binary_op (expr_type,
- TREE_OPERAND (expr, 0),
- TREE_OPERAND (expr, 1),
+ expr_type = shorten_binary_op (expr_type,
+ TREE_OPERAND (expr, 0),
+ TREE_OPERAND (expr, 1),
/* bitwise */1);
if (TREE_CODE (expr) == BIT_AND_EXPR)
&& int_fits_type_p (op0, c_common_unsigned_type (type)))
|| (TREE_CODE (op1) == INTEGER_CST
&& int_fits_type_p (op1, c_common_signed_type (type))
- && int_fits_type_p (op1,
+ && int_fits_type_p (op1,
c_common_unsigned_type (type))))
return;
/* If constant is unsigned and fits in the target
type, then the result will also fit. */
else if ((TREE_CODE (op0) == INTEGER_CST
- && unsigned0
+ && unsigned0
&& int_fits_type_p (op0, type))
|| (TREE_CODE (op1) == INTEGER_CST
&& unsigned1
}
}
/* Warn for integer types converted to smaller integer types. */
- if (TYPE_PRECISION (type) < TYPE_PRECISION (expr_type))
+ if (TYPE_PRECISION (type) < TYPE_PRECISION (expr_type))
give_warning = true;
/* When they are the same width but different signedness,
{
tree type_low_bound = TYPE_MIN_VALUE (expr_type);
tree type_high_bound = TYPE_MAX_VALUE (expr_type);
- REAL_VALUE_TYPE real_low_bound
+ REAL_VALUE_TYPE real_low_bound
= real_value_from_int_cst (0, type_low_bound);
- REAL_VALUE_TYPE real_high_bound
+ REAL_VALUE_TYPE real_high_bound
= real_value_from_int_cst (0, type_high_bound);
if (!exact_real_truncate (TYPE_MODE (type), &real_low_bound)
else
conversion_warning (type, expr);
}
- else if (!int_fits_type_p (expr, c_common_unsigned_type (type)))
+ else if (!int_fits_type_p (expr, c_common_unsigned_type (type)))
warning (OPT_Woverflow,
"overflow in implicit constant conversion");
/* No warning for converting 0x80000000 to int. */
if (TREE_TYPE (expr) == type)
return expr;
-
+
result = convert (type, expr);
if (c_inhibit_evaluation_warnings == 0
{
call_expr_arg_iterator iter;
tree arg;
- tmp_before = tmp_nosp = 0;
+ tmp_before = tmp_nosp = 0;
verify_tree (CALL_EXPR_FN (x), &tmp_before, &tmp_nosp, NULL_TREE);
FOR_EACH_CALL_EXPR_ARG (arg, iter, x)
{
&& !(TREE_CODE (primop0) == INTEGER_CST
&& !TREE_OVERFLOW (convert (c_common_signed_type (type),
primop0))))
- warning (OPT_Wtype_limits,
+ warning (OPT_Wtype_limits,
"comparison of unsigned expression >= 0 is always true");
value = truthvalue_true_node;
break;
&& !(TREE_CODE (primop0) == INTEGER_CST
&& !TREE_OVERFLOW (convert (c_common_signed_type (type),
primop0))))
- warning (OPT_Wtype_limits,
+ warning (OPT_Wtype_limits,
"comparison of unsigned expression < 0 is always false");
value = truthvalue_false_node;
break;
if (TREE_CODE (TREE_TYPE (result_type)) == VOID_TYPE)
{
- pedwarn (loc, pedantic ? OPT_pedantic : OPT_Wpointer_arith,
+ pedwarn (loc, pedantic ? OPT_pedantic : OPT_Wpointer_arith,
"pointer of type %<void *%> used in arithmetic");
size_exp = integer_one_node;
}
else if (TREE_CODE (TREE_TYPE (result_type)) == FUNCTION_TYPE)
{
- pedwarn (loc, pedantic ? OPT_pedantic : OPT_Wpointer_arith,
+ pedwarn (loc, pedantic ? OPT_pedantic : OPT_Wpointer_arith,
"pointer to a function used in arithmetic");
size_exp = integer_one_node;
}
else if (TREE_CODE (TREE_TYPE (result_type)) == METHOD_TYPE)
{
- pedwarn (loc, pedantic ? OPT_pedantic : OPT_Wpointer_arith,
+ pedwarn (loc, pedantic ? OPT_pedantic : OPT_Wpointer_arith,
"pointer to member function used in arithmetic");
size_exp = integer_one_node;
}
if (TREE_TYPE (expr) == truthvalue_type_node)
return expr;
expr = build2 (TREE_CODE (expr), truthvalue_type_node,
- c_common_truthvalue_conversion (location,
+ c_common_truthvalue_conversion (location,
TREE_OPERAND (expr, 0)),
c_common_truthvalue_conversion (location,
TREE_OPERAND (expr, 1)));
{
expr = build2 (COMPOUND_EXPR, truthvalue_type_node,
TREE_OPERAND (expr, 1),
- c_common_truthvalue_conversion
+ c_common_truthvalue_conversion
(location, TREE_OPERAND (expr, 0)));
goto ret;
}
if (is_sizeof)
{
if (complain && (pedantic || warn_pointer_arith))
- pedwarn (loc, pedantic ? OPT_pedantic : OPT_Wpointer_arith,
+ pedwarn (loc, pedantic ? OPT_pedantic : OPT_Wpointer_arith,
"invalid application of %<sizeof%> to a function type");
else if (!complain)
return error_mark_node;
{
if (type_code == VOID_TYPE
&& complain && (pedantic || warn_pointer_arith))
- pedwarn (loc, pedantic ? OPT_pedantic : OPT_Wpointer_arith,
+ pedwarn (loc, pedantic ? OPT_pedantic : OPT_Wpointer_arith,
"invalid application of %qs to a void type", op_name);
else if (!complain)
return error_mark_node;
/* Only supported decimal floating point extension if the target
actually supports underlying modes. */
- if (targetm.scalar_mode_supported_p (SDmode)
+ if (targetm.scalar_mode_supported_p (SDmode)
&& targetm.scalar_mode_supported_p (DDmode)
&& targetm.scalar_mode_supported_p (TDmode))
{
/* Case ranges are a GNU extension. */
if (high_value)
- pedwarn (loc, OPT_pedantic,
+ pedwarn (loc, OPT_pedantic,
"range expressions in switch statements are non-standard");
type = TREE_TYPE (cond);
if (!args)
return DEFAULT_INIT_PRIORITY;
-
+
if (!SUPPORTS_INIT_PRIORITY)
{
if (is_destructor)
if (is_destructor)
warning (0,
"destructor priorities from 0 to %d are reserved "
- "for the implementation",
+ "for the implementation",
MAX_RESERVED_INIT_PRIORITY);
else
warning (0,
"constructor priorities from 0 to %d are reserved "
- "for the implementation",
+ "for the implementation",
MAX_RESERVED_INIT_PRIORITY);
}
return pri;
&& current_function_decl != NULL_TREE
&& !TREE_STATIC (decl))
{
- error_at (DECL_SOURCE_LOCATION (decl),
+ error_at (DECL_SOURCE_LOCATION (decl),
"section attribute cannot be specified for "
"local variables");
*no_add_attrs = true;
*no_add_attrs = true;
}
else if ((TREE_CODE (decl) == FUNCTION_DECL && DECL_INITIAL (decl))
- || (TREE_CODE (decl) != FUNCTION_DECL
+ || (TREE_CODE (decl) != FUNCTION_DECL
&& TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
/* A static variable declaration is always a tentative definition,
but the alias is a non-tentative definition which overrides. */
- || (TREE_CODE (decl) != FUNCTION_DECL
+ || (TREE_CODE (decl) != FUNCTION_DECL
&& ! TREE_PUBLIC (decl) && DECL_INITIAL (decl)))
{
error ("%q+D defined both normally and as an alias", decl);
tree position = TREE_VALUE (args);
if (TREE_CODE (position) != INTEGER_CST
- || TREE_INT_CST_HIGH (position)
+ || TREE_INT_CST_HIGH (position)
|| TREE_INT_CST_LOW (position) < 1
|| TREE_INT_CST_LOW (position) > arg_count )
{
- warning (OPT_Wattributes,
+ warning (OPT_Wattributes,
"alloc_size parameter outside range");
*no_add_attrs = true;
return NULL_TREE;
if (TREE_CODE (position) != INTEGER_CST)
{
- warning (OPT_Wattributes,
+ warning (OPT_Wattributes,
"requested position is not an integer constant");
*no_add_attrs = true;
}
bool * ARG_UNUSED (no_add_attrs))
{
tree params;
-
+
/* Ensure we have a function type. */
gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
-
+
params = TYPE_ARG_TYPES (*node);
while (params && ! VOID_TYPE_P (TREE_VALUE (params)))
params = TREE_CHAIN (params);
if (validate_nargs (fndecl, nargs, 6))
{
unsigned i;
-
+
for (i=0; i<5; i++)
if (TREE_CODE (args[i]) != INTEGER_CST)
{
TOKEN, which had the associated VALUE. */
void
-c_parse_error (const char *gmsgid, enum cpp_ttype token_type,
+c_parse_error (const char *gmsgid, enum cpp_ttype token_type,
tree value, unsigned char token_flags)
{
#define catenate_messages(M1, M2) catenate_strings ((M1), (M2), sizeof (M2))
if (token_type == CPP_EOF)
message = catenate_messages (gmsgid, " at end of input");
- else if (token_type == CPP_CHAR
- || token_type == CPP_WCHAR
+ else if (token_type == CPP_CHAR
+ || token_type == CPP_WCHAR
|| token_type == CPP_CHAR16
|| token_type == CPP_CHAR32)
{
free (message);
message = NULL;
}
- else if (token_type == CPP_STRING
- || token_type == CPP_WSTRING
+ else if (token_type == CPP_STRING
+ || token_type == CPP_WSTRING
|| token_type == CPP_STRING16
|| token_type == CPP_STRING32
|| token_type == CPP_UTF8STRING)
/* Make sure we have the canonical MAIN_TYPE. */
hashcode = iterative_hash_object (TYPE_HASH (unqual_elt), hashcode);
- hashcode = iterative_hash_object (TYPE_HASH (TYPE_DOMAIN (main_type)),
+ hashcode = iterative_hash_object (TYPE_HASH (TYPE_DOMAIN (main_type)),
hashcode);
main_type = type_hash_canon (hashcode, main_type);
else if (TYPE_CANONICAL (TREE_TYPE (main_type)) != TREE_TYPE (main_type)
|| (TYPE_CANONICAL (TYPE_DOMAIN (main_type))
!= TYPE_DOMAIN (main_type)))
- TYPE_CANONICAL (main_type)
+ TYPE_CANONICAL (main_type)
= build_array_type (TYPE_CANONICAL (TREE_TYPE (main_type)),
TYPE_CANONICAL (TYPE_DOMAIN (main_type)));
else
|| ((CODE) != INTEGER_CST \
&& (integer_onep (ARG) || integer_zerop (ARG))))
- switch (code)
+ switch (code)
{
case LSHIFT_EXPR:
if (code_left == PLUS_EXPR || code_right == PLUS_EXPR)
The arguments of this function map directly to local variables
of build_binary_op. */
-void
+void
warn_for_sign_compare (location_t location,
- tree orig_op0, tree orig_op1,
- tree op0, tree op1,
+ tree orig_op0, tree orig_op1,
+ tree op0, tree op1,
tree result_type, enum tree_code resultcode)
{
int op0_signed = !TYPE_UNSIGNED (TREE_TYPE (orig_op0));
int op1_signed = !TYPE_UNSIGNED (TREE_TYPE (orig_op1));
int unsignedp0, unsignedp1;
-
+
/* In C++, check for comparison of different enum types. */
if (c_dialect_cxx()
&& TREE_CODE (TREE_TYPE (orig_op0)) == ENUMERAL_TYPE
if (op0_signed)
sop = orig_op0, uop = orig_op1;
- else
+ else
sop = orig_op1, uop = orig_op0;
- STRIP_TYPE_NOPS (sop);
+ STRIP_TYPE_NOPS (sop);
STRIP_TYPE_NOPS (uop);
base_type = (TREE_CODE (result_type) == COMPLEX_TYPE
? TREE_TYPE (result_type) : result_type);
&& int_fits_type_p (TYPE_MAX_VALUE (TREE_TYPE (uop)),
c_common_signed_type (base_type)))
/* OK */;
- else
+ else
warning_at (location,
- OPT_Wsign_compare,
+ OPT_Wsign_compare,
"comparison between signed and unsigned integer expressions");
}
-
+
/* Warn if two unsigned values are being compared in a size larger
than their original size, and one (and only one) is the result of
a `~' operator. This comparison will always fail.
-
+
Also warn if one operand is a constant, and the constant does not
have all bits set that are set in the ~ operand when it is
extended. */
op0 = get_narrower (op0, &unsignedp0);
op1 = get_narrower (op1, &unsignedp1);
-
+
if ((TREE_CODE (op0) == BIT_NOT_EXPR)
^ (TREE_CODE (op1) == BIT_NOT_EXPR))
{
HOST_WIDE_INT constant, mask;
int unsignedp;
unsigned int bits;
-
+
if (host_integerp (op0, 0))
{
primop = op1;
unsignedp = unsignedp0;
constant = tree_low_cst (op1, 0);
}
-
+
bits = TYPE_PRECISION (TREE_TYPE (primop));
if (bits < TYPE_PRECISION (result_type)
&& bits < HOST_BITS_PER_LONG && unsignedp)
if ((mask & constant) != mask)
{
if (constant == 0)
- warning (OPT_Wsign_compare,
+ warning (OPT_Wsign_compare,
"promoted ~unsigned is always non-zero");
else
- warning_at (location, OPT_Wsign_compare,
+ warning_at (location, OPT_Wsign_compare,
"comparison of promoted ~unsigned with constant");
}
}
extern void warn_for_unused_label (tree label);
extern void warn_for_div_by_zero (location_t, tree divisor);
extern void warn_for_sign_compare (location_t,
- tree orig_op0, tree orig_op1,
- tree op0, tree op1,
- tree result_type,
+ tree orig_op0, tree orig_op1,
+ tree op0, tree op1,
+ tree result_type,
enum tree_code resultcode);
extern void set_underlying_type (tree x);
extern bool is_typedef_decl (tree x);
static void builtin_define_type_minmax (const char *, const char *, tree);
static void builtin_define_type_precision (const char *, tree);
static void builtin_define_type_sizeof (const char *, tree);
-static void builtin_define_float_constants (const char *,
+static void builtin_define_float_constants (const char *,
const char *,
const char *,
tree);
/* Define the float.h constants for TYPE using NAME_PREFIX, FP_SUFFIX,
and FP_CAST. */
static void
-builtin_define_float_constants (const char *name_prefix,
- const char *fp_suffix,
- const char *fp_cast,
+builtin_define_float_constants (const char *name_prefix,
+ const char *fp_suffix,
+ const char *fp_cast,
tree type)
{
/* Used to convert radix-based values to base 10 values in several cases.
construct the following numbers directly as a hexadecimal
constants. */
get_max_float (fmt, buf, sizeof (buf));
-
+
sprintf (name, "__%s_MAX__", name_prefix);
builtin_define_with_hex_fp_value (name, type, decimal_dig, buf, fp_suffix, fp_cast);
/* Define __DECx__ constants for TYPE using NAME_PREFIX and SUFFIX. */
static void
-builtin_define_decimal_float_constants (const char *name_prefix,
- const char *suffix,
+builtin_define_decimal_float_constants (const char *name_prefix,
+ const char *suffix,
tree type)
{
const struct real_format *fmt;
/* Compute the minimum representable value. */
sprintf (name, "__%s_MIN__", name_prefix);
sprintf (buf, "1E%d%s", fmt->emin - 1, suffix);
- builtin_define_with_value (name, buf, 0);
+ builtin_define_with_value (name, buf, 0);
/* Compute the maximum representable value. */
sprintf (name, "__%s_MAX__", name_prefix);
*p = 0;
/* fmt->p plus 1, to account for the decimal point and fmt->emax
minus 1 because the digits are nines, not 1.0. */
- sprintf (&buf[fmt->p + 1], "E%d%s", fmt->emax - 1, suffix);
+ sprintf (&buf[fmt->p + 1], "E%d%s", fmt->emax - 1, suffix);
builtin_define_with_value (name, buf, 0);
/* Compute epsilon (the difference between 1 and least value greater
*p++ = '.';
}
*p = 0;
- sprintf (&buf[fmt->p], "1E%d%s", fmt->emin - 1, suffix);
+ sprintf (&buf[fmt->p], "1E%d%s", fmt->emin - 1, suffix);
builtin_define_with_value (name, buf, 0);
}
static void
builtin_define_with_hex_fp_value (const char *macro,
tree type, int digits,
- const char *hex_str,
+ const char *hex_str,
const char *fp_suffix,
const char *fp_cast)
{
sprintf (buf1, "%s%s", dec_str, fp_suffix);
sprintf (buf2, fp_cast, buf1);
sprintf (buf1, "%s=%s", macro, buf2);
-
+
cpp_define (parse_in, buf1);
}
error ("label %q+D used but not defined", p);
DECL_INITIAL (p) = error_mark_node;
}
- else
+ else
warn_for_unused_label (p);
/* Labels go in BLOCK_VARS. */
}
else if (warn_traditional)
{
- warned |= warning (OPT_Wtraditional,
+ warned |= warning (OPT_Wtraditional,
"non-static declaration of %q+D "
"follows static declaration", newdecl);
}
}
else if (warn_traditional)
{
- warned |= warning (OPT_Wtraditional,
+ warned |= warning (OPT_Wtraditional,
"non-static declaration of %q+D "
"follows static declaration", newdecl);
}
if (DECL_DECLARED_INLINE_P (newdecl)
&& lookup_attribute ("noinline", DECL_ATTRIBUTES (olddecl)))
{
- warned |= warning (OPT_Wattributes,
+ warned |= warning (OPT_Wattributes,
"inline declaration of %qD follows "
"declaration with attribute noinline", newdecl);
}
else if (DECL_DECLARED_INLINE_P (olddecl)
&& lookup_attribute ("noinline", DECL_ATTRIBUTES (newdecl)))
{
- warned |= warning (OPT_Wattributes,
+ warned |= warning (OPT_Wattributes,
"declaration of %q+D with attribute "
"noinline follows inline declaration ", newdecl);
}
if (flag_isoc99)
warned = pedwarn (input_location, OPT_Wimplicit_function_declaration,
"implicit declaration of function %qE", id);
- else
- warned = warning (OPT_Wimplicit_function_declaration,
+ else
+ warned = warning (OPT_Wimplicit_function_declaration,
G_("implicit declaration of function %qE"), id);
if (olddecl && warned)
locate_old_decl (olddecl);
if (current_function_decl
/* For invalid programs like this:
-
+
void foo()
const char* p = __FUNCTION__;
-
+
the __FUNCTION__ is believed to appear in K&R style function
parameter declarator. In that case we still don't have
function_scope. */
}
else
{
- if (name)
+ if (name)
pedwarn (input_location, OPT_Wvla,
"ISO C90 forbids variable length array %qE",
name);
else
{
if (name)
- pedwarn_c99 (loc, flag_isoc99 ? 0 : OPT_Wimplicit_int,
+ pedwarn_c99 (loc, flag_isoc99 ? 0 : OPT_Wimplicit_int,
"type defaults to %<int%> in declaration of %qE",
name);
else
- pedwarn_c99 (input_location, flag_isoc99 ? 0 : OPT_Wimplicit_int,
+ pedwarn_c99 (input_location, flag_isoc99 ? 0 : OPT_Wimplicit_int,
"type defaults to %<int%> in type name");
}
}
|| storage_class == csc_typedef))
{
if (storage_class == csc_auto)
- pedwarn (loc,
- (current_scope == file_scope) ? 0 : OPT_pedantic,
+ pedwarn (loc,
+ (current_scope == file_scope) ? 0 : OPT_pedantic,
"function definition declared %<auto%>");
if (storage_class == csc_register)
error_at (loc, "function definition declared %<register%>");
if (pedantic && TREE_CODE (t) == RECORD_TYPE
&& flexible_array_type_p (TREE_TYPE (x)))
- pedwarn (DECL_SOURCE_LOCATION (x), OPT_pedantic,
+ pedwarn (DECL_SOURCE_LOCATION (x), OPT_pedantic,
"invalid use of structure with flexible array member");
if (DECL_NAME (x))
(6.4.4.3/2 in the C99 Standard). GCC allows any integer type as
an extension. */
else if (!int_fits_type_p (value, integer_type_node))
- pedwarn (loc, OPT_pedantic,
+ pedwarn (loc, OPT_pedantic,
"ISO C restricts enumerator values to range of %<int%>");
/* The ISO C Standard mandates enumerators to have type int, even
}
if (warn_about_return_type)
- pedwarn_c99 (loc, flag_isoc99 ? 0
+ pedwarn_c99 (loc, flag_isoc99 ? 0
: (warn_return_type ? OPT_Wreturn_type : OPT_Wimplicit_int),
"return type defaults to %<int%>");
if (flag_isoc99)
pedwarn (DECL_SOURCE_LOCATION (decl),
0, "type of %qD defaults to %<int%>", decl);
- else
+ else
warning_at (DECL_SOURCE_LOCATION (decl),
OPT_Wmissing_parameter_type,
"type of %qD defaults to %<int%>", decl);
c_determine_visibility (fndecl);
/* For GNU C extern inline functions disregard inline limits. */
- if (DECL_EXTERNAL (fndecl)
+ if (DECL_EXTERNAL (fndecl)
&& DECL_DECLARED_INLINE_P (fndecl))
DECL_DISREGARD_INLINE_LIMITS (fndecl) = 1;
case RID_DFLOAT32:
case RID_DFLOAT64:
case RID_DFLOAT128:
- {
+ {
const char *str;
if (i == RID_DFLOAT32)
str = "_Decimal32";
&& C_IS_RESERVED_WORD (scspec));
i = C_RID_CODE (scspec);
if (specs->non_sc_seen_p)
- warning (OPT_Wold_style_declaration,
+ warning (OPT_Wold_style_declaration,
"%qE is not at beginning of declaration", scspec);
switch (i)
{
else if (specs->complex_p)
{
specs->typespec_word = cts_double;
- pedwarn (input_location, OPT_pedantic,
+ pedwarn (input_location, OPT_pedantic,
"ISO C does not support plain %<complex%> meaning "
"%<double complex%>");
}
specs->type = char_type_node;
if (specs->complex_p)
{
- pedwarn (input_location, OPT_pedantic,
+ pedwarn (input_location, OPT_pedantic,
"ISO C does not support complex integer types");
specs->type = build_complex_type (specs->type);
}
: integer_type_node);
if (specs->complex_p)
{
- pedwarn (input_location, OPT_pedantic,
+ pedwarn (input_location, OPT_pedantic,
"ISO C does not support complex integer types");
specs->type = build_complex_type (specs->type);
}
scalar_identity_flag = 0;
if (fli)
{
- while (fli->name != 0
+ while (fli->name != 0
&& strncmp (fli->name, format_chars, strlen (fli->name)))
fli++;
if (fli->name != 0)
}
*value = build_string (tok->val.str.len, (const char *) tok->val.str.text);
break;
-
+
case CPP_PRAGMA:
*value = build_int_cst (NULL, tok->val.pragma);
break;
type = integer_types[itk];
if (itk > itk_unsigned_long
&& (flags & CPP_N_WIDTH) != CPP_N_LARGE)
- emit_diagnostic
+ emit_diagnostic
((c_dialect_cxx () ? cxx_dialect == cxx98 : !flag_isoc99)
? DK_PEDWARN : DK_WARNING,
input_location, OPT_Wlong_long,
- (flags & CPP_N_UNSIGNED)
+ (flags & CPP_N_UNSIGNED)
? "integer constant is too large for %<unsigned long%> type"
: "integer constant is too large for %<long%> type");
}
has any suffixes, cut them off; REAL_VALUE_ATOF/ REAL_VALUE_HTOF
can't handle them. */
copylen = token->val.str.len;
- if (flags & CPP_N_DFLOAT)
+ if (flags & CPP_N_DFLOAT)
copylen -= 2;
- else
+ else
{
if ((flags & CPP_N_WIDTH) != CPP_N_MEDIUM)
/* Must be an F or L or machine defined suffix. */
{
REAL_VALUE_TYPE realvoidmode;
int overflow = real_from_string (&realvoidmode, copy);
- if (overflow < 0 || !REAL_VALUES_EQUAL (realvoidmode, dconst0))
+ if (overflow < 0 || !REAL_VALUES_EQUAL (realvoidmode, dconst0))
warning (OPT_Woverflow, "floating constant truncated to zero");
}
-/* This file contains routines to construct GNU OpenMP constructs,
+/* This file contains routines to construct GNU OpenMP constructs,
called from parsing in the C and C++ front ends.
Copyright (C) 2005, 2007, 2008, 2009 Free Software Foundation, Inc.
input_location, rhs, NULL_TREE);
if (x == error_mark_node)
return error_mark_node;
- gcc_assert (TREE_CODE (x) == MODIFY_EXPR);
+ gcc_assert (TREE_CODE (x) == MODIFY_EXPR);
rhs = TREE_OPERAND (x, 1);
/* Punt the actual generation of atomic operations to common code. */
fail = true;
}
- init = build_modify_expr (elocus, decl, NULL_TREE, NOP_EXPR,
+ init = build_modify_expr (elocus, decl, NULL_TREE, NOP_EXPR,
/* FIXME diagnostics: This should
be the location of the INIT. */
elocus,
/* Prevent resetting the language standard to a C dialect when the driver
has already determined that we're looking at assembler input. */
bool preprocessing_asm_p = (cpp_get_options (parse_in)->lang == CLK_ASM);
-
+
switch (code)
{
default:
global_dc->warning_as_error_requested = value;
break;
- case OPT_Werror_implicit_function_declaration:
+ case OPT_Werror_implicit_function_declaration:
/* For backward compatibility, this is the same as
-Werror=implicit-function-declaration. */
- enable_warning_as_error ("implicit-function-declaration", value, CL_C | CL_ObjC);
+ enable_warning_as_error ("implicit-function-declaration", value, CL_C | CL_ObjC);
break;
case OPT_Wformat:
}
/* -Wimplicit-function-declaration is enabled by default for C99. */
- if (warn_implicit_function_declaration == -1)
+ if (warn_implicit_function_declaration == -1)
warn_implicit_function_declaration = flag_isoc99;
/* If we're allowing C++0x constructs, don't warn about C++0x
/* Wlong-long is disabled by default. It is enabled by:
[-pedantic | -Wtraditional] -std=[gnu|c]++98 ; or
- [-pedantic | -Wtraditional] -std=non-c99 .
+ [-pedantic | -Wtraditional] -std=non-c99 .
Either -Wlong-long or -Wno-long-long override any other settings. */
if (warn_long_long == -1)
{
if (c_parser_next_token_is (parser, CPP_EOF))
{
- pedwarn (c_parser_peek_token (parser)->location, OPT_pedantic,
+ pedwarn (c_parser_peek_token (parser)->location, OPT_pedantic,
"ISO C forbids an empty translation unit");
}
else
}
break;
case CPP_SEMICOLON:
- pedwarn (c_parser_peek_token (parser)->location, OPT_pedantic,
+ pedwarn (c_parser_peek_token (parser)->location, OPT_pedantic,
"ISO C does not allow extra %<;%> outside of a function");
c_parser_consume_token (parser);
break;
C we also allow but diagnose declarations without declaration
specifiers, but only at top level (elsewhere they conflict with
other syntax).
-
+
OpenMP:
-
+
declaration:
threadprivate-directive */
/* Parse any stray semicolon. */
if (c_parser_next_token_is (parser, CPP_SEMICOLON))
{
- pedwarn (c_parser_peek_token (parser)->location, OPT_pedantic,
+ pedwarn (c_parser_peek_token (parser)->location, OPT_pedantic,
"extra semicolon in struct or union specified");
c_parser_consume_token (parser);
continue;
else
{
if (c_parser_next_token_is (parser, CPP_CLOSE_BRACE))
- pedwarn (c_parser_peek_token (parser)->location, 0,
+ pedwarn (c_parser_peek_token (parser)->location, 0,
"no semicolon at end of struct or union");
else
{
tree ret;
if (!specs->type_seen_p)
{
- pedwarn (decl_loc, OPT_pedantic,
+ pedwarn (decl_loc, OPT_pedantic,
"ISO C forbids member declarations with no members");
shadow_tag_warned (specs, pedantic);
ret = NULL_TREE;
/* Parse a sequence of array declarators and parameter lists. */
if (c_parser_next_token_is (parser, CPP_OPEN_SQUARE))
{
- location_t brace_loc = c_parser_peek_token (parser)->location;
+ location_t brace_loc = c_parser_peek_token (parser)->location;
struct c_declarator *declarator;
struct c_declspecs *quals_attrs = build_null_declspecs ();
bool static_seen;
/* Old-style structure member designator. */
set_init_label (c_parser_peek_token (parser)->value);
/* Use the colon as the error location. */
- pedwarn (c_parser_peek_2nd_token (parser)->location, OPT_pedantic,
+ pedwarn (c_parser_peek_2nd_token (parser)->location, OPT_pedantic,
"obsolete use of designated initializer with %<:%>");
c_parser_consume_token (parser);
c_parser_consume_token (parser);
c_parser_consume_token (parser);
set_init_index (first, second);
if (second)
- pedwarn (ellipsis_loc, OPT_pedantic,
+ pedwarn (ellipsis_loc, OPT_pedantic,
"ISO C forbids specifying range of elements to initialize");
}
else
if (c_parser_next_token_is (parser, CPP_EQ))
{
if (!flag_isoc99)
- pedwarn (des_loc, OPT_pedantic,
+ pedwarn (des_loc, OPT_pedantic,
"ISO C90 forbids specifying subobject to initialize");
c_parser_consume_token (parser);
}
else
{
if (des_seen == 1)
- pedwarn (c_parser_peek_token (parser)->location, OPT_pedantic,
+ pedwarn (c_parser_peek_token (parser)->location, OPT_pedantic,
"obsolete use of designated initializer without %<=%>");
else
{
old parser in requiring something after label declarations.
Although they are erroneous if the labels declared aren't defined,
is it useful for the syntax to be this way?
-
+
OpenMP:
-
+
block-item:
openmp-directive
mark_valid_location_for_stdc_pragma (false);
c_parser_declaration_or_fndef (parser, true, true, true, true);
if (last_stmt)
- pedwarn_c90 (loc,
+ pedwarn_c90 (loc,
(pedantic && !flag_isoc99)
? OPT_pedantic
: OPT_Wdeclaration_after_statement,
}
else if (c_parser_next_token_is_keyword (parser, RID_ELSE))
{
- if (parser->in_if_block)
+ if (parser->in_if_block)
{
mark_valid_location_for_stdc_pragma (save_valid_for_pragma);
error_at (loc, """expected %<}%> before %<else%>");
return;
}
- else
+ else
{
error_at (loc, "%<else%> without a previous %<if%>");
c_parser_consume_token (parser);
error_at (c_parser_peek_token (parser)->location,
"a label can only be part of a statement and "
"a declaration is not a statement");
- c_parser_declaration_or_fndef (parser, /*fndef_ok*/ false,
+ c_parser_declaration_or_fndef (parser, /*fndef_ok*/ false,
/*nested*/ true, /*empty_ok*/ false,
/*start_attr_ok*/ true);
}
add_stmt (build_empty_stmt (loc));
c_parser_consume_token (parser);
}
- else
+ else
c_parser_statement_after_labels (parser);
return c_end_compound_stmt (else_loc, block, flag_isoc99);
}
}
/* Parse asm goto labels, a GNU extension.
-
+
asm-goto-operands:
identifier
asm-goto-operands , identifier
if (c_parser_next_token_is (parser, CPP_COLON))
{
tree eptype = NULL_TREE;
- pedwarn (c_parser_peek_token (parser)->location, OPT_pedantic,
+ pedwarn (c_parser_peek_token (parser)->location, OPT_pedantic,
"ISO C forbids omitting the middle term of a ?: expression");
if (TREE_CODE (cond.value) == EXCESS_PRECISION_EXPR)
{
c_parser_compound_statement_nostart (parser);
c_parser_skip_until_found (parser, CPP_CLOSE_PAREN,
"expected %<)%>");
- pedwarn (loc, OPT_pedantic,
+ pedwarn (loc, OPT_pedantic,
"ISO C forbids braced-groups within expressions");
expr.value = c_finish_stmt_expr (brace_loc, stmt);
}
/* Parse any stray semicolon. */
if (c_parser_next_token_is (parser, CPP_SEMICOLON))
{
- pedwarn (c_parser_peek_token (parser)->location, OPT_pedantic,
+ pedwarn (c_parser_peek_token (parser)->location, OPT_pedantic,
"extra semicolon in struct or union specified");
c_parser_consume_token (parser);
continue;
if (c_parser_next_token_is (parser, CPP_SEMICOLON))
{
c_parser_consume_token (parser);
- pedwarn (c_parser_peek_token (parser)->location, OPT_pedantic,
+ pedwarn (c_parser_peek_token (parser)->location, OPT_pedantic,
"extra semicolon in method definition specified");
}
if (!c_parser_next_token_is (parser, CPP_OPEN_BRACE))
switch (c_parser_peek_token (parser)->type)
{
case CPP_SEMICOLON:
- pedwarn (c_parser_peek_token (parser)->location, OPT_pedantic,
+ pedwarn (c_parser_peek_token (parser)->location, OPT_pedantic,
"ISO C does not allow extra %<;%> outside of a function");
c_parser_consume_token (parser);
break;
c_parser_consume_pragma (parser);
c_invoke_pragma_handler (id);
- /* Skip to EOL, but suppress any error message. Those will have been
+ /* Skip to EOL, but suppress any error message. Those will have been
generated by the handler routine through calling error, as opposed
to calling c_parser_error. */
parser->error = true;
binop:
+, *, -, /, &, ^, |, <<, >>
- where x is an lvalue expression with scalar type.
+ where x is an lvalue expression with scalar type.
LOC is the location of the #pragma token. */
section-sequence:
section-directive[opt] structured-block
- section-sequence section-directive structured-block
+ section-sequence section-directive structured-block
SECTIONS_LOC is the location of the #pragma omp sections. */
high = ~high + !low;
low = -low;
}
- sprintf (pp_buffer (pp)->digit_buffer, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
+ sprintf (pp_buffer (pp)->digit_buffer, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
(unsigned HOST_WIDE_INT) high, (unsigned HOST_WIDE_INT) low);
pp_string (pp, pp_buffer (pp)->digit_buffer);
}
static void
pp_c_assignment_expression (c_pretty_printer *pp, tree e)
{
- if (TREE_CODE (e) == MODIFY_EXPR
+ if (TREE_CODE (e) == MODIFY_EXPR
|| TREE_CODE (e) == INIT_EXPR)
{
pp_c_unary_expression (pp, TREE_OPERAND (e, 0));
extern struct c_expr c_expr_sizeof_type (location_t, struct c_type_name *);
extern struct c_expr parser_build_unary_op (location_t, enum tree_code,
struct c_expr);
-extern struct c_expr parser_build_binary_op (location_t,
+extern struct c_expr parser_build_binary_op (location_t,
enum tree_code, struct c_expr,
struct c_expr);
extern tree build_conditional_expr (location_t, tree, bool, tree, tree,
{
TREE_VALUE (n) = composite_type (TREE_TYPE (memb),
TREE_VALUE (p2));
- pedwarn (input_location, OPT_pedantic,
+ pedwarn (input_location, OPT_pedantic,
"function types not truly compatible in ISO C");
goto parm_done;
}
{
TREE_VALUE (n) = composite_type (TREE_TYPE (memb),
TREE_VALUE (p1));
- pedwarn (input_location, OPT_pedantic,
+ pedwarn (input_location, OPT_pedantic,
"function types not truly compatible in ISO C");
goto parm_done;
}
while (TREE_CODE (foo) == COMPONENT_REF)
foo = TREE_OPERAND (foo, 0);
if (TREE_CODE (foo) == VAR_DECL && C_DECL_REGISTER (foo))
- pedwarn (loc, OPT_pedantic,
+ pedwarn (loc, OPT_pedantic,
"ISO C forbids subscripting %<register%> array");
else if (!flag_isoc99 && !lvalue_p (foo))
- pedwarn (loc, OPT_pedantic,
+ pedwarn (loc, OPT_pedantic,
"ISO C90 forbids subscripting non-lvalue array");
}
warn_deprecated_use (ref, NULL_TREE);
/* Recursive call does not count as usage. */
- if (ref != current_function_decl)
+ if (ref != current_function_decl)
{
TREE_USED (ref) = 1;
}
tree tem;
int nargs;
tree *argarray;
-
+
/* Strip NON_LVALUE_EXPRs, etc., since we aren't using as an lvalue. */
STRIP_TYPE_NOPS (function);
&& !strncmp (IDENTIFIER_POINTER (name), "__builtin_", 10))
{
if (require_constant_value)
- result =
+ result =
fold_build_call_array_initializer_loc (loc, TREE_TYPE (fntype),
function, nargs, argarray);
else
warning_at (location, OPT_Waddress,
"comparison with string literal results in unspecified behavior");
- if (TREE_OVERFLOW_P (result.value)
- && !TREE_OVERFLOW_P (arg1.value)
+ if (TREE_OVERFLOW_P (result.value)
+ && !TREE_OVERFLOW_P (arg1.value)
&& !TREE_OVERFLOW_P (arg2.value))
overflow_warning (location, result.value);
if (TREE_CODE (target_type) == VOID_TYPE)
- pedwarn (loc, pedantic ? OPT_pedantic : OPT_Wpointer_arith,
+ pedwarn (loc, pedantic ? OPT_pedantic : OPT_Wpointer_arith,
"pointer of type %<void *%> used in subtraction");
if (TREE_CODE (target_type) == FUNCTION_TYPE)
- pedwarn (loc, pedantic ? OPT_pedantic : OPT_Wpointer_arith,
+ pedwarn (loc, pedantic ? OPT_pedantic : OPT_Wpointer_arith,
"pointer to a function used in subtraction");
/* If the conversion to ptrdiff_type does anything like widening or
else if (typecode == COMPLEX_TYPE)
{
code = CONJ_EXPR;
- pedwarn (location, OPT_pedantic,
+ pedwarn (location, OPT_pedantic,
"ISO C does not support %<~%> for complex conjugation");
if (!noconvert)
arg = default_conversion (arg);
{
tree real, imag;
- pedwarn (location, OPT_pedantic,
+ pedwarn (location, OPT_pedantic,
"ISO C does not support %<++%> and %<--%> on complex types");
arg = stabilize_reference (arg);
|| TREE_CODE (TREE_TYPE (argtype)) == VOID_TYPE)
{
if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
- pedwarn (location, pedantic ? OPT_pedantic : OPT_Wpointer_arith,
+ pedwarn (location, pedantic ? OPT_pedantic : OPT_Wpointer_arith,
"wrong type argument to increment");
else
- pedwarn (location, pedantic ? OPT_pedantic : OPT_Wpointer_arith,
+ pedwarn (location, pedantic ? OPT_pedantic : OPT_Wpointer_arith,
"wrong type argument to decrement");
}
&& CONVERT_EXPR_P (TREE_OPERAND (expr1, 1)))
; /* (void) a, (void) b, c */
else
- warning_at (loc, OPT_Wunused_value,
+ warning_at (loc, OPT_Wunused_value,
"left-hand operand of comma expression has no effect");
}
}
while (TREE_CODE (in_type) == POINTER_TYPE);
}
-/* Build an expression representing a cast to type TYPE of expression EXPR.
+/* Build an expression representing a cast to type TYPE of expression EXPR.
LOC is the location of the cast-- typically the open paren of the cast. */
tree
{
if (TREE_CODE (type) == RECORD_TYPE
|| TREE_CODE (type) == UNION_TYPE)
- pedwarn (loc, OPT_pedantic,
+ pedwarn (loc, OPT_pedantic,
"ISO C forbids casting nonscalar to the same type");
}
else if (TREE_CODE (type) == UNION_TYPE)
tree
build_modify_expr (location_t location, tree lhs, tree lhs_origtype,
- enum tree_code modifycode,
+ enum tree_code modifycode,
location_t rhs_loc, tree rhs, tree rhs_origtype)
{
tree result;
}
if (!fundecl || !DECL_IN_SYSTEM_HEADER (fundecl))
- pedwarn (location, OPT_pedantic,
+ pedwarn (location, OPT_pedantic,
"ISO C prohibits argument conversion to union type");
rhs = fold_convert_loc (location, TREE_TYPE (memb), rhs);
/* ANSI wants warnings about out-of-range constant initializers. */
STRIP_TYPE_NOPS (value);
- if (TREE_STATIC (decl))
+ if (TREE_STATIC (decl))
constant_expression_warning (value);
/* Check if we need to set array size from compound literal size. */
pedwarn (location, opt, "(near initialization for %qs)", ofwhat);
}
-/* Issue a warning for a bad initializer component.
+/* Issue a warning for a bad initializer component.
OPT is the OPT_W* value corresponding to the warning option that
controls this warning. MSGID identifies the message. The
&& TREE_CODE (type) == ARRAY_TYPE
&& TREE_CODE (expr.value) == STRING_CST
&& expr.original_code != STRING_CST)
- pedwarn_init (input_location, OPT_pedantic,
+ pedwarn_init (input_location, OPT_pedantic,
"array initialized from parenthesized string constant");
}
if ((warn_return_type || flag_isoc99)
&& valtype != 0 && TREE_CODE (valtype) != VOID_TYPE)
{
- pedwarn_c99 (loc, flag_isoc99 ? 0 : OPT_Wreturn_type,
+ pedwarn_c99 (loc, flag_isoc99 ? 0 : OPT_Wreturn_type,
"%<return%> with no value, in "
"function returning non-void");
no_warning = true;
{
current_function_returns_null = 1;
if (TREE_CODE (TREE_TYPE (retval)) != VOID_TYPE)
- pedwarn (loc, 0,
+ pedwarn (loc, 0,
"%<return%> with a value, in function returning void");
- else
+ else
pedwarn (loc, OPT_pedantic, "ISO C forbids "
"%<return%> with expression, in function returning void");
}
{
result_type = type0;
if (pedantic)
- pedwarn (location, OPT_pedantic,
+ pedwarn (location, OPT_pedantic,
"ordered comparison of pointer with integer zero");
else if (extra_warnings)
warning_at (location, OPT_Wextra,
else if (code1 == POINTER_TYPE && null_pointer_constant_p (orig_op0))
{
result_type = type1;
- pedwarn (location, OPT_pedantic,
+ pedwarn (location, OPT_pedantic,
"ordered comparison of pointer with integer zero");
}
else if (code0 == POINTER_TYPE && code1 == INTEGER_TYPE)
if (shorten && none_complex)
{
final_type = result_type;
- result_type = shorten_binary_op (result_type, op0, op1,
+ result_type = shorten_binary_op (result_type, op0, op1,
shorten == -1);
}
else if (TYPE_CANONICAL (element_type) != element_type
|| (domain && TYPE_CANONICAL (domain) != domain))
{
- tree unqualified_canon
+ tree unqualified_canon
= build_array_type (TYPE_CANONICAL (element_type),
- domain? TYPE_CANONICAL (domain)
+ domain? TYPE_CANONICAL (domain)
: NULL_TREE);
- TYPE_CANONICAL (t)
+ TYPE_CANONICAL (t)
= c_build_qualified_type (unqualified_canon, type_quals);
}
else
for (j = 1; j <= MOVE_MAX_WORDS; j++)
regno_save_mem[i][j] = 0;
save_slots_num = 0;
-
+
}
/* The structure represents a hard register which should be saved
{
const struct saved_hard_reg *p1 = *(struct saved_hard_reg * const *) v1p;
const struct saved_hard_reg *p2 = *(struct saved_hard_reg * const *) v2p;
-
+
if (flag_omit_frame_pointer)
{
if (p1->call_freq - p2->call_freq != 0)
int best_slot_num;
int prev_save_slots_num;
rtx prev_save_slots[FIRST_PSEUDO_REGISTER];
-
+
initiate_saved_hard_regs ();
/* Create hard reg saved regs. */
for (chain = reload_insn_chain; chain != 0; chain = next)
{
int r = reg_renumber[regno];
int bound;
-
+
if (r < 0)
continue;
-
+
bound = r + hard_regno_nregs[r][PSEUDO_REGNO_MODE (regno)];
for (; r < bound; r++)
if (TEST_HARD_REG_BIT (used_regs, r))
{
int r = reg_renumber[regno];
int bound;
-
+
if (r < 0)
continue;
/* Now run through all the call-used hard-registers and allocate
space for them in the caller-save area. Try to allocate space
in a manner which allows multi-register saves/restores to be done. */
-
+
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
for (j = MOVE_MAX_WORDS; j > 0; j--)
{
int do_save = 1;
-
+
/* If no mode exists for this size, try another. Also break out
if we have already saved this hard register. */
if (regno_save_mode[i][j] == VOIDmode || regno_save_mem[i][1] != 0)
continue;
-
+
/* See if any register in this group has been saved. */
for (k = 0; k < j; k++)
if (regno_save_mem[i + k][1])
}
if (! do_save)
continue;
-
+
for (k = 0; k < j; k++)
if (! TEST_HARD_REG_BIT (hard_regs_used, i + k))
{
}
if (! do_save)
continue;
-
+
/* We have found an acceptable mode to store in. Since
hard register is always saved in the widest mode
available, the mode may be wider than necessary, it is
= assign_stack_local_1 (regno_save_mode[i][j],
GET_MODE_SIZE (regno_save_mode[i][j]),
0, true);
-
+
/* Setup single word save area just in case... */
for (k = 0; k < j; k++)
/* This should not depend on WORDS_BIG_ENDIAN.
}
/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
- CALL_EXPR EXP.
+ CALL_EXPR EXP.
NUM_ACTUALS is the total number of parameters.
compute and return the final value for MUST_PREALLOCATE. */
static int
-finalize_must_preallocate (int must_preallocate, int num_actuals,
+finalize_must_preallocate (int must_preallocate, int num_actuals,
struct arg_data *args, struct args_size *args_size)
{
/* See if we have or want to preallocate stack space.
EXIT_BLOCK_PTR_FOR_FUNCTION (the_fun)
= GGC_CNEW (struct basic_block_def);
EXIT_BLOCK_PTR_FOR_FUNCTION (the_fun)->index = EXIT_BLOCK;
- ENTRY_BLOCK_PTR_FOR_FUNCTION (the_fun)->next_bb
+ ENTRY_BLOCK_PTR_FOR_FUNCTION (the_fun)->next_bb
= EXIT_BLOCK_PTR_FOR_FUNCTION (the_fun);
- EXIT_BLOCK_PTR_FOR_FUNCTION (the_fun)->prev_bb
+ EXIT_BLOCK_PTR_FOR_FUNCTION (the_fun)->prev_bb
= ENTRY_BLOCK_PTR_FOR_FUNCTION (the_fun);
}
\f
SET_BASIC_BLOCK (ENTRY_BLOCK, ENTRY_BLOCK_PTR);
SET_BASIC_BLOCK (EXIT_BLOCK, EXIT_BLOCK_PTR);
-
+
if (df)
df_compact_blocks ();
- else
+ else
{
basic_block bb;
-
+
i = NUM_FIXED_BLOCKS;
FOR_EACH_BB (bb)
{
basic_block bb;
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
- bb->flags = (BB_PARTITION (bb)
+ bb->flags = (BB_PARTITION (bb)
| (bb->flags & (BB_DISABLE_SCHEDULE + BB_RTL + BB_NON_LOCAL_GOTO_TARGET)));
}
\f
fprintf (file, HOST_WIDEST_INT_PRINT_DEC, bb->count);
fprintf (file, ", freq %i", bb->frequency);
/* Both maybe_hot_bb_p & probably_never_executed_bb_p functions
- crash without cfun. */
+ crash without cfun. */
if (cfun && maybe_hot_bb_p (bb))
fputs (", maybe hot", file);
if (cfun && probably_never_executed_bb_p (bb))
/* Dump the register info to FILE. */
-void
+void
dump_reg_info (FILE *file)
{
unsigned int i, max = max_reg_num ();
for (i = FIRST_PSEUDO_REGISTER; i < max; i++)
{
enum reg_class rclass, altclass;
-
+
if (regstat_n_sets_and_refs)
fprintf (file, "\nRegister %d used %d times across %d insns",
i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
else if (df)
fprintf (file, "\nRegister %d used %d times across %d insns",
i, DF_REG_USE_COUNT (i) + DF_REG_DEF_COUNT (i), REG_LIVE_LENGTH (i));
-
+
if (REG_BASIC_BLOCK (i) >= NUM_FIXED_BLOCKS)
fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
if (regstat_n_sets_and_refs)
if (regno_reg_rtx[i] != NULL
&& PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
-
+
rclass = reg_preferred_class (i);
altclass = reg_alternate_class (i);
if (rclass != GENERAL_REGS || altclass != ALL_REGS)
reg_class_names[(int) rclass],
reg_class_names[(int) altclass]);
}
-
+
if (regno_reg_rtx[i] != NULL && REG_POINTER (regno_reg_rtx[i]))
fputs ("; pointer", file);
fputs (".\n", file);
true, unreachable blocks are deleted. */
int
-post_order_compute (int *post_order, bool include_entry_exit,
+post_order_compute (int *post_order, bool include_entry_exit,
bool delete_unreachable)
{
edge_iterator *stack;
post_order[post_order_num++] = ENTRY_BLOCK;
count = post_order_num;
}
- else
+ else
count = post_order_num + 2;
-
+
/* Delete the unreachable blocks if some were found and we are
supposed to do it. */
if (delete_unreachable && (count != n_basic_blocks))
for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
{
next_bb = b->next_bb;
-
+
if (!(TEST_BIT (visited, b->index)))
delete_basic_block (b);
}
-
+
tidy_fallthru_edges ();
}
}
-/* Helper routine for inverted_post_order_compute.
+/* Helper routine for inverted_post_order_compute.
BB has to belong to a region of CFG
unreachable by inverted traversal from the exit.
i.e. there's no control flow path from ENTRY to EXIT
This can happen in two cases - if there's an infinite loop
or if there's a block that has no successor
(call to a function with no return).
- Some RTL passes deal with this condition by
- calling connect_infinite_loops_to_exit () and/or
+ Some RTL passes deal with this condition by
+ calling connect_infinite_loops_to_exit () and/or
add_noreturn_fake_exit_edges ().
However, those methods involve modifying the CFG itself
which may not be desirable.
with no successors can't visit all blocks.
To solve this problem, we first do inverted traversal
starting from the blocks with no successor.
- And if there's any block left that's not visited by the regular
+ And if there's any block left that's not visited by the regular
inverted traversal from EXIT,
those blocks are in such problematic region.
- Among those, we find one block that has
+ Among those, we find one block that has
any visited predecessor (which is an entry into such a region),
- and start looking for a "dead end" from that block
+ and start looking for a "dead end" from that block
and do another inverted traversal from that block. */
int
if (EDGE_COUNT (bb->succs) == 0)
{
/* Push the initial edge on to the stack. */
- if (EDGE_COUNT (bb->preds) > 0)
+ if (EDGE_COUNT (bb->preds) > 0)
{
stack[sp++] = ei_start (bb->preds);
SET_BIT (visited, bb->index);
}
}
- do
+ do
{
bool has_unvisited_bb = false;
}
}
- /* Detect any infinite loop and activate the kludge.
+ /* Detect any infinite loop and activate the kludge.
Note that this doesn't check EXIT_BLOCK itself
since EXIT_BLOCK is always added after the outer do-while loop. */
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
if (has_unvisited_bb && sp == 0)
{
- /* No blocks are reachable from EXIT at all.
+ /* No blocks are reachable from EXIT at all.
Find a dead-end from the ENTRY, and restart the iteration. */
basic_block be = dfs_find_deadend (ENTRY_BLOCK_PTR);
gcc_assert (be != NULL);
stack[sp++] = ei_start (be->preds);
}
- /* The only case the below while fires is
+ /* The only case the below while fires is
when there's an infinite loop. */
}
while (sp);
REV_POST_ORDER is nonzero, return the reverse completion number for each
node. Returns the number of nodes visited. A depth first search
tries to get as far away from the starting point as quickly as
- possible.
+ possible.
pre_order is a really a preorder numbering of the graph.
rev_post_order is really a reverse postorder numbering of the graph.
*/
int
-pre_and_rev_post_order_compute (int *pre_order, int *rev_post_order,
+pre_and_rev_post_order_compute (int *pre_order, int *rev_post_order,
bool include_entry_exit)
{
edge_iterator *stack;
if (rev_post_order)
rev_post_order[rev_post_order_num--] = ENTRY_BLOCK;
}
- else
+ else
rev_post_order_num -= NUM_FIXED_BLOCKS;
/* Allocate bitmap to track nodes that have been visited. */
static sbitmap visited;
static unsigned v_size;
-#define MARK_VISITED(BB) (SET_BIT (visited, (BB)->index))
-#define UNMARK_VISITED(BB) (RESET_BIT (visited, (BB)->index))
-#define VISITED_P(BB) (TEST_BIT (visited, (BB)->index))
+#define MARK_VISITED(BB) (SET_BIT (visited, (BB)->index))
+#define UNMARK_VISITED(BB) (RESET_BIT (visited, (BB)->index))
+#define VISITED_P(BB) (TEST_BIT (visited, (BB)->index))
/* Resize the VISITED sbitmap if necessary. */
- size = last_basic_block;
+ size = last_basic_block;
if (size < 10)
size = 10;
{
tree t;
enum gimple_rhs_class grhs_class;
-
+
grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
if (grhs_class == GIMPLE_BINARY_RHS)
gcc_assert (index < stack_vars_conflict_alloc);
return stack_vars_conflict[index];
}
-
+
/* Returns true if TYPE is or contains a union type. */
static bool
/* A subroutine of expand_used_vars. Expand one variable according to
its flavor. Variables to be placed on the stack are not actually
- expanded yet, merely recorded.
+ expanded yet, merely recorded.
When REALLY_EXPAND is false, only add stack values to be allocated.
Return stack usage this variable is supposed to take.
*/
}
/* Prepare for expanding variables. */
-static void
+static void
init_vars_expansion (void)
{
tree t;
return (rtx) *elt;
/* Find the tree label if it is present. */
-
+
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
lab_stmt = gsi_stmt (gsi);
/* Ignore this stmt if it is in the list of
replaceable expressions. */
if (SA.values
- && bitmap_bit_p (SA.values,
+ && bitmap_bit_p (SA.values,
SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
continue;
}
if (! SUPPORTS_STACK_ALIGNMENT)
return;
-
+
if (cfun->calls_alloca
|| cfun->has_nonlocal_label
|| crtl->has_nonlocal_goto)
/* Target has to redefine TARGET_GET_DRAP_RTX to support stack
alignment. */
gcc_assert (targetm.calls.get_drap_rtx != NULL);
- drap_rtx = targetm.calls.get_drap_rtx ();
+ drap_rtx = targetm.calls.get_drap_rtx ();
/* stack_realign_drap and drap_rtx must match. */
gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
if (warn_stack_protect)
{
if (cfun->calls_alloca)
- warning (OPT_Wstack_protector,
+ warning (OPT_Wstack_protector,
"not protecting local variables: variable length buffer");
if (has_short_buffer && !crtl->stack_protect_guard)
- warning (OPT_Wstack_protector,
+ warning (OPT_Wstack_protector,
"not protecting function: no buffer at least %d bytes long",
(int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
}
&& dummy->loop_father->header == dummy
&& dummy->loop_father->latch == e_src)
dummy->loop_father->latch = jump;
-
+
if (new_bb_cbk != NULL)
new_bb_cbk (jump);
}
/* Creates a new basic block just after basic block B by splitting
everything after specified instruction I. */
basic_block (*split_block) (basic_block b, void * i);
-
+
/* Move block B immediately after block A. */
bool (*move_block_after) (basic_block b, basic_block a);
free_original_copy_tables ();
if (stay_in_cfglayout_mode)
initialize_original_copy_tables ();
-
+
/* Finally, put basic_block_info in the new order. */
compact_blocks ();
}
FOR_EACH_EDGE (e, ei, bb->succs)
if (e->flags & EDGE_FALLTHRU)
break;
-
+
if (e && !can_fallthru (e->src, e->dest))
force_nonfallthru (e);
}
profile is usually too flat and unreliable for this (and it is mostly based
on the loop structure of the program, so it does not make much sense to
derive the loop structure from it). */
-
+
static edge
find_subloop_latch_edge_by_profile (VEC (edge, heap) *latches)
{
edge_iterator ei;
edge e, new_entry;
struct loop *new_loop;
-
+
mfb_reis_set = pointer_set_create ();
FOR_EACH_EDGE (e, ei, loop->header->preds)
{
/* Gets body of a LOOP sorted via provided BB_COMPARATOR. */
basic_block *
-get_loop_body_in_custom_order (const struct loop *loop,
+get_loop_body_in_custom_order (const struct loop *loop,
int (*bb_comparator) (const void *, const void *))
{
basic_block *bbs = get_loop_body (loop);
for (; exit; exit = next)
{
next = exit->next_e;
-
+
exit->next->prev = exit->prev;
exit->prev->next = exit->next;
exit->next_e = exits;
exits = exit;
}
- }
+ }
if (!exits && new_edge)
return;
exit = get_exit_descriptions (e);
if (!exit)
{
- error ("Exit %d->%d not recorded",
+ error ("Exit %d->%d not recorded",
e->src->index, e->dest->index);
err = 1;
}
if (eloops != 0)
{
- error ("Wrong list of exited loops for edge %d->%d",
+ error ("Wrong list of exited loops for edge %d->%d",
e->src->index, e->dest->index);
err = 1;
}
b) it is consistent with the result of number_of_iterations_exit. */
double_int bound;
- /* True if the statement will cause the loop to be leaved the (at most)
+ /* True if the statement will cause the loop to be leaved the (at most)
BOUND + 1-st time it is executed, that is, all the statements after it
are executed at most BOUND times. */
bool is_exit;
unsigned);
extern basic_block *get_loop_body_in_dom_order (const struct loop *);
extern basic_block *get_loop_body_in_bfs_order (const struct loop *);
-extern basic_block *get_loop_body_in_custom_order (const struct loop *,
+extern basic_block *get_loop_body_in_custom_order (const struct loop *,
int (*) (const void *, const void *));
extern VEC (edge, heap) *get_loop_exit_edges (const struct loop *);
tree *, tree *, struct loop *);
extern struct loop * duplicate_loop (struct loop *, struct loop *);
extern void duplicate_subloops (struct loop *, struct loop *);
-extern bool duplicate_loop_to_header_edge (struct loop *, edge,
+extern bool duplicate_loop_to_header_edge (struct loop *, edge,
unsigned, sbitmap, edge,
VEC (edge, heap) **, int);
extern struct loop *loopify (edge, edge,
placement of subloops of FROM->loop_father, that might also be altered due
to this change; the condition for them is similar, except that instead of
successors we consider edges coming out of the loops.
-
+
If the changes may invalidate the information about irreducible regions,
IRRED_INVALIDATED is set to true. */
{
SET_BIT (seen, ae->dest->index);
bord_bbs[n_bord_bbs++] = ae->dest;
-
+
if (ae->flags & EDGE_IRREDUCIBLE_LOOP)
irred_invalidated = true;
}
}
/* Creates an if region as shown above. CONDITION is used to create
- the test for the if.
+ the test for the if.
|
| ------------- -------------
succ_bb = entry_edge->dest;
cond_bb = split_edge (entry_edge);
-
+
/* Insert condition in cond_bb. */
gsi = gsi_last_bb (cond_bb);
simple_cond =
cond_stmt = gimple_build_cond_from_tree (simple_cond, NULL_TREE, NULL_TREE);
gsi = gsi_last_bb (cond_bb);
gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
-
+
join_bb = split_edge (single_succ_edge (cond_bb));
e_true = single_succ_edge (cond_bb);
condition stated in description of fix_loop_placement holds for them.
It is used in case when we removed some edges coming out of LOOP, which
may cause the right placement of LOOP inside loop tree to change.
-
+
IRRED_INVALIDATED is set to true if a change in the loop structures might
invalidate the information about irreducible regions. */
{
edge e;
edge_iterator ei;
-
+
FOR_EACH_EDGE (e, ei, block->preds)
if (e->src->loop_father == loop)
return true;
CP_SIMPLE_PREHEADERS is set in FLAGS, we only force LOOP to have single
entry; otherwise we also force preheader block to have only one successor.
When CP_FALLTHRU_PREHEADERS is set in FLAGS, we force the preheader block
- to be a fallthru predecessor to the loop header and to have only
+ to be a fallthru predecessor to the loop header and to have only
predecessors from outside of the loop.
The function also updates dominators. */
if (nentry == 1)
{
bool need_forwarder_block = false;
-
+
/* We do not allow entry block to be the loop preheader, since we
cannot emit code there. */
if (single_entry->src == ENTRY_BLOCK_PTR)
if (dump_file)
fprintf (dump_file, "Created preheader block for loop %i\n",
loop->num);
-
+
if (flags & CP_FALLTHRU_PREHEADERS)
gcc_assert ((single_succ_edge (dummy)->flags & EDGE_FALLTHRU)
&& !JUMP_P (BB_END (dummy)));
is the ratio by that the frequencies in the original loop should
be scaled. ELSE_SCALE is the ratio by that the frequencies in the
new loop should be scaled.
-
+
If PLACE_AFTER is true, we place the new loop after LOOP in the
instruction stream, otherwise it is placed before LOOP. */
}
/* Update BLOCK_FOR_INSN of insns between BEGIN and END
- (or BARRIER if found) and notify df of the bb change.
+ (or BARRIER if found) and notify df of the bb change.
The insn chain range is inclusive
(i.e. both BEGIN and END will be updated. */
if (abnormal_edge_flags)
make_edge (src, target, abnormal_edge_flags);
- df_mark_solutions_dirty ();
+ df_mark_solutions_dirty ();
return new_bb;
}
s_indent = (char *) alloca ((size_t) indent + 1);
memset (s_indent, ' ', (size_t) indent);
s_indent[indent] = '\0';
-
+
if (df)
{
df_dump_top (bb, outf);
{
edge e;
edge_iterator ei;
-
+
fprintf (outf, ";; Start of basic block (");
FOR_EACH_EDGE (e, ei, bb->preds)
fprintf (outf, " %d", e->src->index);
}
/* Like cgraph_create_edge walk the clone tree and update all clones sharing
- same function body.
-
+ same function body.
+
TODO: COUNT and LOOP_DEPTH should be properly distributed based on relative
frequencies of the clones. */
}
/* Create callgraph node clone with new declaration. The actual body will
- be copied later at compilation stage.
+ be copied later at compilation stage.
TODO: after merging in ipa-sra use function call notes instead of args_to_skip
bitmap interface.
GIMPLE.
The function is assumed to be reachable and have address taken (so no
- API breaking optimizations are performed on it).
+ API breaking optimizations are performed on it).
Main work done by this function is to enqueue the function for later
processing to avoid need the passes to be re-entrant. */
ABSTRACT_DECL_ORIGIN of a reachable function. */
unsigned abstract_and_needed : 1;
/* Set when function is reachable by call from other function
- that is either reachable or needed.
+ that is either reachable or needed.
This flag is computed at original cgraph construction and then
updated in cgraph_remove_unreachable_nodes. Note that after
cgraph_remove_unreachable_nodes cgraph still can contain unreachable
cgraph_inline_failed_t inline_failed;
/* Expected number of executions: calculated in profile.c. */
gcov_type count;
- /* Expected frequency of executions within the function.
+ /* Expected frequency of executions within the function.
When set to CGRAPH_FREQ_BASE, the edge is expected to be called once
per function call. The range is 0 to CGRAPH_FREQ_MAX. */
int frequency;
struct GTY(()) constant_descriptor_tree {
/* A MEM for the constant. */
rtx rtl;
-
+
/* The value of the constant. */
tree value;
#include "tree-pass.h"
/* Walk tree and record all calls and references to functions/variables.
- Called via walk_tree: TP is pointer to tree to be examined.
+ Called via walk_tree: TP is pointer to tree to be examined.
When DATA is non-null, record references to callgraph.
*/
};
/* Record references to functions and other variables present in the
- initial value of DECL, a variable.
+ initial value of DECL, a variable.
When ONLY_VARS is true, we mark needed only variables, not functions. */
void
record_references_in_initializer (tree decl, bool only_vars)
{
struct pointer_set_t *visited_nodes = pointer_set_create ();
- walk_tree (&DECL_INITIAL (decl), record_reference,
+ walk_tree (&DECL_INITIAL (decl), record_reference,
only_vars ? NULL : decl, visited_nodes);
pointer_set_destroy (visited_nodes);
}
/* When target does not have ctors and dtors, we call all constructor
and destructor by special initialization/destruction function
- recognized by collect2.
-
+ recognized by collect2.
+
When we are going to build this function, collect all constructors and
destructors and turn them into normal functions. */
f2 = *(const tree *)p2;
priority1 = DECL_INIT_PRIORITY (f1);
priority2 = DECL_INIT_PRIORITY (f2);
-
+
if (priority1 < priority2)
return -1;
else if (priority1 > priority2)
f2 = *(const tree *)p2;
priority1 = DECL_FINI_PRIORITY (f1);
priority2 = DECL_FINI_PRIORITY (f2);
-
+
if (priority1 < priority2)
return -1;
else if (priority1 > priority2)
{
gcc_assert (!targetm.have_ctors_dtors);
qsort (VEC_address (tree, static_ctors),
- VEC_length (tree, static_ctors),
+ VEC_length (tree, static_ctors),
sizeof (tree),
compare_ctor);
build_cdtor (/*ctor_p=*/true,
VEC_address (tree, static_ctors),
- VEC_length (tree, static_ctors));
+ VEC_length (tree, static_ctors));
VEC_truncate (tree, static_ctors, 0);
}
{
gcc_assert (!targetm.have_ctors_dtors);
qsort (VEC_address (tree, static_dtors),
- VEC_length (tree, static_dtors),
+ VEC_length (tree, static_dtors),
sizeof (tree),
compare_dtor);
build_cdtor (/*ctor_p=*/false,
VEC_address (tree, static_dtors),
- VEC_length (tree, static_dtors));
+ VEC_length (tree, static_dtors));
VEC_truncate (tree, static_dtors, 0);
}
}
TREE_MAP is a mapping of tree nodes we want to replace with
new ones (according to results of prior analysis).
OLD_VERSION_NODE is the node that is versioned.
- It returns the new version's cgraph node.
+ It returns the new version's cgraph node.
ARGS_TO_SKIP lists arguments to be omitted from functions
*/
/* Update the call_expr on the edges to call the new version node. */
update_call_expr (new_version_node);
-
+
cgraph_call_function_insertion_hooks (new_version_node);
return new_version_node;
}
DEFCIFCODE(MAX_INLINE_INSNS_AUTO_LIMIT,
N_("--param max-inline-insns-auto limit reached"))
DEFCIFCODE(INLINE_UNIT_GROWTH_LIMIT,
- N_("--param inline-unit-growth limit reached"))
+ N_("--param inline-unit-growth limit reached"))
/* Recursive inlining. */
DEFCIFCODE(RECURSIVE_INLINING, N_("recursive inlining"))
enum scanfilter_masks {
SCAN_NOTHING = 0,
- SCAN_CTOR = 1 << SYM_CTOR,
+ SCAN_CTOR = 1 << SYM_CTOR,
SCAN_DTOR = 1 << SYM_DTOR,
SCAN_INIT = 1 << SYM_INIT,
SCAN_FINI = 1 << SYM_FINI,
char **ld1_argv;
const char **ld1;
bool use_plugin = false;
-
+
/* The kinds of symbols we will have to consider when scanning the
outcome of a first pass link. This is ALL to start with, then might
be adjusted before getting to the first pass link per se, typically on
would otherwise reference them all, hence drag all the corresponding
objects even if nothing else is referenced. */
{
- const char **export_object_lst
+ const char **export_object_lst
= CONST_CAST2 (const char **, char **, object_lst);
-
+
struct id *list = libs.first;
   /* Compute the filter to use from the current one, do the scan, then adjust
the "current" filter to remove what we just included here. This will
control whether we need a first pass link later on or not, and what
will remain to be scanned there. */
-
+
scanfilter this_filter
= shared_obj ? ld1_filter : (ld1_filter & ~SCAN_DWEH);
-
+
while (export_object_lst < object)
scan_prog_file (*export_object_lst++, PASS_OBJ, this_filter);
-
+
for (; list; list = list->next)
scan_prog_file (list->name, PASS_FIRST, this_filter);
-
+
ld1_filter = ld1_filter & ~this_filter;
}
/* Load the program, searching all libraries and attempting to provide
undefined symbols from repository information.
-
+
   If -r was given, or if they will be run via some other method, do not build the
- constructor or destructor list, just return now. */
+ constructor or destructor list, just return now. */
{
bool early_exit
= rflag || (! DO_COLLECT_EXPORT_LIST && ! do_collecting);
objects and libraries has performed above. In the !shared_obj case, we
expect the relevant tables to be dragged together with their associated
functions from precise cross reference insertions by the compiler. */
-
+
if (early_exit || ld1_filter != SCAN_NOTHING)
do_tlink (ld1_argv, object_lst);
-
+
if (early_exit)
{
#ifdef COLLECT_EXPORT_LIST
/* Subroutine of try_combine. Determine whether the combine replacement
patterns NEWPAT, NEWI2PAT and NEWOTHERPAT are cheaper according to
insn_rtx_cost that the original instruction sequence I1, I2, I3 and
- undobuf.other_insn. Note that I1 and/or NEWI2PAT may be NULL_RTX.
+ undobuf.other_insn. Note that I1 and/or NEWI2PAT may be NULL_RTX.
NEWOTHERPAT and undobuf.other_insn may also both be NULL_RTX. This
   function returns false if the costs of all instructions can be
estimated, and the replacements are more expensive than the original
register and establishing log links when def is encountered.
Note that we do not clear next_use array in order to save time,
so we have to test whether the use is in the same basic block as def.
-
+
There are a few cases below when we do not consider the definition or
   usage -- these are taken from what the original flow.c did. Don't ask me why it is
done this way; I don't know and if it works, I don't want to know. */
mode2 = TYPE_MODE (DECL_ARG_TYPE (arg));
uns3 = TYPE_UNSIGNED (DECL_ARG_TYPE (arg));
- /* The mode and signedness of the argument as it is actually passed,
+ /* The mode and signedness of the argument as it is actually passed,
after any TARGET_PROMOTE_FUNCTION_ARGS-driven ABI promotions. */
mode3 = promote_function_mode (DECL_ARG_TYPE (arg), mode2, &uns3,
TREE_TYPE (cfun->decl), 0);
if (newi2pat && new_i2_notes)
distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
-
+
if (new_i3_notes)
distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
}
df_insn_rescan (i3);
}
-
+
/* Set new_direct_jump_p if a new return or simple jump instruction
has been created. Adjust the CFG accordingly. */
*new_direct_jump_p = 1;
update_cfg_for_uncondjump (i3);
}
-
+
combine_successes++;
undo_commit ();
}
/* Try simplify a*(b/c) as (a*b)/c. */
- if (FLOAT_MODE_P (mode) && flag_associative_math
+ if (FLOAT_MODE_P (mode) && flag_associative_math
&& GET_CODE (XEXP (x, 0)) == DIV)
{
rtx tem = simplify_binary_operation (MULT, mode,
if (REG_P (XEXP (XVECEXP (newpat, 0, i), 0))
&& ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
return -1;
- if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) != SCRATCH)
+ if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) != SCRATCH)
{
gcc_assert (REG_P (XEXP (XVECEXP (newpat, 0, i), 0)));
notes = alloc_reg_note (REG_UNUSED,
tree s_intype = TREE_TYPE (s_expr);
const enum built_in_function fcode = builtin_mathfn_code (s_expr);
tree fn = 0;
-
+
switch (fcode)
{
CASE_FLT_FN (BUILT_IN_CEIL):
default:
break;
}
-
+
if (fn)
{
tree newexpr = build_call_expr (fn, 1, CALL_EXPR_ARG (s_expr, 0));
tree s_intype = TREE_TYPE (s_expr);
const enum built_in_function fcode = builtin_mathfn_code (s_expr);
tree fn = 0;
-
+
switch (fcode)
{
CASE_FLT_FN (BUILT_IN_LOGB):
/* Read and discard the stamp. */
gcov_read_unsigned ();
-
+
counts_hash = htab_create (10,
htab_counts_entry_hash, htab_counts_entry_eq,
htab_counts_entry_del);
varpool_finalize_decl (tree_ctr_tables[counter]);
value = tree_cons (fields,
- build1 (ADDR_EXPR, TREE_TYPE (fields),
+ build1 (ADDR_EXPR, TREE_TYPE (fields),
tree_ctr_tables[counter]),
value);
}
int len = strlen (filename);
/* + 1 for extra '/', in case prefix doesn't end with /. */
int prefix_len;
-
+
if (profile_data_prefix == 0 && filename[0] != '/')
profile_data_prefix = getpwd ();
prefix_len = (profile_data_prefix) ? strlen (profile_data_prefix) + 1 : 0;
/* Name of da file. */
- da_file_name = XNEWVEC (char, len + strlen (GCOV_DATA_SUFFIX)
+ da_file_name = XNEWVEC (char, len + strlen (GCOV_DATA_SUFFIX)
+ prefix_len + 1);
if (profile_data_prefix)
declaration for functions that we want to have weak references.
Neither way is particularly good. */
-
+
/* References to __register_frame_info and __deregister_frame_info should
be weak in this file if at all possible. */
extern void __register_frame_info (const void *, struct object *)
extern void __cxa_finalize (void *) TARGET_ATTRIBUTE_WEAK;
/* Run all the global destructors on exit from the program. */
-
+
/* Some systems place the number of pointers in the first word of the
table. On SVR4 however, that word is -1. In all cases, the table is
null-terminated. On SVR4, we start from the beginning of the list and
#ifdef JCR_SECTION_NAME
/* Null terminate the .jcr section array. */
-STATIC void *__JCR_END__[1]
+STATIC void *__JCR_END__[1]
__attribute__ ((unused, section(JCR_SECTION_NAME),
aligned(sizeof(void *))))
= { 0 };
return hash;
}
-/* Same as hash_rtx, but call CB on each rtx if it is not NULL.
+/* Same as hash_rtx, but call CB on each rtx if it is not NULL.
When the callback returns true, we continue with the new rtx. */
unsigned
return hash;
/* Invoke the callback first. */
- if (cb != NULL
+ if (cb != NULL
&& ((*cb) (x, mode, &newx, &newmode)))
{
hash += hash_rtx_cb (newx, newmode, do_not_record_p,
{
elt = CONST_VECTOR_ELT (x, i);
hash += hash_rtx_cb (elt, GET_MODE (elt),
- do_not_record_p, hash_arg_in_memory_p,
+ do_not_record_p, hash_arg_in_memory_p,
have_reg_qty, cb);
}
x = XEXP (x, i);
goto repeat;
}
-
+
hash += hash_rtx_cb (XEXP (x, i), VOIDmode, do_not_record_p,
hash_arg_in_memory_p,
have_reg_qty, cb);
They could e.g. be two different entities allocated into the
same space on the stack (see e.g. PR25130). In that case, the
MEM addresses can be the same, even though the two MEMs are
- absolutely not equivalent.
-
+ absolutely not equivalent.
+
But because really all MEM attributes should be the same for
equivalent MEMs, we just use the invariant that MEMs that have
the same attributes share the same mem_attrs data structure. */
constant through simplifications. */
p = lookup (folded_arg0, SAFE_HASH (folded_arg0, mode_arg0),
mode_arg0);
-
+
if (p != NULL)
{
cheapest_simplification = x;
describe the path.
It is filled with a queue of basic blocks, starting with FIRST_BB
and following a trace through the CFG.
-
+
If all paths starting at FIRST_BB have been followed, or no new path
starting at FIRST_BB can be constructed, this function returns FALSE.
Otherwise, DATA->path is filled and the function returns TRUE indicating
basic_block bb;
edge e;
int path_size;
-
+
SET_BIT (cse_visited_basic_blocks, first_bb->index);
/* See if there is a previous path. */
int path_entry;
/* Scan to end of each basic block in the path. */
- for (path_entry = 0; path_entry < path_size; path_entry++)
+ for (path_entry = 0; path_entry < path_size; path_entry++)
{
basic_block bb;
rtx insn;
&& GET_MODE (*loc) != GET_MODE (args->newreg))
{
validate_change (args->insn, loc, args->newreg, 1);
-
+
return -1;
}
return 0;
args.insn = insn;
args.newreg = newreg;
-
+
f